diff --git a/.github/ISSUE_TEMPLATE/bug.yml b/.github/ISSUE_TEMPLATE/bug.yml index a3782490f81939..f5377e3d9b75d5 100644 --- a/.github/ISSUE_TEMPLATE/bug.yml +++ b/.github/ISSUE_TEMPLATE/bug.yml @@ -58,7 +58,6 @@ body: - Caffe - ONNX - PyTorch - - mxnet - PaddlePaddle validations: required: false diff --git a/.github/workflows/coverage.yml b/.github/workflows/coverage.yml index 44ba4445ba8bb8..3aecbe2367da05 100644 --- a/.github/workflows/coverage.yml +++ b/.github/workflows/coverage.yml @@ -51,7 +51,6 @@ jobs: # For running TensorFlow frontend unit tests python3 -m pip install -r ${{ github.workspace }}/src/frontends/tensorflow/tests/requirements.txt # For MO unit tests - python3 -m pip install -r ${{ github.workspace }}/tools/mo/requirements_mxnet.txt python3 -m pip install -r ${{ github.workspace }}/tools/mo/requirements_caffe.txt python3 -m pip install -r ${{ github.workspace }}/tools/mo/requirements_kaldi.txt python3 -m pip install -r ${{ github.workspace }}/tools/mo/requirements_onnx.txt diff --git a/.github/workflows/job_python_unit_tests.yml b/.github/workflows/job_python_unit_tests.yml index e4d4a1f1a9f75b..81cc2f4c538284 100644 --- a/.github/workflows/job_python_unit_tests.yml +++ b/.github/workflows/job_python_unit_tests.yml @@ -99,10 +99,6 @@ jobs: extras_to_install="caffe,kaldi,onnx,tensorflow2,pytorch" - if [[ "${{ runner.arch }}" != "ARM64" ]]; then - extras_to_install="mxnet,$extras_to_install" - fi - # Find and install OV dev wheel pushd ${INSTALL_DIR}/tools ov_dev_wheel_name=$(find . -name 'openvino_dev*.whl') @@ -142,8 +138,7 @@ jobs: # Skips under tickets: 133405, 122666 python3 -m pytest -s ${INSTALL_TEST_DIR}/mo/unit_tests \ - --junitxml=${INSTALL_TEST_DIR}/TEST-ModelOptimizer.xml \ - --ignore-glob="**/mo/unit_tests/mo/front/mxnet/**" + --junitxml=${INSTALL_TEST_DIR}/TEST-ModelOptimizer.xml - name: Python ONNX operators tests if: (fromJSON(inputs.affected-components).Python_API.test || diff --git a/.github/workflows/mo.yml b/.github/workflows/mo.yml index ee3e3e9fe5af77..75ce4adae9496a 100644 --- a/.github/workflows/mo.yml +++ b/.github/workflows/mo.yml @@ -46,7 +46,6 @@ jobs: # For UT pip install unittest-xml-reporting==3.0.2 # MO requirements - pip install -r requirements_mxnet.txt pip install -r requirements_caffe.txt pip install -r requirements_kaldi.txt pip install -r requirements_onnx.txt diff --git a/.github/workflows/windows_vs2019_release.yml b/.github/workflows/windows_vs2019_release.yml index 59e2fdd56de5f3..16091224690eba 100644 --- a/.github/workflows/windows_vs2019_release.yml +++ b/.github/workflows/windows_vs2019_release.yml @@ -278,7 +278,7 @@ jobs: # Find and install the dev OV wheel $ovDevWheelPath=Get-ChildItem -Path "${{ env.INSTALL_DIR }}\tools" -Filter openvino_dev*.whl | % { $_.FullName } - python3 -m pip install "$ovDevWheelPath[mxnet,caffe,kaldi,onnx,tensorflow2,pytorch]" + python3 -m pip install "$ovDevWheelPath[caffe,kaldi,onnx,tensorflow2,pytorch]" - name: Install Python API tests dependencies run: | @@ -308,7 +308,7 @@ jobs: if: fromJSON(needs.smart_ci.outputs.affected_components).MO.test shell: cmd run: | - python3 -m pytest -s ${{ env.INSTALL_TEST_DIR }}/mo/unit_tests --ignore=${{ env.INSTALL_TEST_DIR }}/mo/unit_tests/mo/front/mxnet --junitxml=${{ env.INSTALL_TEST_DIR }}/TEST-ModelOptimizer.xml + python3 -m pytest -s ${{ env.INSTALL_TEST_DIR }}/mo/unit_tests --junitxml=${{ env.INSTALL_TEST_DIR }}/TEST-ModelOptimizer.xml - name: Install Python Layer tests dependencies run: | diff --git a/docs/articles_en/documentation/legacy-features.rst 
b/docs/articles_en/documentation/legacy-features.rst index 5c9095e4e9e972..f859a3a4572f88 100644 --- a/docs/articles_en/documentation/legacy-features.rst +++ b/docs/articles_en/documentation/legacy-features.rst @@ -78,7 +78,7 @@ offering. Discontinued: ############# -.. dropdown:: Apache MXNet, Caffe, and Kaldi model formats +.. dropdown:: Caffe and Kaldi model formats | *New solution:* conversion to ONNX via external tools | *Old solution:* model support discontinued with OpenVINO 2024.0 diff --git a/docs/articles_en/documentation/legacy-features/transition-legacy-conversion-api/legacy-conversion-api/[legacy]-model-optimizer-faq.rst b/docs/articles_en/documentation/legacy-features/transition-legacy-conversion-api/legacy-conversion-api/[legacy]-model-optimizer-faq.rst index 402df29fd9e7b1..f035101d715e9b 100644 --- a/docs/articles_en/documentation/legacy-features/transition-legacy-conversion-api/legacy-conversion-api/[legacy]-model-optimizer-faq.rst +++ b/docs/articles_en/documentation/legacy-features/transition-legacy-conversion-api/legacy-conversion-api/[legacy]-model-optimizer-faq.rst @@ -234,7 +234,7 @@ Q14. What does the message "Cannot infer shape for node {} because there is no C Q15. What does the message "Framework name can not be deduced from the given options. Use --framework to choose one of Caffe, TensorFlow, MXNet" mean? ###################################################################################################################################################### -**A:** You have run Model Optimizer without a flag ``--framework caffe|tf|mxnet``. Model Optimizer tries to deduce the framework by the extension of input model file (``.pb`` for TensorFlow, ``.caffemodel`` for Caffe, ``.params`` for Apache MXNet). Your input model might have a different extension and you need to explicitly set the source framework. For example, use ``--framework caffe``. +**A:** You have run Model Optimizer without the ``--framework caffe|tf`` flag. Model Optimizer tries to deduce the framework from the extension of the input model file (``.pb`` for TensorFlow, ``.caffemodel`` for Caffe). Your input model might have a different extension, and you need to explicitly set the source framework. For example, use ``--framework caffe``. ..
_question-16: diff --git a/docs/dev/ov_dependencies.txt b/docs/dev/ov_dependencies.txt index f2f65836d72e76..7cf26c58a9436d 100644 --- a/docs/dev/ov_dependencies.txt +++ b/docs/dev/ov_dependencies.txt @@ -147,7 +147,6 @@ mccabe mistune mongo-python-driver more-itertools -mxnet mypy mypy-extensions networkx diff --git a/docs/dev/pypi_publish/pypi-openvino-dev.md b/docs/dev/pypi_publish/pypi-openvino-dev.md index 24f9c35b1e4831..868a7298b10a14 100644 --- a/docs/dev/pypi_publish/pypi-openvino-dev.md +++ b/docs/dev/pypi_publish/pypi-openvino-dev.md @@ -83,7 +83,6 @@ pip install openvino-dev[extras] | :-------------------------------| :------------------------------------------------------------------------------- | | caffe | [Caffe*](https://caffe.berkeleyvision.org/) | | kaldi | [Kaldi*](https://github.com/kaldi-asr/kaldi) | -| mxnet | [Apache MXNet*](https://mxnet.apache.org/) | | onnx | [ONNX*](https://github.com/microsoft/onnxruntime/) | | pytorch | [PyTorch*](https://pytorch.org/) | | tensorflow | [TensorFlow* 1.x](https://www.tensorflow.org/versions#tensorflow_1) | @@ -149,14 +148,14 @@ Users in China might encounter errors while downloading sources via PIP during O If you use zsh (Z shell) interpreter, that is the default shell for macOS starting with version 10.15 (Catalina), you may encounter the following error while installing `openvino-dev` package with extras: ```sh -pip install openvino-dev[tensorflow2,mxnet,caffe] -zsh: no matches found: openvino-dev[tensorflow2,mxnet,caffe] +pip install openvino-dev[tensorflow2,caffe] +zsh: no matches found: openvino-dev[tensorflow2,caffe] ``` By default zsh interprets square brackets as an expression for pattern matching. To resolve this issue, you need to escape the command with quotes: ```sh -pip install 'openvino-dev[tensorflow2,mxnet,caffe]' +pip install 'openvino-dev[tensorflow2,caffe]' ``` To avoid such issues you can also disable globbing for PIP commands by defining an alias in `~/.zshrc` file: diff --git a/docs/sphinx_setup/_static/download/supported_models.csv b/docs/sphinx_setup/_static/download/supported_models.csv index ac37b2813a3355..1c7db3ab6bfc9d 100644 --- a/docs/sphinx_setup/_static/download/supported_models.csv +++ b/docs/sphinx_setup/_static/download/supported_models.csv @@ -211,11 +211,6 @@ facenet,Object Detection,onnx,FP16,,,+ facenet,Object Detection,onnx,FP16-INT8,,,+ facenet-20180408-102900,Object Detection,tf,FP16,+,+,+ facenet-20180408-102900,Object Detection,tf,FP32,+,+, -face-recognition-mobilefacenet-arcface,Object Detection,mxnet,FP16,,,+ -face-recognition-mobilefacenet-arcface,Object Detection,mxnet,FP16-INT8,,,+ -face-recognition-resnet50-arcface,Object Detection,mxnet,FP16,+,+,+ -face-recognition-resnet50-aws,Object Detection,mxnet,FP16,,,+ -face-recognition-resnet50-aws,Object Detection,mxnet,FP32,,,+ face-reidentification-retail-0095,Object Detection,onnx,FP16,,,+ face-reidentification-retail-0095,Object Detection,onnx,FP16-INT8,,,+ facial-landmarks-35-adas-0002,Object Detection,caffe,FP16,,,+ @@ -428,8 +423,6 @@ ocrnet-hrnet-w18,Text Detection,paddle,FP16-INT8,+,+, ocrnet-hrnet-w18,Text Detection,paddle,FP32,+,, ocrnet-hrnet-w48,Text Detection,paddle,FP16,+,, ocrnet-hrnet-w48,Text Detection,paddle,FP32,+,, -octave-resnext-101-0.25,Image Classification,mxnet,FP16,+,+,+ -octave-resnext-101-0.25,Image Classification,mxnet,FP32,+,, openchat-3.6-8b-20240522,Large Language Model,pytorch,intel-optimum default,,+, open-closed-eye-0001,Image Classification,onnx,FP16,,,+ open-closed-eye-0001,Image 
Classification,onnx,FP16-INT8,,,+ @@ -629,8 +622,6 @@ squeezenet1.0-12,Image Classification,onnx,FP16,,,+ squeezenet1.0-12,Image Classification,onnx,FP32,+,+,+ squeezenet1.1-caffe,Image Classification,caffe,FP16,+,+,+ squeezenet1.1-caffe,Image Classification,caffe,FP32,+,+,+ -squeezenet1.1-mxnet,Image Classification,mxnet,FP16,+,+,+ -squeezenet1.1-mxnet,Image Classification,mxnet,FP32,+,+,+ squeezenet1.1-onnx,Image Classification,onnx,FP32,+,, srgan-onnx,"Image Processing, Enhancement",onnx,FP16,+,,+ srgan-tf,"Image Processing, Enhancement",tf,FP16,+,+, diff --git a/src/tests/test_utils/functional_test_utils/layer_tests_summary/conformance_helper_tools/conformance_vs_accuracy_comparator.py b/src/tests/test_utils/functional_test_utils/layer_tests_summary/conformance_helper_tools/conformance_vs_accuracy_comparator.py index 23c6e9c749fda6..19e6140ce54114 100644 --- a/src/tests/test_utils/functional_test_utils/layer_tests_summary/conformance_helper_tools/conformance_vs_accuracy_comparator.py +++ b/src/tests/test_utils/functional_test_utils/layer_tests_summary/conformance_helper_tools/conformance_vs_accuracy_comparator.py @@ -38,7 +38,7 @@ def get_precision(self): return self.__model_prc def path_to_model(model_path: os.path, prefix: str): - frameworks = {'tf', 'tf2', 'caffe', 'onnx', 'mxnet', 'paddle', 'kaldi'} + frameworks = {'tf', 'tf2', 'caffe', 'onnx', 'paddle', 'kaldi'} precisions = {'FP16', 'FP32', 'INT8', 'INT1'} # remove share path + model.xml model_path = model_path.replace('\n', '') diff --git a/tools/constraints.txt b/tools/constraints.txt index d7d8d84ac4cb7e..b19b18fc844de4 100644 --- a/tools/constraints.txt +++ b/tools/constraints.txt @@ -3,8 +3,6 @@ # files because the version differs between them: # tensorflow, numpy h5py>=3.1.0,<3.11.0 -mxnet~=1.2.0; sys_platform == 'win32' -mxnet>=1.7.0.post2,<=1.9.1; sys_platform != 'win32' onnx>=1.8.1,<=1.16.0 networkx<=3.1.0 pytest>=5.0,<8.4 diff --git a/tools/mo/README.md b/tools/mo/README.md index 7d1fbb64881111..4eeeb6886b2e66 100644 --- a/tools/mo/README.md +++ b/tools/mo/README.md @@ -18,10 +18,6 @@ E.g. 
the command below will install dependencies to support ONNX\* and TensorFlo ``` pip install openvino-dev[onnx,tensorflow2] ``` -To enable support of MxNet\* models run: -``` -pip install openvino-dev[mxnet] -``` To enable support of all frameworks: ``` pip install openvino-dev[all] diff --git a/tools/mo/automation/package_BOM.txt b/tools/mo/automation/package_BOM.txt index 2e56196c3bc515..1f09faefde1f39 100644 --- a/tools/mo/automation/package_BOM.txt +++ b/tools/mo/automation/package_BOM.txt @@ -252,111 +252,6 @@ openvino/tools/mo/front/LayerNorm.py openvino/tools/mo/front/Log1p.py openvino/tools/mo/front/MatMul_normalizer.py openvino/tools/mo/front/MoveEmbeddedInputsToInputs.py -openvino/tools/mo/front/mxnet/__init__.py -openvino/tools/mo/front/mxnet/activation.py -openvino/tools/mo/front/mxnet/adaptive_avg_pooling_ext.py -openvino/tools/mo/front/mxnet/add_input_data_to_prior_boxes.py -openvino/tools/mo/front/mxnet/arange_ext.py -openvino/tools/mo/front/mxnet/arange_like_ext.py -openvino/tools/mo/front/mxnet/arange_like_replacer.py -openvino/tools/mo/front/mxnet/arange_replacer.py -openvino/tools/mo/front/mxnet/batch_dot_ext.py -openvino/tools/mo/front/mxnet/block_grad_ext.py -openvino/tools/mo/front/mxnet/box_nms_ext.py -openvino/tools/mo/front/mxnet/cast_ext.py -openvino/tools/mo/front/mxnet/check_softmax_node_inputs.py -openvino/tools/mo/front/mxnet/clip_ext.py -openvino/tools/mo/front/mxnet/conv_ext.py -openvino/tools/mo/front/mxnet/copy_ext.py -openvino/tools/mo/front/mxnet/crop_ext.py -openvino/tools/mo/front/mxnet/cumsum.py -openvino/tools/mo/front/mxnet/cumsum_ext.py -openvino/tools/mo/front/mxnet/custom.py -openvino/tools/mo/front/mxnet/custom_rpn_proposal.py -openvino/tools/mo/front/mxnet/deformable_conv_ext.py -openvino/tools/mo/front/mxnet/deformable_psroi_pooling_ext.py -openvino/tools/mo/front/mxnet/div_sqrt_dim.py -openvino/tools/mo/front/mxnet/dropout_ext.py -openvino/tools/mo/front/mxnet/einsum_ext.py -openvino/tools/mo/front/mxnet/elementwise_ext.py -openvino/tools/mo/front/mxnet/eltwise_scalar_replacers.py -openvino/tools/mo/front/mxnet/exp_ext.py -openvino/tools/mo/front/mxnet/expand_dims_ext.py -openvino/tools/mo/front/mxnet/extractor.py -openvino/tools/mo/front/mxnet/extractors/__init__.py -openvino/tools/mo/front/mxnet/extractors/add_n.py -openvino/tools/mo/front/mxnet/extractors/batchnorm.py -openvino/tools/mo/front/mxnet/extractors/concat.py -openvino/tools/mo/front/mxnet/extractors/l2_normalization.py -openvino/tools/mo/front/mxnet/extractors/multibox_prior.py -openvino/tools/mo/front/mxnet/extractors/relu.py -openvino/tools/mo/front/mxnet/extractors/scaleshift.py -openvino/tools/mo/front/mxnet/extractors/slice_axis.py -openvino/tools/mo/front/mxnet/extractors/utils.py -openvino/tools/mo/front/mxnet/eye_ext.py -openvino/tools/mo/front/mxnet/eye_mx_to_eye.py -openvino/tools/mo/front/mxnet/fft_ext.py -openvino/tools/mo/front/mxnet/flatten_ext.py -openvino/tools/mo/front/mxnet/fully_connected_ext.py -openvino/tools/mo/front/mxnet/gather.py -openvino/tools/mo/front/mxnet/gather_ext.py -openvino/tools/mo/front/mxnet/gluoncv_ssd_anchors.py -openvino/tools/mo/front/mxnet/instance_norm_ext.py -openvino/tools/mo/front/mxnet/layer_norm_ext.py -openvino/tools/mo/front/mxnet/leaky_relu.py -openvino/tools/mo/front/mxnet/loader.py -openvino/tools/mo/front/mxnet/lrn_ext.py -openvino/tools/mo/front/mxnet/max_ext.py -openvino/tools/mo/front/mxnet/modulated_deformable_conv_ext.py -openvino/tools/mo/front/mxnet/modulated_deformable_conv_replacer.py 
-openvino/tools/mo/front/mxnet/multibox_detection_ext.py -openvino/tools/mo/front/mxnet/mx_reshape_reverse.py -openvino/tools/mo/front/mxnet/mx_reshape_to_reshape.py -openvino/tools/mo/front/mxnet/MXFFTToDFT.py -openvino/tools/mo/front/mxnet/MXRepeatReplacer.py -openvino/tools/mo/front/mxnet/nd_to_params.py -openvino/tools/mo/front/mxnet/null_ext.py -openvino/tools/mo/front/mxnet/pad_ext.py -openvino/tools/mo/front/mxnet/pooling_ext.py -openvino/tools/mo/front/mxnet/proposal_ext.py -openvino/tools/mo/front/mxnet/psroi_pooling_ext.py -openvino/tools/mo/front/mxnet/random_uniform_ext.py -openvino/tools/mo/front/mxnet/register_custom_ops.py -openvino/tools/mo/front/mxnet/repeat_ext.py -openvino/tools/mo/front/mxnet/reshape_ext.py -openvino/tools/mo/front/mxnet/RNN_ext.py -openvino/tools/mo/front/mxnet/rnn_param_concat.py -openvino/tools/mo/front/mxnet/roi_pooling_ext.py -openvino/tools/mo/front/mxnet/roll_ext.py -openvino/tools/mo/front/mxnet/shape_array_ext.py -openvino/tools/mo/front/mxnet/sigmoid.py -openvino/tools/mo/front/mxnet/slice_channel_ext.py -openvino/tools/mo/front/mxnet/slice_ext.py -openvino/tools/mo/front/mxnet/slice_like_ext.py -openvino/tools/mo/front/mxnet/slice_replacers.py -openvino/tools/mo/front/mxnet/softmax.py -openvino/tools/mo/front/mxnet/softmax_activation_ext.py -openvino/tools/mo/front/mxnet/softmax_ext.py -openvino/tools/mo/front/mxnet/softmax_output_ext.py -openvino/tools/mo/front/mxnet/softsign_ext.py -openvino/tools/mo/front/mxnet/squeeze_ext.py -openvino/tools/mo/front/mxnet/ssd_anchor_reshape.py -openvino/tools/mo/front/mxnet/ssd_detection_output_replacer.py -openvino/tools/mo/front/mxnet/ssd_pattern_flatten_softmax_activation.py -openvino/tools/mo/front/mxnet/ssd_pattern_remove_flatten.py -openvino/tools/mo/front/mxnet/ssd_pattern_remove_reshape.py -openvino/tools/mo/front/mxnet/ssd_pattern_remove_transpose.py -openvino/tools/mo/front/mxnet/ssd_reorder_detection_out_inputs.py -openvino/tools/mo/front/mxnet/stack_ext.py -openvino/tools/mo/front/mxnet/swapaxis_ext.py -openvino/tools/mo/front/mxnet/take_ext.py -openvino/tools/mo/front/mxnet/tile_ext.py -openvino/tools/mo/front/mxnet/tile_replacer.py -openvino/tools/mo/front/mxnet/transpose_ext.py -openvino/tools/mo/front/mxnet/up_sampling_ext.py -openvino/tools/mo/front/mxnet/where_ext.py -openvino/tools/mo/front/mxnet/yolo_v3_mobilenet1_voc.json -openvino/tools/mo/front/mxnet/zeros_ext.py openvino/tools/mo/front/no_op_eraser.py openvino/tools/mo/front/non_max_suppression_normalize.py openvino/tools/mo/front/OneHotDepthNormalizer.py @@ -692,8 +587,6 @@ openvino/tools/mo/load/caffe/loader.py openvino/tools/mo/load/kaldi/__init__.py openvino/tools/mo/load/kaldi/loader.py openvino/tools/mo/load/loader.py -openvino/tools/mo/load/mxnet/__init__.py -openvino/tools/mo/load/mxnet/loader.py openvino/tools/mo/load/onnx/__init__.py openvino/tools/mo/load/onnx/loader.py openvino/tools/mo/load/tf/__init__.py @@ -701,7 +594,6 @@ openvino/tools/mo/load/tf/loader.py openvino/tools/mo/main.py openvino/tools/mo/main_caffe.py openvino/tools/mo/main_kaldi.py -openvino/tools/mo/main_mxnet.py openvino/tools/mo/main_onnx.py openvino/tools/mo/main_paddle.py openvino/tools/mo/main_tf.py @@ -827,7 +719,6 @@ openvino/tools/mo/middle/UselessSplitEraser.py openvino/tools/mo/mo.py openvino/tools/mo/mo_caffe.py openvino/tools/mo/mo_kaldi.py -openvino/tools/mo/mo_mxnet.py openvino/tools/mo/mo_onnx.py openvino/tools/mo/mo_paddle.py openvino/tools/mo/mo_tf.py @@ -845,7 +736,6 @@ openvino/tools/mo/ops/__init__.py 
openvino/tools/mo/ops/activation.py openvino/tools/mo/ops/activation_ops.py openvino/tools/mo/ops/adaptive_avg_pooling.py -openvino/tools/mo/ops/arange_like.py openvino/tools/mo/ops/argmax.py openvino/tools/mo/ops/argmin.py openvino/tools/mo/ops/assert_op.py @@ -882,7 +772,6 @@ openvino/tools/mo/ops/dequantize_linear.py openvino/tools/mo/ops/detection_output_onnx.py openvino/tools/mo/ops/DetectionOutput.py openvino/tools/mo/ops/dft.py -openvino/tools/mo/ops/div_sqrt_dim.py openvino/tools/mo/ops/dropoutmask.py openvino/tools/mo/ops/einsum.py openvino/tools/mo/ops/elementwise.py @@ -932,7 +821,6 @@ openvino/tools/mo/ops/multinomial.py openvino/tools/mo/ops/mvn.py openvino/tools/mo/ops/mxfft.py openvino/tools/mo/ops/mxrepeat.py -openvino/tools/mo/ops/mxreshape.py openvino/tools/mo/ops/NextIteration.py openvino/tools/mo/ops/nms_rotated.py openvino/tools/mo/ops/non_max_suppression.py diff --git a/tools/mo/openvino/tools/mo/analysis/inputs.py b/tools/mo/openvino/tools/mo/analysis/inputs.py index eb797359e2bf8d..2b541ee6bea3ca 100644 --- a/tools/mo/openvino/tools/mo/analysis/inputs.py +++ b/tools/mo/openvino/tools/mo/analysis/inputs.py @@ -37,21 +37,6 @@ def fifo_queue_analysis(cls, graph: Graph, inputs_desc: dict): 'data_type': fifo_queue.types[port_ind]} return inputs_to_ignore - @classmethod - def ignore_mxnet_softmax_inputs(cls, graph: Graph): - """ - MxNet Softmax layers may have additional inputs which should be ignored. Refer to the - openvino/tools/mo/front/mxnet/check_softmax_node_inputs.py. - """ - inputs_to_ignore = set() - softmax_nodes = [] - [softmax_nodes.extend(graph.get_op_nodes(op=op)) for op in ('SoftMax', 'SoftmaxActivation', 'SoftmaxOutput')] - for softmax_node in softmax_nodes: - for i in range(1, len(softmax_node.in_nodes())): - if softmax_node.in_node(i).has_valid('op') and softmax_node.in_node(i).op == 'Parameter': - inputs_to_ignore.add(softmax_node.in_node(i).id) - return inputs_to_ignore - @classmethod def iterator_get_next_analysis(cls, graph: Graph, inputs_desc: dict): message = None @@ -80,9 +65,6 @@ def analyze(self, graph: Graph): inputs_desc = dict() message = InputsAnalysis.iterator_get_next_analysis(graph, inputs_desc) inputs_to_ignore = InputsAnalysis.fifo_queue_analysis(graph, inputs_desc) - if graph.graph['fw'] == 'mxnet': - inputs_to_ignore.update(InputsAnalysis.ignore_mxnet_softmax_inputs(graph)) - inputs = graph.get_op_nodes(op='Parameter') for input in inputs: inputs_desc[input.name] = {'shape': input.soft_get('shape', None), diff --git a/tools/mo/openvino/tools/mo/convert.py b/tools/mo/openvino/tools/mo/convert.py index 56a862be4aef12..d9bad0d11b1ae4 100644 --- a/tools/mo/openvino/tools/mo/convert.py +++ b/tools/mo/openvino/tools/mo/convert.py @@ -59,14 +59,6 @@ def convert_model( tensorboard_logdir: [str, pathlib.Path] = None, tensorflow_custom_layer_libraries: [str, pathlib.Path] = None, - # MXNet-specific parameters: - input_symbol: [str, pathlib.Path] = None, - nd_prefix_name: str = None, - pretrained_model_name: str = None, - save_params_from_nd: bool = None, - legacy_mxnet_model: bool = None, - enable_ssd_gluoncv: bool = False, - # Caffe*-specific parameters: input_proto: [str, pathlib.Path] = None, caffe_parser_path: [str, pathlib.Path] = None, @@ -129,7 +121,7 @@ def convert_model( a string or list of strings of the following format. Quoted list of comma-separated input nodes names with shapes, data types, and values for freezing. 
If operation names are specified, the order of inputs in converted - model will be the same as order of specified operation names (applicable for TF2, ONNX, MxNet). + model will be the same as order of specified operation names (applicable for TF2, ONNX). The shape and value are specified as comma-separated lists. The data type of input node is specified in braces and can have one of the values: f64 (float64), f32 (float32), f16 (float16), i64 (int64), i32 (int32), u8 (uint8), boolean (bool). Data type is optional. @@ -308,24 +300,6 @@ def convert_model( TensorFlow*: comma separated list of shared libraries with TensorFlow* custom operations implementation. - MXNet-specific parameters: - :param input_symbol: - Symbol file (for example, model-symbol.json) that contains a topology - structure and layer attributes - :param nd_prefix_name: - Prefix name for args.nd and argx.nd files. - :param pretrained_model_name: - Name of a pretrained MXNet model without extension and epoch number. - This model will be merged with args.nd and argx.nd files - :param save_params_from_nd: - Enable saving built parameters file from .nd files - :param legacy_mxnet_model: - Enable MXNet loader to make a model compatible with the latest MXNet - version. Use only if your model was trained with MXNet version lower - than 1.0.0 - :param enable_ssd_gluoncv: - Enable pattern matchers replacers for converting gluoncv ssd topologies. - Caffe*-specific parameters: :param input_proto: Deploy-ready prototxt file that contains a topology structure and diff --git a/tools/mo/openvino/tools/mo/convert_impl.py b/tools/mo/openvino/tools/mo/convert_impl.py index 43ac89be8c1fa8..ae80e6a33064f5 100644 --- a/tools/mo/openvino/tools/mo/convert_impl.py +++ b/tools/mo/openvino/tools/mo/convert_impl.py @@ -34,7 +34,7 @@ from openvino.tools.mo.utils.cli_parser import check_available_transforms, \ get_advanced_cli_options, get_available_front_ends, get_caffe_cli_options, \ get_common_cli_options, get_freeze_placeholder_values, get_kaldi_cli_options, get_layout_values, \ - get_mean_scale_dictionary, get_mxnet_cli_options, get_onnx_cli_options, \ + get_mean_scale_dictionary, get_onnx_cli_options, \ get_placeholder_shapes, get_tf_cli_options, parse_transform, parse_tuple_pairs, \ get_model_name_from_args, depersonalize, get_mo_convert_params, input_to_input_cut_info, \ input_shape_to_input_cut_info, freeze_placeholder_to_input_cut_info @@ -67,7 +67,7 @@ tf_frontend_with_python_bindings_installed = False -def load_extensions(argv: argparse.Namespace, is_tf: bool, is_caffe: bool, is_mxnet: bool, is_kaldi: bool, +def load_extensions(argv: argparse.Namespace, is_tf: bool, is_caffe: bool, is_kaldi: bool, is_onnx: bool): extensions = None if hasattr(argv, 'extensions') and argv.extensions and argv.extensions != '': @@ -79,10 +79,6 @@ def load_extensions(argv: argparse.Namespace, is_tf: bool, is_caffe: bool, is_mx send_framework_info('caffe') from openvino.tools.mo.front.caffe.register_custom_ops import get_front_classes import_extensions.load_dirs(argv.framework, extensions, get_front_classes) - elif is_mxnet: - send_framework_info('mxnet') - from openvino.tools.mo.front.mxnet.register_custom_ops import get_front_classes - import_extensions.load_dirs(argv.framework, extensions, get_front_classes) elif is_kaldi: send_framework_info('kaldi') from openvino.tools.mo.front.kaldi.register_custom_ops import get_front_classes @@ -100,7 +96,7 @@ def replace_ext(name: str, old: str, new: str): return base + new -def print_argv(argv: argparse.Namespace, 
is_caffe: bool, is_tf: bool, is_mxnet: bool, is_kaldi: bool, is_onnx: bool, +def print_argv(argv: argparse.Namespace, is_caffe: bool, is_tf: bool, is_kaldi: bool, is_onnx: bool, model_name: str): print('Model Optimizer arguments:') props = OrderedDict() @@ -110,8 +106,6 @@ def print_argv(argv: argparse.Namespace, is_caffe: bool, is_tf: bool, is_mxnet: props['caffe_args'] = get_caffe_cli_options() if is_tf: props['tf_args'] = get_tf_cli_options() - if is_mxnet: - props['mxnet_args'] = get_mxnet_cli_options() if is_kaldi: props['kaldi_args'] = get_kaldi_cli_options() if is_onnx: @@ -122,7 +116,6 @@ def print_argv(argv: argparse.Namespace, is_caffe: bool, is_tf: bool, is_mxnet: 'advanced_args': 'Advanced parameters:', 'caffe_args': 'Caffe specific parameters:', 'tf_args': 'TensorFlow specific parameters:', - 'mxnet_args': 'MXNet specific parameters:', 'kaldi_args': 'Kaldi specific parameters:', 'onnx_args': 'ONNX specific parameters:', } @@ -161,17 +154,17 @@ def arguments_post_parsing(argv: argparse.Namespace): 'Please ensure that your environment contains new frontend for the input model format or ' 'try to convert the model without specifying --use_new_frontend option.') - is_tf, is_caffe, is_mxnet, is_kaldi, is_onnx = \ - deduce_legacy_frontend_by_namespace(argv) if not moc_front_end else [False, False, False, False, False] + is_tf, is_caffe, is_kaldi, is_onnx = \ + deduce_legacy_frontend_by_namespace(argv) if not moc_front_end else [False, False, False, False] - is_legacy_frontend = any([is_tf, is_caffe, is_mxnet, is_kaldi, is_onnx]) + is_legacy_frontend = any([is_tf, is_caffe, is_kaldi, is_onnx]) if not is_legacy_frontend and use_legacy_frontend: raise Error('Option --use_legacy_frontend is specified but Model Optimizer does not have legacy frontend ' 'for the input model format. Please try to convert the model without specifying --use_legacy_frontend option.') # handle a default case, i.e. use_new_frontend and use_legacy_frontend are not specified, when no frontend is found if not is_legacy_frontend and not moc_front_end: - legacy_frameworks = ['tf', 'caffe', 'mxnet', 'kaldi', 'onnx'] + legacy_frameworks = ['tf', 'caffe', 'kaldi', 'onnx'] frameworks = list(set(legacy_frameworks + available_moc_front_ends)) if not argv.framework: raise Error('Framework name can not be deduced from the given options: {}={}. 
' @@ -191,9 +184,6 @@ def arguments_post_parsing(argv: argparse.Namespace): if is_tf and not argv.input_model and not argv.saved_model_dir and not argv.input_meta_graph: raise Error('Path to input model or saved model dir is required: use --input_model, --saved_model_dir or ' '--input_meta_graph') - elif is_mxnet and not argv.input_model and not argv.input_symbol and not argv.pretrained_model_name: - raise Error('Path to input model or input symbol or pretrained_model_name is required: use --input_model or ' - '--input_symbol or --pretrained_model_name') elif is_caffe and not argv.input_model and not argv.input_proto: raise Error('Path to input model or input proto is required: use --input_model or --input_proto') elif (is_kaldi or is_onnx) and not argv.input_model: @@ -216,7 +206,7 @@ def arguments_post_parsing(argv: argparse.Namespace): log.info('Deduced name for prototxt: {}'.format(argv.input_proto)) if not argv.silent: - print_argv(argv, is_caffe, is_tf, is_mxnet, is_kaldi, is_onnx, argv.model_name) + print_argv(argv, is_caffe, is_tf, is_kaldi, is_onnx, argv.model_name) VersionChecker().check_runtime_dependencies(argv.silent) @@ -274,7 +264,7 @@ def arguments_post_parsing(argv: argparse.Namespace): log.debug("Placeholder shapes : {}".format(argv.placeholder_shapes)) - load_extensions(argv, is_tf, is_caffe, is_mxnet, is_kaldi, is_onnx) + load_extensions(argv, is_tf, is_caffe, is_kaldi, is_onnx) return argv @@ -372,7 +362,7 @@ def get_moc_frontends(argv: argparse.Namespace): def prepare_ir(argv: argparse.Namespace): # TODO: remove this workaround once new TensorFlow frontend supports non-frozen formats: checkpoint, MetaGraph, and SavedModel # Now it converts all TensorFlow formats to the frozen .pb format in case new TensorFlow frontend - is_tf, _, _, _, _ = deduce_legacy_frontend_by_namespace(argv) + is_tf, _, _, _ = deduce_legacy_frontend_by_namespace(argv) argv = arguments_post_parsing(argv) t = tm.Telemetry() @@ -808,7 +798,7 @@ def framework_is_tf(args, argv): if input_model_is_object(args) and check_model_object(args) == "tf": return True if argv is not None: - is_tf, _, _, _, _ = deduce_legacy_frontend_by_namespace(argv) + is_tf, _, _, _ = deduce_legacy_frontend_by_namespace(argv) return is_tf return False @@ -860,7 +850,7 @@ def _convert(cli_parser: argparse.ArgumentParser, framework, args, python_api_us argv.is_python_api_used = python_api_used argv.feManager = FrontEndManager() - frameworks = list(set(['tf', 'caffe', 'mxnet', 'kaldi', 'onnx'] + (get_available_front_ends(argv.feManager) + frameworks = list(set(['tf', 'caffe', 'kaldi', 'onnx'] + (get_available_front_ends(argv.feManager) if argv.feManager else []))) framework = argv.framework if hasattr(argv, 'framework') and argv.framework is not None else framework if framework is not None: @@ -907,7 +897,7 @@ def _convert(cli_parser: argparse.ArgumentParser, framework, args, python_api_us print(get_compression_message()) ov_update_message = get_ov_update_message() - _, is_caffe, is_mxnet, is_kaldi, _ = deduce_legacy_frontend_by_namespace(argv) + _, is_caffe, is_kaldi, _ = deduce_legacy_frontend_by_namespace(argv) if ov_update_message is not None: print(ov_update_message) diff --git a/tools/mo/openvino/tools/mo/front/MatMul_normalizer.py b/tools/mo/openvino/tools/mo/front/MatMul_normalizer.py index f9866e024c0abe..f21350418a8593 100644 --- a/tools/mo/openvino/tools/mo/front/MatMul_normalizer.py +++ b/tools/mo/openvino/tools/mo/front/MatMul_normalizer.py @@ -55,18 +55,12 @@ def replace_sub_graph(self, graph: Graph, match: [dict, 
SubgraphMatch]): node.insert_op_on_input_port(in_port_idx=1, new_op_class=Transpose, new_op_attrs={'name': name + '/weights_transpose'}, value=int64_array([1, 0])) - # input normalization for 4D Caffe and MXNet FullyConnected + # input normalization for 4D Caffe FullyConnected if graph.graph['fw'] == 'caffe': node.insert_op_on_input_port(in_port_idx=0, new_op_class=Reshape, new_op_attrs={'name': name + '/flatten_fc_input', 'special_zero': True}, value=int64_array([0, -1])) - if graph.graph['fw'] == 'mxnet': - if node.flatten is not False: - node.insert_op_on_input_port(in_port_idx=0, new_op_class=Reshape, - new_op_attrs={'name': name + '/flatten_fc_input', 'special_zero': True}, - value=int64_array([0, -1])) - MatMul.update_node_stat(node, {}) diff --git a/tools/mo/openvino/tools/mo/front/extractor.py b/tools/mo/openvino/tools/mo/front/extractor.py index a7b0f31b31d51b..35f5a591496352 100644 --- a/tools/mo/openvino/tools/mo/front/extractor.py +++ b/tools/mo/openvino/tools/mo/front/extractor.py @@ -1179,37 +1179,9 @@ def class_type(cls): return class_registration.ClassType.EXTRACTOR -class MXNetCustomFrontExtractorOp(object): - """ - A super class for custom mxnet operation extractor. - Do additional extraction of custom MXNet operation attributes without modifying the graph topology. - Useful for custom layers that maps to a single FW operation to re-use of FW shape inference. - In contrast to FrontReplacement* classes, this class doesn't modify graph topology and - doesn't completely override node attributes. So it is safe to preserve the original - MO inference function (which can use FW fallback mechanism). - - It is needed to keep the list of extractors for particularly custom layers. - - When actual extraction happens, Model Optimizer first finds the match by type, which is CustomFrontExtractorOp. - It in turns looks up the MXNetCustomFrontExtractorOp for the needed layer extractor not by type, but by op_type. - - - A sub-class should implement one of extract methods: - def extract(self, node): - return (, { }) - """ - - registered_ops = {} - registered_cls = [] - - @classmethod - def class_type(cls): - return class_registration.ClassType.EXTRACTOR - - class CaffePythonFrontExtractorOp: """ - A super class for custom mxnet operation extractor. + A super class for custom caffe operation extractor. Do additional extraction of Python Caffe operation attributes without modifying the graph topology. Useful for Python layers that maps to a single FW operation to re-use of FW shape inference. 
In contrast to FrontReplacement* classes, this class doesn't modify graph topology and diff --git a/tools/mo/openvino/tools/mo/front/mxnet/MXFFTToDFT.py b/tools/mo/openvino/tools/mo/front/mxnet/MXFFTToDFT.py deleted file mode 100644 index db943bc962ca78..00000000000000 --- a/tools/mo/openvino/tools/mo/front/mxnet/MXFFTToDFT.py +++ /dev/null @@ -1,140 +0,0 @@ -# Copyright (C) 2018-2024 Intel Corporation -# SPDX-License-Identifier: Apache-2.0 - -import numpy as np - -from openvino.tools.mo.ops.dft import DFT, IDFT -from openvino.tools.mo.ops.elementwise import Add, Sub -from openvino.tools.mo.ops.rank import Rank -from openvino.tools.mo.ops.scatter import ScatterUpdate -from openvino.tools.mo.ops.split import Split -from openvino.tools.mo.front.common.partial_infer.utils import int64_array -from openvino.tools.mo.front.tf.graph_utils import create_op_with_const_inputs -from openvino.tools.mo.graph.graph import Graph, Node, rename_nodes -from openvino.tools.mo.front.common.replacement import FrontReplacementSubgraph -from openvino.tools.mo.ops.broadcast import Broadcast -from openvino.tools.mo.ops.concat import Concat -from openvino.tools.mo.ops.pad import Pad -from openvino.tools.mo.ops.reshape import Reshape -from openvino.tools.mo.ops.squeeze import Squeeze -from openvino.tools.mo.ops.unsqueeze import Unsqueeze - - -class MXFFTToDFT(FrontReplacementSubgraph): - """ - This transformation converts the operation MXFFT into OpenVINO DFT (if the attribute 'is_inverse' is False), - or into OpenVINO IDFT (otherwise). - - According to https://mxnet.apache.org/versions/1.0.0/api/python/symbol/contrib.html#mxnet.symbol.contrib.fft, - MxNet operation FFT accept 2 input data shapes: [N, d] or [N_1, N_2, N_3, d], data can only be real numbers. - The output data has shape: [N, 2*d] or [N_1, N_2, N_3, 2*d]. The format is: [real0, imag0, real1, imag1, ...]. - - Next, MxNet operation IFFT accept 2 input data shapes: [N, d] or [N_1, N_2, N_3, d]. Data is in format: - [real0, imag0, real1, imag1, ...]. Last dimension must be an even number. The output data has shape: [N, d/2] or - [N_1, N_2, N_3, d/2]. It is only the real part of the result. - - But OpenVINO DFT and IDFT operations uses complex input data represented as real tensors of the shape - [N_1, ..., N_r, 2]. Also, the result of OpenVINO DFT and IDFT operations is always complex but represented as - a real tensor of the shape [M_1, ..., M_r, 2]. If OpenVINO DFT or IDFT have no input signal_size, the output shape - and the input shape are the same. - - Hence, to convert MxNet FFT to OpenVINO DFT, we need - 1) to convert input data from the shape [N, d] or [N_1, N_2, N_3, d] to shape [N, d, 1] or [N_1, N_2, N_3, d, 1] - respectively; - 2) to pad converted data using pads_begin = [0, 0, 0] and pads_end = [0, 0, 1] for MxNet FFT input shape [N, d], or - using pads_begin [0, 0, 0, 0, 0] and pads_end = [0, 0, 0, 0, 1] for MxNet FFT input shape [N_1, N_2, N_3, d], - with mode=constant; - 3) to put padded data into DFT input 0, using (-1) in 'axes' input; - 4) to reshape calculated DFT output to the shape [N, 2 * d] for for MxNet FFT input shape [N, d], or to the shape - [N_1, N_2, N_3, 2 * d] - - Finally, to convert MxNet IFFT to OpenVINO IDFT, we need - 1) to reshape input data from the shape [N, d] or [N_1, N_2, N_3, d] to shape [N, d // 2, 2] or - [N_1, N_2, N_3, d // 2, 2] respectively; - 2) to put reshaped input data to the input 0 of IDFT, using (-1) in 'axes' input; - 3) to get real parts using Split + Squeeze. 
- """ - enabled = True - - def find_and_replace_pattern(self, graph: Graph): - for mx_fft in graph.get_op_nodes(op='MXFFT'): - if mx_fft.soft_get('is_inverse', False): - self.convert_ifft_to_dft(graph, mx_fft) - else: - self.convert_fft_to_dft(graph, mx_fft) - - def convert_fft_to_dft(self, graph: Graph, mx_fft: Node): - mx_fft_name = mx_fft.soft_get('name', mx_fft.id) - unsqueeze_node = create_op_with_const_inputs(graph, Unsqueeze, {1: int64_array([-1])}, - {'name': mx_fft_name + '/Unsqueeze'}) - rank_node = Rank(graph, {'name': mx_fft_name + '/Rank'}).create_node() - - mx_fft_connection = mx_fft.in_port(0).get_connection() - mx_fft_connection.set_destination(unsqueeze_node.in_port(0)) - mx_fft_connection.get_source().connect(rank_node.in_port(0)) - - add_node = create_op_with_const_inputs(graph, Add, {1: int64_array(1)}, - {'name': mx_fft_name + '/Add'}, rank_node) - broadcast_node1 = create_op_with_const_inputs(graph, Broadcast, {0: int64_array(0)}, - {'name': mx_fft_name + '/Pad_broadcast'}) - add_node.out_port(0).connect(broadcast_node1.in_port(1)) - - scatter_node = create_op_with_const_inputs(graph, ScatterUpdate, - {2: int64_array(1), 3: int64_array(0)}, - {'name': mx_fft_name + '/ScatterUpdate'}) - broadcast_node1.out_port(0).connect(scatter_node.in_port(0)) - rank_node.out_port(0).connect(scatter_node.in_port(1)) - - pad_node = Pad(graph, {'name': mx_fft_name + '/Pad', 'mode': 'constant'}).create_node([unsqueeze_node, - broadcast_node1, - scatter_node]) - - dft_node = create_op_with_const_inputs(graph, DFT, {1: int64_array([-1])}, - {'name': mx_fft_name + '/DFT', 'in_ports_count': 2}, - pad_node) - - sub_node = create_op_with_const_inputs(graph, Sub, {1: int64_array(1)}, {'name': mx_fft_name + '/Sub'}) - rank_node.out_port(0).connect(sub_node.in_port(0)) - broadcast_node2 = create_op_with_const_inputs(graph, Broadcast, {0: int64_array(0)}, - {'name': mx_fft_name + '/Reshape_broadcast'}) - sub_node.out_port(0).connect(broadcast_node2.in_port(1)) - concat_node = create_op_with_const_inputs(graph, Concat, {1: int64_array([-1, 2])}, - {'name': mx_fft_name + '/New_shape', 'in_ports_count': 2, 'axis': 0}, - broadcast_node2) - - reshape_node = Reshape(graph, {}).create_node([dft_node, concat_node]) - - mx_fft.out_port(0).get_connection().set_source(reshape_node.out_port(0)) - rename_nodes([(mx_fft, mx_fft_name + '/to_be_removed'), (reshape_node, mx_fft_name)]) - - def convert_ifft_to_dft(self, graph: Graph, mx_fft: Node): - mx_fft_name = mx_fft.soft_get('name', mx_fft.id) - - rank_node = Rank(graph, {'name': mx_fft_name + '/rank'}).create_node() - sub_node = create_op_with_const_inputs(graph, Sub, {1: int64_array(1)}, {'name': mx_fft_name + '/Sub'}) - rank_node.out_port(0).connect(sub_node.in_port(0)) - broadcast_node0 = create_op_with_const_inputs(graph, Broadcast, {0: int64_array(0)}, - {'name': mx_fft_name + '/broadcast'}) - sub_node.out_port(0).connect(broadcast_node0.in_port(1)) - concat_node = create_op_with_const_inputs(graph, Concat, {1: int64_array([-1, 2])}, - {'name': mx_fft_name + '/new_shape', 'in_ports_count': 2, 'axis': 0}, - broadcast_node0) - - reshape_node = Reshape(graph, {'name': mx_fft_name + '/reshape'}).create_node() - concat_node.out_port(0).connect(reshape_node.in_port(1)) - - mx_fft_connection = mx_fft.in_port(0).get_connection() - mx_fft_connection.set_destination(reshape_node.in_port(0)) - mx_fft_connection.get_source().connect(rank_node.in_port(0)) - - dft_node = create_op_with_const_inputs(graph, IDFT, {1: int64_array([-1])}, - {'name': mx_fft_name + 
'/idft', 'in_ports_count': 2}, - reshape_node) - - split_node = create_op_with_const_inputs(graph, Split, {1: int64_array(-1)}, - {'name': mx_fft_name + '/split', 'num_splits': 2}, - dft_node) - squeeze_node = create_op_with_const_inputs(graph, Squeeze, {1: int64_array([-1])}, {}, split_node) - - mx_fft.out_port(0).get_connection().set_source(squeeze_node.out_port(0)) - rename_nodes([(mx_fft, mx_fft_name + '/to_be_removed'), (squeeze_node, mx_fft_name)]) diff --git a/tools/mo/openvino/tools/mo/front/mxnet/MXRepeatReplacer.py b/tools/mo/openvino/tools/mo/front/mxnet/MXRepeatReplacer.py deleted file mode 100644 index a4a117230bb164..00000000000000 --- a/tools/mo/openvino/tools/mo/front/mxnet/MXRepeatReplacer.py +++ /dev/null @@ -1,115 +0,0 @@ -# Copyright (C) 2018-2024 Intel Corporation -# SPDX-License-Identifier: Apache-2.0 - -from openvino.tools.mo.front.rank_decomposer import RankDecomposer -from openvino.tools.mo.ops.elementwise import Add, Sub, Mul -from openvino.tools.mo.ops.gather import Gather -from openvino.tools.mo.ops.rank import Rank -from openvino.tools.mo.front.common.partial_infer.utils import int64_array -from openvino.tools.mo.front.common.replacement import FrontReplacementPattern -from openvino.tools.mo.front.tf.graph_utils import create_op_node_with_second_input, create_op_with_const_inputs -from openvino.tools.mo.graph.graph import Graph, Node, rename_node -from openvino.tools.mo.ops.broadcast import Broadcast -from openvino.tools.mo.ops.const import Const -from openvino.tools.mo.ops.reshape import Reshape -from openvino.tools.mo.ops.shape import Shape -from openvino.tools.mo.ops.tile import Tile -from openvino.tools.mo.ops.unsqueeze import Unsqueeze -from openvino.tools.mo.utils.shape import get_canonical_axis_index_node, new_shape_node_from_shape_nodes, \ - get_shape_values_by_range_idxs - - -class MXRepeatReplacer(FrontReplacementPattern): - """ - The transformation converts MXRepeat operation to Unsqueeze -> Tile -> Reshape. 
- """ - - enabled = True - force_clean_up = True - - def run_before(self): - return [RankDecomposer] - - def find_and_replace_pattern(self, graph: Graph): - for mxrepeat in graph.get_op_nodes(op='MXRepeat'): - self.mxrepeat_decomposition(mxrepeat) - - @staticmethod - def mxrepeat_decomposition(node: Node): - graph = node.graph - name = node.soft_get('name', node.id) - - rename_node(node, name + '/to_be_removed') - - # Unqueeze - input_rank = Rank(graph, {'name': name + '/Rank'}).create_node() - node.in_port(0).get_source().connect(input_rank.in_port(0)) - - axis = get_canonical_axis_index_node(input_rank, node.axis) - unsqueeze_axis = create_op_node_with_second_input( - graph, Add, int64_array([1]), {'name': name + '/Unsqueeze/Axis'}, input_node=axis) - - unsqueeze = Unsqueeze(graph, {'name': name + '/Unsqueeze'}).create_node() - unsqueeze.in_port(1).connect(unsqueeze_axis.out_port(0)) - - # Tile (1, 1, ..., repeats, ..., 1) - # we generate tile array according to the following table: - - # parts: | first | repeats | second | - # i: | 0, 1, ..., axis,| axis + 1,| ..., rank+1 | - # tile_array: | 1, 1, ..., 1 ,| repeats ,| ..., 1 | - - one = Const(graph, {'name': name + '/Broadcast/One', 'value': int64_array([1])}).create_node() - first_ones = Broadcast(graph, {'name': name + '/Broadcast/Ones_first_part'}).create_node() - first_ones.in_port(0).connect(one.out_port(0)) - first_ones.in_port(1).connect(unsqueeze_axis.out_port(0)) - - repeats = Const(graph, {'name': name + '/repeats', 'value': int64_array([node.repeats])}).create_node() - - second_ones = Broadcast(graph, {'name': name + '/Broadcast/Ones_second_part'}).create_node() - second_part_broadcast_shape = Sub(graph, {'name': name + '/Broadcast/Shape/second_part'}).create_node() - second_part_broadcast_shape.in_port(0).connect(input_rank.out_port(0)) - second_part_broadcast_shape.in_port(1).connect(unsqueeze_axis.out_port(0)) - second_ones.in_port(0).connect(one.out_port(0)) - second_ones.in_port(1).connect(second_part_broadcast_shape.out_port(0)) - - tile_repeats = new_shape_node_from_shape_nodes([first_ones, repeats, second_ones]) - tile = Tile(graph, {'name': name + '/Tile'}).create_node() - tile.in_port(1).connect(tile_repeats.out_port(0)) - - # Reshape (input_shape[:axis], input_shape[axis] * repeats, input_shape[axis+1:]) - # we generate reshape dim array according to the following table: - - # parts: | first | rep | second | - # i: | 0, 1, ... 
,| axis, | ..., rank | - # dim_array: | inp_sh[i] ,| input_shape[axis] * repeats ,| inp_sh[i] | - - input_shape = Shape(graph, {'name': name + '/Shape'}).create_node() - node.in_port(0).get_source().connect(input_shape.in_port(0)) - - first_input_shape_part = get_shape_values_by_range_idxs( - input_shape, input_rank, begin=0, end=node.axis, include_begin=True, include_end=False) - - original_axis_dim = create_op_with_const_inputs( - graph, Gather, {2: int64_array(0)}, {'name': name + '/OriginalDim'}, input_node=input_shape) - original_axis_dim.in_port(1).connect(axis.out_port(0)) - - repeated_dimention = Mul(graph, {'name': name + '/RepeatedDim'}).create_node() - repeated_dimention.in_port(0).connect(original_axis_dim.out_port(0)) - repeated_dimention.in_port(1).connect(repeats.out_port(0)) - - second_input_shape_part = get_shape_values_by_range_idxs( - input_shape, input_rank, begin=node.axis, end=-1, include_begin=False, include_end=True) - - output_shape = new_shape_node_from_shape_nodes( - [first_input_shape_part, repeated_dimention, second_input_shape_part]) - - reshape = Reshape(graph, {'name': name}).create_node() - rename_node(reshape, name) - reshape.in_port(1).connect(output_shape.out_port(0)) - - # Final connections - node.in_port(0).get_connection().set_destination(unsqueeze.in_port(0)) - tile.in_port(0).connect(unsqueeze.out_port(0)) - reshape.in_port(0).connect(tile.out_port(0)) - node.out_port(0).get_connection().set_source(reshape.out_port(0)) diff --git a/tools/mo/openvino/tools/mo/front/mxnet/RNN_ext.py b/tools/mo/openvino/tools/mo/front/mxnet/RNN_ext.py deleted file mode 100644 index 26606ab4056647..00000000000000 --- a/tools/mo/openvino/tools/mo/front/mxnet/RNN_ext.py +++ /dev/null @@ -1,60 +0,0 @@ -# Copyright (C) 2018-2024 Intel Corporation -# SPDX-License-Identifier: Apache-2.0 - -from openvino.tools.mo.ops.GRU import GRU -from openvino.tools.mo.ops.LSTM import LSTM -from openvino.tools.mo.ops.RNN import RNN -from openvino.tools.mo.front.extractor import FrontExtractorOp -from openvino.tools.mo.front.mxnet.extractors.utils import get_mxnet_layer_attrs -from openvino.tools.mo.utils.error import Error -from openvino.tools.mo.utils.utils import refer_to_faq_msg - - -class RNNFrontExtractor(FrontExtractorOp): - op = 'RNN' - enabled = True - - @classmethod - def extract(cls, node): - attrs = get_mxnet_layer_attrs(node.symbol_dict) - mode = attrs.str('mode', None) - state_size = attrs.int('state_size', None) - bidirectional = attrs.bool('bidirectional', False) - num_layers = attrs.int('num_layers', 1) - layout = attrs.str('layout', 'TNC') # in MXNet RNN by default take data in - # format [seq_len, batch_size, inp_size] - - node_attrs = { - 'batch_dim': layout.index('N'), - 'sequence_dim': layout.index('T'), - 'blobs_wrb': False, - 'hidden_size': state_size, - 'has_num_directions': bidirectional, - 'direction': 'bidirectional' if bidirectional else 'forward', - 'num_layers': num_layers, - 'format': 'mxnet', - 'multilayers': num_layers != 1, - 'gate_order': None, - } - - if mode == 'rnn_tanh': - node_attrs['gate_order'] = [0] - node_attrs['activations'] = ['tanh'] if not bidirectional else ['tanh', 'tanh'] - RNN.update_node_stat(node, node_attrs) - elif mode == 'rnn_relu': - node_attrs['gate_order'] = [0] - node_attrs['activations'] = ['relu'] if not bidirectional else ['relu', 'relu'] - RNN.update_node_stat(node, node_attrs) - elif mode == 'gru': - node_attrs['gate_order'] = [1, 0, 2] - node_attrs['linear_before_reset'] = 1 - GRU.update_node_stat(node, node_attrs) - elif 
mode == 'lstm': - node_attrs['gate_order'] = [1, 0, 2, 3] - LSTM.update_node_stat(node, node_attrs) - else: - raise Error( - "Operation RNN with mode '{}' not supported." + - refer_to_faq_msg(86), - mode) - return cls.enabled diff --git a/tools/mo/openvino/tools/mo/front/mxnet/__init__.py b/tools/mo/openvino/tools/mo/front/mxnet/__init__.py deleted file mode 100644 index 8ba81a92b19c53..00000000000000 --- a/tools/mo/openvino/tools/mo/front/mxnet/__init__.py +++ /dev/null @@ -1,3 +0,0 @@ -# Copyright (C) 2018-2024 Intel Corporation -# SPDX-License-Identifier: Apache-2.0 - diff --git a/tools/mo/openvino/tools/mo/front/mxnet/activation.py b/tools/mo/openvino/tools/mo/front/mxnet/activation.py deleted file mode 100644 index b9bb3c88023ad6..00000000000000 --- a/tools/mo/openvino/tools/mo/front/mxnet/activation.py +++ /dev/null @@ -1,66 +0,0 @@ -# Copyright (C) 2018-2024 Intel Corporation -# SPDX-License-Identifier: Apache-2.0 - -from openvino.tools.mo.ops.activation_ops import SoftPlus, Sigmoid, Tanh, ReLU, \ - Asinh, Acosh, Atanh, SoftSign -from openvino.tools.mo.front.extractor import FrontExtractorOp -from openvino.tools.mo.front.mxnet.extractors.utils import get_mxnet_layer_attrs -from openvino.tools.mo.utils.error import Error -from openvino.tools.mo.utils.utils import refer_to_faq_msg - - -class ActivationFrontExtractor(FrontExtractorOp): - op = 'Activation' - enabled = True - - @classmethod - def extract(cls, node): - attrs = get_mxnet_layer_attrs(node.symbol_dict) - act_type = attrs.str('act_type', 'relu') - if act_type == 'sigmoid': - act_class = Sigmoid - elif act_type == 'tanh': - act_class = Tanh - elif act_type == 'relu': - act_class = ReLU - elif act_type == 'softrelu': - act_class = SoftPlus - elif act_type == 'softsign': - act_class = SoftSign - else: - raise Error( - "Operation '{}' not supported. Please register it as custom op. 
" + - refer_to_faq_msg(86), - act_type) - act_class.update_node_stat(node) - return cls.enabled - - -class AsinhFrontExtractor(FrontExtractorOp): - op = 'arcsinh' - enabled = True - - @classmethod - def extract(cls, node): - Asinh.update_node_stat(node) - return cls.enabled - - -class AcoshFrontExtractor(FrontExtractorOp): - op = 'arccosh' - enabled = True - - @classmethod - def extract(cls, node): - Acosh.update_node_stat(node) - return cls.enabled - - -class AtanhFrontExtractor(FrontExtractorOp): - op = 'arctanh' - enabled = True - - @classmethod - def extract(cls, node): - Atanh.update_node_stat(node) - return cls.enabled diff --git a/tools/mo/openvino/tools/mo/front/mxnet/adaptive_avg_pooling_ext.py b/tools/mo/openvino/tools/mo/front/mxnet/adaptive_avg_pooling_ext.py deleted file mode 100644 index 9cd6fa207f07e6..00000000000000 --- a/tools/mo/openvino/tools/mo/front/mxnet/adaptive_avg_pooling_ext.py +++ /dev/null @@ -1,25 +0,0 @@ -# Copyright (C) 2018-2024 Intel Corporation -# SPDX-License-Identifier: Apache-2.0 - -from openvino.tools.mo.ops.adaptive_avg_pooling import AdaptiveAvgPooling -from openvino.tools.mo.front.extractor import FrontExtractorOp -from openvino.tools.mo.front.mxnet.extractors.utils import get_mxnet_layer_attrs - - -class AdaptiveAvgPooling2DFrontExtractor(FrontExtractorOp): - op = '_contrib_AdaptiveAvgPooling2D' - enabled = True - - @classmethod - def extract(cls, node): - attrs = get_mxnet_layer_attrs(node.symbol_dict) - output_size = attrs.tuple("output_size", int, None) - if len(output_size) == 1: - output_size = (output_size[0], output_size[0]) - - data = { - 'op': 'Pooling', - 'output_size': output_size - } - AdaptiveAvgPooling.update_node_stat(node, data) - return cls.enabled diff --git a/tools/mo/openvino/tools/mo/front/mxnet/add_input_data_to_prior_boxes.py b/tools/mo/openvino/tools/mo/front/mxnet/add_input_data_to_prior_boxes.py deleted file mode 100644 index fe8faa9ae6f0a8..00000000000000 --- a/tools/mo/openvino/tools/mo/front/mxnet/add_input_data_to_prior_boxes.py +++ /dev/null @@ -1,50 +0,0 @@ -# Copyright (C) 2018-2024 Intel Corporation -# SPDX-License-Identifier: Apache-2.0 - -from openvino.tools.mo.front.common.replacement import FrontReplacementPattern -from openvino.tools.mo.graph.graph import Graph, Node - - -class AddInputDataToPriorBoxes(FrontReplacementPattern): - enabled = True - - def run_before(self): - from openvino.tools.mo.front.create_tensor_nodes import CreateTensorNodes - return [CreateTensorNodes] - - def run_after(self): - from openvino.tools.mo.front.pass_separator import FrontFinish - return [FrontFinish] - - @staticmethod - def add_input_data_to_prior_boxes(graph: Graph, input_names: str = ''): - """ - PriorBox layer has data input unlike mxnet. - Need to add data input to _contrib_MultiBoxPrior for - for correct conversion to PriorBox layer. - - Parameters - ---------- - graph : Graph - Graph with loaded model. 
- """ - if not input_names: - input_names = ('data',) - else: - input_names = input_names.split(',') - - input_nodes = {} - for node in graph.nodes(): - node = Node(graph, node) - if node.has_valid('op') and node.name in input_names: - input_nodes.update({node.id: node}) - - if len(input_nodes) > 0: - for node in graph.nodes(): - node = Node(graph, node) - if node.has_valid('op') and node.op == '_contrib_MultiBoxPrior': - node.add_input_port(idx=1) - graph.create_edge(list(input_nodes.values())[0], node, out_port=0, in_port=1) - - def find_and_replace_pattern(self, graph: Graph): - self.add_input_data_to_prior_boxes(graph, graph.graph['cmd_params'].input) diff --git a/tools/mo/openvino/tools/mo/front/mxnet/arange_ext.py b/tools/mo/openvino/tools/mo/front/mxnet/arange_ext.py deleted file mode 100644 index 78cc8c38ccade1..00000000000000 --- a/tools/mo/openvino/tools/mo/front/mxnet/arange_ext.py +++ /dev/null @@ -1,26 +0,0 @@ -# Copyright (C) 2018-2024 Intel Corporation -# SPDX-License-Identifier: Apache-2.0 - -import numpy as np - -from openvino.tools.mo.ops.range import Range -from openvino.tools.mo.front.extractor import FrontExtractorOp -from openvino.tools.mo.front.mxnet.extractors.utils import get_mxnet_layer_attrs -from openvino.tools.mo.graph.graph import Node - - -class ArangeExt(FrontExtractorOp): - op = '_arange' - enabled = True - - @classmethod - def extract(cls, node: Node): - attrs = get_mxnet_layer_attrs(node.symbol_dict) - Range.update_node_stat(node, { - 'start': attrs.int('start', 0), - 'stop': attrs.int('stop', 0), - 'repeat': attrs.int('repeat', 1), - 'step': attrs.float('step', 1), - 'dtype': np.dtype(attrs.str('dtype ', 'float32')) - }) - return cls.enabled diff --git a/tools/mo/openvino/tools/mo/front/mxnet/arange_like_ext.py b/tools/mo/openvino/tools/mo/front/mxnet/arange_like_ext.py deleted file mode 100644 index 89ad2507d60bd4..00000000000000 --- a/tools/mo/openvino/tools/mo/front/mxnet/arange_like_ext.py +++ /dev/null @@ -1,25 +0,0 @@ -# Copyright (C) 2018-2021 Intel Corporation -# SPDX-License-Identifier: Apache-2.0 - -import numpy as np - -from openvino.tools.mo.front.extractor import FrontExtractorOp -from openvino.tools.mo.front.mxnet.extractors.utils import get_mxnet_layer_attrs -from openvino.tools.mo.graph.graph import Node -from openvino.tools.mo.ops.arange_like import ArangeLikeOp - - -class ArangeLikeExt(FrontExtractorOp): - op = '_contrib_arange_like' - enabled = True - - @classmethod - def extract(cls, node: Node): - attrs = get_mxnet_layer_attrs(node.symbol_dict) - ArangeLikeOp.update_node_stat(node, { - 'start': attrs.float('start', 0), - 'repeat': attrs.int('repeat', 1), - 'step': attrs.float('step', 1), - 'axis': attrs.int('axis', None), - }) - return cls.enabled diff --git a/tools/mo/openvino/tools/mo/front/mxnet/arange_like_replacer.py b/tools/mo/openvino/tools/mo/front/mxnet/arange_like_replacer.py deleted file mode 100644 index 16272b406bf904..00000000000000 --- a/tools/mo/openvino/tools/mo/front/mxnet/arange_like_replacer.py +++ /dev/null @@ -1,148 +0,0 @@ -# Copyright (C) 2018-2021 Intel Corporation -# SPDX-License-Identifier: Apache-2.0 -import numpy as np - -from openvino.tools.mo.front.common.partial_infer.utils import int64_array, mo_array -from openvino.tools.mo.front.common.replacement import FrontReplacementOp -from openvino.tools.mo.front.tf.graph_utils import create_op_with_const_inputs -from openvino.tools.mo.graph.graph import Graph, rename_nodes -from openvino.tools.mo.ops.Cast import Cast -from openvino.tools.mo.ops.ReduceOps 
import ReduceProd -from openvino.tools.mo.ops.elementwise import Add, Div, Mul -from openvino.tools.mo.ops.gather import Gather -from openvino.tools.mo.ops.range import Range -from openvino.tools.mo.ops.reshape import Reshape -from openvino.tools.mo.ops.shape import Shape -from openvino.tools.mo.ops.slice import Slice -from openvino.tools.mo.ops.squeeze import Squeeze -from openvino.tools.mo.ops.tile import Tile -from openvino.tools.mo.utils.error import Error - - -class ArangeLikeReplacer(FrontReplacementOp): - op = 'arange_like' - enabled = True - - def replace_sub_graph(self, graph: Graph, match: dict): - node = match['op'] - name = node.soft_get('name', node.id) - axis = node.axis - input_shape_node = Shape(graph, {'name': name + '/ShapeOf'}).create_node() - range_node = create_op_with_const_inputs(graph, Range, {0: mo_array(node.start), - 2: mo_array(node.step)}, {'name': name + '/Range'}) - node.in_port(0).get_connection().set_destination(input_shape_node.in_port(0)) - - if axis is not None: - ''' - Replace arange_like op to subgraph: - Shape - Gather - Range - ''' - gather_node = create_op_with_const_inputs(graph, Gather, {1: int64_array([axis]), - 2: int64_array(0)}, - {'name': name + '/Gather'}) - input_shape_node.out_port(0).connect(gather_node.in_port(0)) - gather_node.out_port(0).connect(range_node.in_port(1)) - node.out_port(0).get_connection().set_source(range_node.out_port(0)) - rename_nodes([(node, name + '/ShouldBeDeleted'), (range_node, name)]) - else: - r''' - Replace arange_like op to subgraph: - | - ShapeOf ----------- | - | | - ReduceProd | - | | - Range | - | | - Reshape ----------- | - | - ''' - - flattened_shape_node = create_op_with_const_inputs(graph, ReduceProd, {1: int64_array([0])}, - {'name': input_shape_node.name + '/ReduceProd', - 'keep_dims': True}) - reshape_backward_node = Reshape(graph, {'name': name + '/Reshape_backward'}).create_node() - - input_shape_node.out_port(0).connect(flattened_shape_node.in_port(0)) - flattened_shape_node.out_port(0).connect(range_node.in_port(1)) - range_node.out_port(0).connect(reshape_backward_node.in_port(0)) - input_shape_node.out_port(0).connect(reshape_backward_node.in_port(1)) - node.out_port(0).get_connection().set_source(reshape_backward_node.out_port(0)) - rename_nodes([(node, name + '/ShouldBeDeleted'), (reshape_backward_node, name)]) - - if node.repeat != 1: - r""" - First, we generate the correct stop value for Range like new_stop_value = stop_value // repeat + 1. - Then repeats each value of the interval using Tile. After that we can get a longer interval - so we reduce it with Slice. - - Sub-graph after Range node will be look like - - Range - Reshape([-1, 1]) - Tile([1, repeat]) - Reshape(-1) - Slice - - """ - - if node.repeat < 1: - raise Error("Unexpected value {} of the attribute 'repeat' for the node {}". 
format(node.repeat, name)) - - div_node = create_op_with_const_inputs(graph, Div, {1: int64_array([node.repeat])}, - {'name': name + '/Divide'}) - add_node = create_op_with_const_inputs(graph, Add, {1: int64_array([1])}, - {'name': div_node.name + '/Add'}) - cast_node = Cast(graph, {'name': name + '/ConvertToI64', 'dst_type': np.int64}).create_node() - - cast_node.out_port(0).connect(div_node.in_port(0)) - div_node.out_port(0).connect(add_node.in_port(0)) - range_node.in_port(1).get_connection().set_destination(cast_node.in_port(0)) - add_node.out_port(0).connect(range_node.in_port(1)) - - tile_forward_reshape = create_op_with_const_inputs(graph, Reshape, {1: int64_array([-1, 1])}, - {'name': range_node.name + '/ForwardReshape'}) - tile = create_op_with_const_inputs(graph, Tile, {1: int64_array([1, node.repeat])}, - {'name': tile_forward_reshape.name + '/Tile'}) - tile_backward_reshape = create_op_with_const_inputs(graph, Reshape, {1: int64_array([-1])}, - {'name': tile.name + '/BackwardReshape'}) - slice_node = create_op_with_const_inputs(graph, Slice, {1: int64_array([0]), 3: int64_array([0]), - 4: int64_array([1])}, - {'name': tile_backward_reshape.name + '/Slice'}) - - tile_forward_reshape.out_port(0).connect(tile.in_port(0)) - tile.out_port(0).connect(tile_backward_reshape.in_port(0)) - tile_backward_reshape.out_port(0).connect(slice_node.in_port(0)) - slice_node.in_port(2).connect(div_node.in_port(0).get_source()) - - range_node.out_port(0).get_connection().set_source(slice_node.out_port(0)) - range_node.out_port(0).connect(tile_forward_reshape.in_port(0)) - - if axis is not None: - rename_nodes([(range_node, name + '/Range'), (slice_node, name)]) - - # MXNet arange_like op has no stop attribute and the result tensor always matches the input shape, so - # we have to correct the stop value for the Range node if step != 1 or start != 0 - if node.step != 1: - # If step attribute is not integer, we will generate an interval with a larger size and then reduce it - # using Slice - true_elements_count_port = range_node.in_port(1).get_source() - mul_value = np.ceil(node.step) if node.step > 0 else np.floor(node.step) - stop_value = create_op_with_const_inputs(graph, Mul, port_value_dict={1: mo_array(np.ceil(mul_value))}, - op_attrs={'name': range_node.name + '/Stop'}) - range_node.in_port(1).get_connection().insert_node(stop_value) - - slice_range_values = create_op_with_const_inputs(graph, Slice, {1: int64_array([0]), 3: int64_array([0]), - 4: int64_array([1])}, - {'name': range_node.name + '/Slice'}) - slice_range_values.in_port(2).connect(true_elements_count_port) - range_node.out_port(0).get_connection().insert_node(slice_range_values) - - if axis is not None and node.repeat == 1: - rename_nodes([(range_node, name + '/Range'), (slice_range_values, name)]) - - if node.start != 0: - correct_stop_value = create_op_with_const_inputs(graph, Add, port_value_dict={1: mo_array(node.start)}, - op_attrs={'name': range_node.name + '/Correct_Stop'}) - range_node.in_port(1).get_connection().insert_node(correct_stop_value) - - # Range node supports only scalar inputs - squeeze_node = create_op_with_const_inputs(graph, Squeeze, port_value_dict={1: int64_array(0)}, - op_attrs={"name": range_node.name + '/Stop/Squeeze'}) - range_node.in_port(1).get_connection().insert_node(squeeze_node) diff --git a/tools/mo/openvino/tools/mo/front/mxnet/arange_replacer.py b/tools/mo/openvino/tools/mo/front/mxnet/arange_replacer.py deleted file mode 100644 index 29c40a0f46e49a..00000000000000 --- 
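# Illustrative NumPy sketch of the behaviour implemented by the deleted ArangeLikeReplacer above,
# which builds a Range/Tile/Slice subgraph. The helper name and signature are hypothetical, not
# MO or MXNet API; the semantics (output length matching the selected axis, each value repeated
# `repeat` times) are assumed from the replacer's comments and MXNet's documented arange_like.
import numpy as np

def arange_like_ref(data, start=0.0, step=1.0, repeat=1, axis=None):
    # Length of the generated sequence: the selected axis of the input, or the
    # total element count when axis is None (the ReduceProd branch above).
    count = data.size if axis is None else data.shape[axis]
    # For repeat > 1 the subgraph feeds Range with stop = count // repeat + 1
    # so that repeating each value still covers `count` elements.
    stop = count // repeat + 1 if repeat > 1 else count
    seq = start + step * np.arange(stop)
    seq = np.repeat(seq, repeat)[:count]  # the Tile + Slice part of the subgraph
    # With axis=None the result is reshaped back to the input shape
    # (the Reshape_backward node above); otherwise it stays 1-D.
    return seq.reshape(data.shape) if axis is None else seq

x = np.zeros((2, 3))
print(arange_like_ref(x, axis=1))    # [0. 1. 2.]
print(arange_like_ref(x, repeat=2))  # [[0. 0. 1.] [1. 2. 2.]]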
a/tools/mo/openvino/tools/mo/front/mxnet/arange_replacer.py +++ /dev/null @@ -1,36 +0,0 @@ -# Copyright (C) 2018-2024 Intel Corporation -# SPDX-License-Identifier: Apache-2.0 - -from openvino.tools.mo.front.mxnet.MXRepeatReplacer import MXRepeatReplacer -from openvino.tools.mo.ops.mxrepeat import MXRepeat -from openvino.tools.mo.front.common.replacement import FrontReplacementOp -from openvino.tools.mo.graph.graph import Graph -from openvino.tools.mo.ops.const import Const - - -class ArangeReplacer(FrontReplacementOp): - op = 'Range' - enabled = True - - def run_before(self): - # replacement inserts MXRepeat operation, so we should execute its decomposition later - return [MXRepeatReplacer] - - def replace_sub_graph(self, graph: Graph, match: dict): - node = match['op'] - if not node.has_valid('start') or not node.has_valid('stop') or not node.has_valid('step'): - return - - start_value = Const(graph, dict(value=node.start, - symbol_dict={'name': node.id + '/const_start'})).create_node() - limit_value = Const(graph, dict(value=node.stop, - symbol_dict={'name': node.id + '/const_limit'})).create_node() - delta_value = Const(graph, dict(value=node.step, - symbol_dict={'name': node.id + '/const_delta'})).create_node() - node.in_port(0).get_connection().set_source(start_value.out_port(0)) - node.in_port(1).get_connection().set_source(limit_value.out_port(0)) - node.in_port(2).get_connection().set_source(delta_value.out_port(0)) - if node.has_valid('repeat') and node.repeat > 1: - rep = MXRepeat(graph, dict(name=node.id + '/mxrepeat', axis=0, repeats=node.repeat)).create_node() - node.out_port(0).get_destination().get_connection().set_source(rep.out_port(0)) - rep.in_port(0).connect(node.out_port(0)) diff --git a/tools/mo/openvino/tools/mo/front/mxnet/batch_dot_ext.py b/tools/mo/openvino/tools/mo/front/mxnet/batch_dot_ext.py deleted file mode 100644 index ef78a536044ab1..00000000000000 --- a/tools/mo/openvino/tools/mo/front/mxnet/batch_dot_ext.py +++ /dev/null @@ -1,39 +0,0 @@ -# Copyright (C) 2018-2024 Intel Corporation -# SPDX-License-Identifier: Apache-2.0 -import logging as log - -from openvino.tools.mo.front.extractor import FrontExtractorOp -from openvino.tools.mo.front.mxnet.extractors.utils import get_mxnet_layer_attrs -from openvino.tools.mo.graph.graph import Node -from openvino.tools.mo.ops.MatMul import MatMul - - -class BatchDotExt(FrontExtractorOp): - """ - MXNet operation which computes batch matrix multiplication of x and y similar to TensorFlow or ONNX MatMul operation. - - Attributes: - transpose_a - if true then transpose the first input before multiplication - transpose_b - if true then transpose the second input before multiplication - """ - op = 'batch_dot' - enabled = True - - @classmethod - def extract(cls, node: Node): - attrs = get_mxnet_layer_attrs(node.symbol_dict) - transpose_a = attrs.bool('transpose_a', False) - transpose_b = attrs.bool('transpose_b', False) - forward_stype = attrs.str('forward_stype', None) - - if forward_stype is not None: - log.error("Node {} has non default value {} of attribute forward_stype." 
- "Model Optimizer conversion assumes default value = None".format(node.soft_get('name', node.id), - forward_stype), - extra={'is_warning': True}) - - MatMul.update_node_stat(node, { - 'transpose_a': transpose_a, - 'transpose_b': transpose_b - }) - return cls.enabled diff --git a/tools/mo/openvino/tools/mo/front/mxnet/block_grad_ext.py b/tools/mo/openvino/tools/mo/front/mxnet/block_grad_ext.py deleted file mode 100644 index 38ce05f68f6990..00000000000000 --- a/tools/mo/openvino/tools/mo/front/mxnet/block_grad_ext.py +++ /dev/null @@ -1,16 +0,0 @@ -# Copyright (C) 2018-2024 Intel Corporation -# SPDX-License-Identifier: Apache-2.0 - -from openvino.tools.mo.ops.identity import Identity -from openvino.tools.mo.front.extractor import FrontExtractorOp -from openvino.tools.mo.graph.graph import Node - - -class BlockGradExt(FrontExtractorOp): - op = 'BlockGrad' - enabled = True - - @classmethod - def extract(cls, node: Node): - Identity.update_node_stat(node, {}) - return cls.enabled diff --git a/tools/mo/openvino/tools/mo/front/mxnet/box_nms_ext.py b/tools/mo/openvino/tools/mo/front/mxnet/box_nms_ext.py deleted file mode 100644 index 3ac0d7f273ad30..00000000000000 --- a/tools/mo/openvino/tools/mo/front/mxnet/box_nms_ext.py +++ /dev/null @@ -1,16 +0,0 @@ -# Copyright (C) 2018-2024 Intel Corporation -# SPDX-License-Identifier: Apache-2.0 - -from openvino.tools.mo.ops.box_nms import BoxNms -from openvino.tools.mo.front.extractor import FrontExtractorOp -from openvino.tools.mo.graph.graph import Node - - -class BoxNmsGradExt(FrontExtractorOp): - op = '_contrib_box_nms' - enabled = True - - @classmethod - def extract(cls, node: Node): - BoxNms.update_node_stat(node, {}) - return cls.enabled diff --git a/tools/mo/openvino/tools/mo/front/mxnet/cast_ext.py b/tools/mo/openvino/tools/mo/front/mxnet/cast_ext.py deleted file mode 100644 index b20b3add989e65..00000000000000 --- a/tools/mo/openvino/tools/mo/front/mxnet/cast_ext.py +++ /dev/null @@ -1,20 +0,0 @@ -# Copyright (C) 2018-2024 Intel Corporation -# SPDX-License-Identifier: Apache-2.0 - -import numpy as np - -from openvino.tools.mo.ops.Cast import Cast -from openvino.tools.mo.front.extractor import FrontExtractorOp -from openvino.tools.mo.front.mxnet.extractors.utils import get_mxnet_layer_attrs -from openvino.tools.mo.graph.graph import Node - - -class CastExtractor(FrontExtractorOp): - op = 'Cast' - enabled = True - - @classmethod - def extract(cls, node: Node): - attrs = get_mxnet_layer_attrs(node.symbol_dict) - Cast.update_node_stat(node, {'dst_type': np.dtype(attrs.str('dtype', 'float32'))}) - return cls.enabled diff --git a/tools/mo/openvino/tools/mo/front/mxnet/check_softmax_node_inputs.py b/tools/mo/openvino/tools/mo/front/mxnet/check_softmax_node_inputs.py deleted file mode 100644 index bd3c7bdca6a420..00000000000000 --- a/tools/mo/openvino/tools/mo/front/mxnet/check_softmax_node_inputs.py +++ /dev/null @@ -1,42 +0,0 @@ -# Copyright (C) 2018-2024 Intel Corporation -# SPDX-License-Identifier: Apache-2.0 - -from openvino.tools.mo.front.common.replacement import FrontReplacementPattern -from openvino.tools.mo.graph.graph import Graph - - -class CheckSoftmaxNodeInputs(FrontReplacementPattern): - enabled = True - - def run_before(self): - from openvino.tools.mo.front.user_data_repack import UserDataRepack - return [UserDataRepack] - - def run_after(self): - return [] - - @staticmethod - def pattern(): - return dict( - nodes=[ - ('softmax', dict(op=lambda op: op in ['SoftMax', 'SoftmaxActivation', 'SoftmaxOutput'])) - ], - edges=[]) - - 
@staticmethod - def replace_pattern(graph: Graph, match: dict): - """ - Need to remove from softmax layer all unused inputs - Parameters - ---------- - graph : Graph - Graph with loaded model. - match : dict - Patterns which were found in graph structure. - """ - softmax_node = match['softmax'] - softmax_nodes_len = len(softmax_node.in_nodes()) - for i in reversed(range(1, softmax_nodes_len)): - in_node = softmax_node.in_node(i) - graph.remove_edge(in_node.id, softmax_node.id) - graph.remove_node(in_node.id) diff --git a/tools/mo/openvino/tools/mo/front/mxnet/clip_ext.py b/tools/mo/openvino/tools/mo/front/mxnet/clip_ext.py deleted file mode 100644 index 5cc6c75d00bc17..00000000000000 --- a/tools/mo/openvino/tools/mo/front/mxnet/clip_ext.py +++ /dev/null @@ -1,19 +0,0 @@ -# Copyright (C) 2018-2024 Intel Corporation -# SPDX-License-Identifier: Apache-2.0 - -from openvino.tools.mo.front.extractor import FrontExtractorOp -from openvino.tools.mo.front.mxnet.extractors.utils import get_mxnet_layer_attrs -from openvino.tools.mo.graph.graph import Node -from openvino.tools.mo.ops.clamp import AttributedClamp - - -class ClipExt(FrontExtractorOp): - op = 'clip' - enabled = True - - @classmethod - def extract(cls, node: Node): - attrs = get_mxnet_layer_attrs(node.symbol_dict) - - AttributedClamp.update_node_stat(node, {'min': attrs.float('a_min', None), 'max': attrs.float('a_max', None)}) - return cls.enabled diff --git a/tools/mo/openvino/tools/mo/front/mxnet/conv_ext.py b/tools/mo/openvino/tools/mo/front/mxnet/conv_ext.py deleted file mode 100644 index 26a951d50c5eae..00000000000000 --- a/tools/mo/openvino/tools/mo/front/mxnet/conv_ext.py +++ /dev/null @@ -1,122 +0,0 @@ -# Copyright (C) 2018-2024 Intel Corporation -# SPDX-License-Identifier: Apache-2.0 - -import numpy as np - -from openvino.tools.mo.front.common.partial_infer.utils import int64_array -from openvino.tools.mo.front.extractor import FrontExtractorOp -from openvino.tools.mo.front.mxnet.extractors.utils import get_mxnet_layer_attrs -from openvino.tools.mo.ops.convolution import Convolution - - -class ConvFrontExtractor(FrontExtractorOp): - op = 'Convolution' - enabled = True - - @classmethod - def extract(cls, node): - attr = get_mxnet_layer_attrs(node.symbol_dict) - - kernel = attr.tuple("kernel", int, None) - stride = attr.tuple("stride", int, tuple(np.ones(len(kernel), dtype=np.int64))) - padding = attr.tuple("pad", int, tuple(np.zeros(len(kernel), dtype=np.int64))) - dilate = attr.tuple("dilate", int, tuple(np.ones(len(kernel), dtype=np.int64))) - group = attr.int("num_group", 1) - output = attr.int("num_filter", None) - bias_term = not attr.bool("no_bias", False) - - final_dilations = int64_array([1, 1, *[d for d in dilate]]) if dilate is not None else None - - node_attrs = { - 'op': __class__.op, - 'bias_addable': True, - 'bias_term': bias_term, - 'pad': int64_array([[0, 0], [0, 0], *[[pad, pad] for pad in padding]]), - 'pad_spatial_shape': int64_array([[pad, pad] for pad in padding]), - 'dilation': final_dilations, - 'output_spatial_shape': None, - 'output_shape': None, - 'stride': int64_array([1, 1, *[s for s in stride]]), - 'group': group, - 'output': output, - 'kernel_spatial': int64_array([k for k in kernel]), - - 'input_feature_channel': 1, - 'output_feature_channel': 0, - 'kernel_spatial_idx': None, - 'reshape_kernel': True, - - 'spatial_dims': None, - 'channel_dims': int64_array([1]), - 'batch_dims': int64_array([0]), - 'layout': 'NCHW', - } - - # update the attributes of the node - Convolution.update_node_stat(node, 
node_attrs) - return cls.enabled - - -class DeconvFrontExtractor(FrontExtractorOp): - op = 'Deconvolution' - enabled = True - - @staticmethod - def get_pad(node, input_shape, kernel_shape): - padding = np.add.reduce(node.pad, axis=1) - padding[node.spatial_dims] = node.stride[node.spatial_dims] * (input_shape[node.spatial_dims] - 1) + 1 + \ - (kernel_shape[node.spatial_dims] - 1) * node.dilation[node.spatial_dims] - padding[node.spatial_dims] = padding[node.spatial_dims] - node.output_spatial_shape - padding[node.spatial_dims] = (padding[node.spatial_dims] + 1) / 2 - return int64_array([[0, 0], [0, 0], *[[pad, pad] for pad in padding[2:]]]) - - @classmethod - def extract(cls, node): - attr = get_mxnet_layer_attrs(node.symbol_dict) - - kernel = attr.tuple("kernel", int, None) - stride = attr.tuple("stride", int, tuple(np.ones(len(kernel), dtype=np.int64))) - padding = attr.tuple("pad", int, tuple(np.zeros(len(kernel), dtype=np.int64))) - dilate = attr.tuple("dilate", int, tuple(np.ones(len(kernel), dtype=np.int64))) - group = attr.int("num_group", 1) - output = attr.int("num_filter", None) - bias_term = not attr.bool("no_bias", True) - target_shape = attr.tuple("target_shape", int, None) - if target_shape: - target_shape = int64_array(target_shape) - - final_dilations = int64_array([1, 1, *[d for d in dilate]]) if dilate is not None else None - node_attrs = { - 'op': __class__.op, - 'type': 'Deconvolution', - 'bias_addable': True, - 'bias_term': bias_term, - 'pad': int64_array([[0, 0], [0, 0], *[[pad, pad] for pad in padding]]), - 'pad_spatial_shape': int64_array([[pad, pad] for pad in padding]), - 'dilation': final_dilations, - 'output_spatial_shape': target_shape, - 'original_output_spatial_shape': target_shape, - 'output_shape': None, - 'stride': int64_array([1, 1, *[s for s in stride]]), - 'group': group, - 'output': output, - 'kernel_spatial': int64_array([k for k in kernel]), - 'input_feature_channel': 1, - 'output_feature_channel': 0, - 'kernel_spatial_idx': None, - 'reshape_kernel': True, - - 'spatial_dims': None, - 'channel_dims': int64_array([1]), - 'batch_dims': int64_array([0]), - 'layout': 'NCHW', - 'get_pad': DeconvFrontExtractor.get_pad, - } - - output_padding = attr.tuple("adj", int, None) - if target_shape is None and output_padding: - node_attrs["output_padding"] = int64_array([0, 0, *[s for s in output_padding]]) - - # update the attributes of the node - Convolution.update_node_stat(node, node_attrs) - return cls.enabled diff --git a/tools/mo/openvino/tools/mo/front/mxnet/copy_ext.py b/tools/mo/openvino/tools/mo/front/mxnet/copy_ext.py deleted file mode 100644 index cdfa5c68b764bd..00000000000000 --- a/tools/mo/openvino/tools/mo/front/mxnet/copy_ext.py +++ /dev/null @@ -1,16 +0,0 @@ -# Copyright (C) 2018-2024 Intel Corporation -# SPDX-License-Identifier: Apache-2.0 - -from openvino.tools.mo.ops.identity import Identity -from openvino.tools.mo.front.extractor import FrontExtractorOp -from openvino.tools.mo.graph.graph import Node - - -class CopyExt(FrontExtractorOp): - op = '_copy' - enabled = True - - @classmethod - def extract(cls, node: Node): - Identity.update_node_stat(node, {}) - return cls.enabled diff --git a/tools/mo/openvino/tools/mo/front/mxnet/crop_ext.py b/tools/mo/openvino/tools/mo/front/mxnet/crop_ext.py deleted file mode 100644 index bc22ebe81169e1..00000000000000 --- a/tools/mo/openvino/tools/mo/front/mxnet/crop_ext.py +++ /dev/null @@ -1,25 +0,0 @@ -# Copyright (C) 2018-2024 Intel Corporation -# SPDX-License-Identifier: Apache-2.0 - -from 
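# Illustrative sketch of the attribute layout built by the deleted Convolution and Deconvolution
# extractors above: MXNet's spatial-only tuples (kernel, stride, pad, dilate) are widened into
# full NCHW-rank attributes with neutral batch/channel entries prepended. The helper name and
# signature below are hypothetical; it only mirrors that layout.
import numpy as np

def expand_spatial_attrs(kernel, stride=None, pad=None, dilate=None):
    n = len(kernel)
    stride = stride or (1,) * n
    pad = pad or (0,) * n
    dilate = dilate or (1,) * n
    return {
        'kernel_spatial': np.array(kernel, dtype=np.int64),
        # batch and channel dimensions get neutral values prepended
        'stride': np.array([1, 1, *stride], dtype=np.int64),
        'dilation': np.array([1, 1, *dilate], dtype=np.int64),
        'pad': np.array([[0, 0], [0, 0], *[[p, p] for p in pad]], dtype=np.int64),
        'pad_spatial_shape': np.array([[p, p] for p in pad], dtype=np.int64),
    }

print(expand_spatial_attrs((3, 3), stride=(2, 2), pad=(1, 1))['pad'].tolist())
# [[0, 0], [0, 0], [1, 1], [1, 1]]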
openvino.tools.mo.front.extractor import FrontExtractorOp -from openvino.tools.mo.front.mxnet.extractors.utils import get_mxnet_layer_attrs -from openvino.tools.mo.ops.crop import Crop - - -class CropFrontExtractor(FrontExtractorOp): - op = 'Crop' - enabled = True - - @classmethod - def extract(cls, node): - attrs = get_mxnet_layer_attrs(node.symbol_dict) - offset = attrs.tuple("offset", int, ()) - axis = attrs.int("num_args", 0) - node_attrs = { - 'axis': axis, - 'offset': list(offset), - 'dim': None, - } - Crop.update_node_stat(node, node_attrs) - return cls.enabled - diff --git a/tools/mo/openvino/tools/mo/front/mxnet/cumsum.py b/tools/mo/openvino/tools/mo/front/mxnet/cumsum.py deleted file mode 100644 index 800fc00be31055..00000000000000 --- a/tools/mo/openvino/tools/mo/front/mxnet/cumsum.py +++ /dev/null @@ -1,35 +0,0 @@ -# Copyright (C) 2018-2024 Intel Corporation -# SPDX-License-Identifier: Apache-2.0 - -from openvino.tools.mo.ops.Cast import Cast -from openvino.tools.mo.ops.cumsum import CumSum -from openvino.tools.mo.front.common.partial_infer.utils import int64_array -from openvino.tools.mo.front.common.replacement import FrontReplacementOp -from openvino.tools.mo.front.mxnet.extractors.utils import get_mxnet_layer_attrs, mxnet_str_dtype_to_np -from openvino.tools.mo.front.tf.graph_utils import create_op_node_with_second_input -from openvino.tools.mo.graph.graph import Graph, rename_node, Node -from openvino.tools.mo.ops.const import Const - - -class CumSumFrontReplacer(FrontReplacementOp): - op = 'MXNetCumSum' - enabled = True - - def replace_op(self, graph: Graph, node: Node): - name = node.soft_get('name', node.id) - axis = node.soft_get('axis', 0) - - rename_node(node=node, name=name + '/to_be_removed') - cumsum_node = create_op_node_with_second_input(graph, CumSum, int64_array(axis), - {'name': name, 'reverse': False, 'exclusive': False}) - rename_node(cumsum_node, name) - - node.in_port(0).get_connection().set_destination(cumsum_node.in_port(0)) - if node.has_valid('mx_out_type') and node['mx_out_type'] is not None: - rename_node(node=cumsum_node, name=name + '/CumSum') - convert = Cast(graph, {'name': name, 'dst_type': node['mx_out_type']}).create_node() - rename_node(convert, name) - cumsum_node.out_port(0).connect(convert.in_port(0)) - return [convert.id] - else: - return [cumsum_node.id] diff --git a/tools/mo/openvino/tools/mo/front/mxnet/cumsum_ext.py b/tools/mo/openvino/tools/mo/front/mxnet/cumsum_ext.py deleted file mode 100644 index dff58685296fb3..00000000000000 --- a/tools/mo/openvino/tools/mo/front/mxnet/cumsum_ext.py +++ /dev/null @@ -1,24 +0,0 @@ -# Copyright (C) 2018-2024 Intel Corporation -# SPDX-License-Identifier: Apache-2.0 - -import numpy as np -from openvino.tools.mo.ops.cumsum import MXNetCumSum -from openvino.tools.mo.front.extractor import FrontExtractorOp -from openvino.tools.mo.front.mxnet.extractors.utils import get_mxnet_layer_attrs, mxnet_str_dtype_to_np - - -class CumSumExtractor(FrontExtractorOp): - op = '_np_cumsum' - enabled = True - - @classmethod - def extract(cls, node): - attrs = get_mxnet_layer_attrs(node.symbol_dict) - - update_attrs = { - 'axis': attrs.int('axis', 0), - 'mx_out_type': attrs.dtype('dtype', None) - } - - MXNetCumSum.update_node_stat(node, update_attrs) - return cls.enabled diff --git a/tools/mo/openvino/tools/mo/front/mxnet/custom.py b/tools/mo/openvino/tools/mo/front/mxnet/custom.py deleted file mode 100644 index b283f005896d9b..00000000000000 --- a/tools/mo/openvino/tools/mo/front/mxnet/custom.py +++ /dev/null @@ -1,19 
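# Illustrative NumPy sketch of the deleted CumSumFrontReplacer above, which rewrites MXNet
# _np_cumsum as a CumSum (exclusive=False, reverse=False) plus an optional Cast when the node
# carries an explicit output dtype. The helper below is hypothetical reference code, not MO API.
import numpy as np

def mx_cumsum_ref(data, axis=0, out_dtype=None):
    result = np.cumsum(data, axis=axis)  # plain cumulative sum along the given axis
    return result if out_dtype is None else result.astype(out_dtype)

print(mx_cumsum_ref(np.array([[1, 2], [3, 4]]), axis=1))  # [[1 3] [3 7]]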
+0,0 @@ -# Copyright (C) 2018-2024 Intel Corporation -# SPDX-License-Identifier: Apache-2.0 - -from openvino.tools.mo.front.extractor import FrontExtractorOp, MXNetCustomFrontExtractorOp -from openvino.tools.mo.front.mxnet.extractors.utils import get_mxnet_layer_attrs - - -class CustomFrontExtractorOp(FrontExtractorOp): - op = 'Custom' - enabled = True - - def extract(self, node): - supported = False - op_attrs = None - node_attrs = get_mxnet_layer_attrs(node.symbol_dict) - op_type = node_attrs.str('op_type', None) - if op_type and op_type in MXNetCustomFrontExtractorOp.registered_ops: - supported, op_attrs = MXNetCustomFrontExtractorOp.registered_ops[op_type]().extract(node) - return supported, op_attrs diff --git a/tools/mo/openvino/tools/mo/front/mxnet/custom_rpn_proposal.py b/tools/mo/openvino/tools/mo/front/mxnet/custom_rpn_proposal.py deleted file mode 100644 index 92a214373ab256..00000000000000 --- a/tools/mo/openvino/tools/mo/front/mxnet/custom_rpn_proposal.py +++ /dev/null @@ -1,36 +0,0 @@ -# Copyright (C) 2018-2024 Intel Corporation -# SPDX-License-Identifier: Apache-2.0 - -from openvino.tools.mo.front.common.partial_infer.utils import mo_array -from openvino.tools.mo.ops.proposal import ProposalOp -from openvino.tools.mo.front.extractor import MXNetCustomFrontExtractorOp -from openvino.tools.mo.front.mxnet.extractors.utils import get_mxnet_layer_attrs - - -class RPNProposalMXNetFrontExtractor(MXNetCustomFrontExtractorOp): - op = 'proposal' - enabled = True - - def extract(self, node): - attrs = get_mxnet_layer_attrs(node.symbol_dict) - feat_stride = attrs.int("feat_stride", 16) - ratio = attrs.tuple("ratios", float, (0.5, 1, 2)) - scale = attrs.tuple("scales", int, (4, 8, 16, 32)) - min_size = attrs.int("rpn_min_size", 16) - pre_nms_topn = attrs.int("rpn_pre_nms_top_n", 6000) - post_nms_topn = attrs.int("rpn_post_nms_top_n", 300) - nms_thresh = attrs.float("threshold", 0.7) - - node_attrs = { - 'feat_stride': feat_stride, - 'base_size': 0, - 'min_size': min_size, - 'ratio': mo_array(ratio), - 'scale': mo_array(scale), - 'pre_nms_topn': pre_nms_topn, - 'post_nms_topn': post_nms_topn, - 'nms_thresh': nms_thresh, - } - - ProposalOp.update_node_stat(node, node_attrs) - return (True, node_attrs) diff --git a/tools/mo/openvino/tools/mo/front/mxnet/deformable_conv_ext.py b/tools/mo/openvino/tools/mo/front/mxnet/deformable_conv_ext.py deleted file mode 100644 index 2591ac1f752df8..00000000000000 --- a/tools/mo/openvino/tools/mo/front/mxnet/deformable_conv_ext.py +++ /dev/null @@ -1,60 +0,0 @@ -# Copyright (C) 2018-2024 Intel Corporation -# SPDX-License-Identifier: Apache-2.0 - -import numpy as np - -from openvino.tools.mo.front.common.partial_infer.utils import int64_array -from openvino.tools.mo.front.extractor import FrontExtractorOp -from openvino.tools.mo.front.mxnet.extractors.utils import get_mxnet_layer_attrs -from openvino.tools.mo.ops.deformable_convolution import DeformableConvolution - - -class DeformableConvolutionExtractor(FrontExtractorOp): - op = '_contrib_DeformableConvolution' - enabled = True - - @classmethod - def extract(cls, node): - attr = get_mxnet_layer_attrs(node.symbol_dict) - - kernel = attr.tuple("kernel", int, None) - stride = attr.tuple("stride", int, tuple(np.ones(len(kernel), dtype=np.int64))) - padding = attr.tuple("pad", int, tuple(np.zeros(len(kernel), dtype=np.int64))) - dilate = attr.tuple("dilate", int, tuple(np.ones(len(kernel), dtype=np.int64))) - num_deformable_group = attr.int("num_deformable_group", 1) - num_group = attr.int("num_group", 1) - 
output = attr.int("num_filter", None) - bias_term = attr.str("no_bias", 'False') == 'False' - - final_dilations = int64_array([1, 1, *[d for d in dilate]]) if dilate is not None else None - - node_attrs = { - 'op': __class__.op, - 'bias_addable': True, - 'bias_term': bias_term, - 'pad': int64_array([[0, 0], [0, 0], *[[pad, pad] for pad in padding]]), - 'pad_spatial_shape': int64_array([[pad, pad] for pad in padding]), - 'dilation': final_dilations, - 'output_spatial_shape': None, - 'output_shape': None, - 'stride': int64_array([1, 1, *[s for s in stride]]), - 'group': num_group, - 'deformable_group': num_deformable_group, - 'output': output, - 'kernel_spatial': int64_array([k for k in kernel]), - - 'input_feature_channel': 1, - 'output_feature_channel': 0, - 'kernel_spatial_idx': None, - 'reshape_kernel': True, - 'weights_index': 2, - - 'spatial_dims': None, - 'channel_dims': int64_array([1]), - 'batch_dims': int64_array([0]), - 'layout': 'NCHW', - } - - # update the attributes of the node - DeformableConvolution.update_node_stat(node, node_attrs) - return cls.enabled diff --git a/tools/mo/openvino/tools/mo/front/mxnet/deformable_psroi_pooling_ext.py b/tools/mo/openvino/tools/mo/front/mxnet/deformable_psroi_pooling_ext.py deleted file mode 100644 index 49ae8303c178b7..00000000000000 --- a/tools/mo/openvino/tools/mo/front/mxnet/deformable_psroi_pooling_ext.py +++ /dev/null @@ -1,39 +0,0 @@ -# Copyright (C) 2018-2024 Intel Corporation -# SPDX-License-Identifier: Apache-2.0 - -from openvino.tools.mo.ops.psroipooling import DeformablePSROIPoolingOp, PSROIPoolingOp -from openvino.tools.mo.front.extractor import FrontExtractorOp -from openvino.tools.mo.front.mxnet.extractors.utils import get_mxnet_layer_attrs - - -class DeformablePSROIPoolingFrontExtractor(FrontExtractorOp): - op = '_contrib_DeformablePSROIPooling' - enabled = True - - @classmethod - def extract(cls, node): - attrs = get_mxnet_layer_attrs(node.symbol_dict) - spatial_scale = attrs.float('spatial_scale', None) - group_size = attrs.int('group_size', 0) - no_trans = attrs.bool('no_trans', False) - trans_std = attrs.float('trans_std', 0) - output_dim = attrs.int('output_dim', 0) - part_size = attrs.int('part_size', 0) - sample_per_part = attrs.int('sample_per_part', 1) - pooled_size = attrs.int('pooled_size', 0) - - data = { - 'spatial_scale': spatial_scale, - 'mode': 'bilinear_deformable', - 'group_size': group_size, - 'output_dim': output_dim, - 'trans_std': trans_std, - 'part_size': part_size, - 'spatial_bins_x': sample_per_part, - 'spatial_bins_y': sample_per_part, - 'pooled_width': pooled_size, - 'pooled_height': pooled_size, - } - - DeformablePSROIPoolingOp.update_node_stat(node, data) - return cls.enabled diff --git a/tools/mo/openvino/tools/mo/front/mxnet/div_sqrt_dim.py b/tools/mo/openvino/tools/mo/front/mxnet/div_sqrt_dim.py deleted file mode 100644 index 46d780a8fff533..00000000000000 --- a/tools/mo/openvino/tools/mo/front/mxnet/div_sqrt_dim.py +++ /dev/null @@ -1,50 +0,0 @@ -# Copyright (C) 2018-2024 Intel Corporation -# SPDX-License-Identifier: Apache-2.0 -import numpy as np - -from openvino.tools.mo.front.PowerToEltwises import PowerToEltwises -from openvino.tools.mo.front.common.partial_infer.utils import mo_array -from openvino.tools.mo.front.common.replacement import FrontReplacementOp -from openvino.tools.mo.graph.graph import Graph, rename_nodes -from openvino.tools.mo.ops.Cast import Cast -from openvino.tools.mo.ops.ConvertLike import ConvertLike -from openvino.tools.mo.ops.elementwise import Div -from 
openvino.tools.mo.ops.power import AttributedPower -from openvino.tools.mo.ops.shape import Shape -from openvino.tools.mo.utils.shape import node_to_get_shape_value_of_indices - - -class DivSqrtDim(FrontReplacementOp): - """ - Replace _contrib_div_sqrt_dim with sub-graph that matches the formula out = (data / sqrt(data.shape[-1])) - """ - op = '_contrib_div_sqrt_dim' - enabled = True - - def run_before(self): - return [PowerToEltwises] - - def replace_sub_graph(self, graph: Graph, match: dict): - div_sqrt = match['op'] - div_sqrt_name = div_sqrt.soft_get('name', div_sqrt.id) - shape_node = Shape(graph, dict(name=div_sqrt_name + '/Shape')).create_node() - data_out_port = div_sqrt.in_port(0).get_source() - shape_node.in_port(0).connect(data_out_port) - - shape_values_node = node_to_get_shape_value_of_indices(shape_node=shape_node, indices=[-1]) - - pow_node = AttributedPower(graph, dict(name=div_sqrt_name + '/Sqrt', - power=mo_array(0.5))).create_node() - - # Due to specification, Power must have inputs with the same data type. - convert_pow_input = Cast(graph, dict(dst_type=np.float32, - name=shape_values_node.name + '/ConvertToFP32')).create_node() - div_node = Div(graph, dict(name="Div")).create_node() - - shape_values_node.out_port(0).connect(convert_pow_input.in_port(0)) - convert_pow_input.out_port(0).connect(pow_node.in_port(0)) - div_sqrt.in_port(0).get_connection().set_destination(div_node.in_port(0)) - div_node.in_port(1).connect(pow_node.out_port(0)) - div_sqrt.out_port(0).get_connection().set_source(div_node.out_port(0)) - - rename_nodes([(div_sqrt, div_sqrt_name + '/ShouldBeDeleted'), (div_node, div_sqrt_name)]) diff --git a/tools/mo/openvino/tools/mo/front/mxnet/dropout_ext.py b/tools/mo/openvino/tools/mo/front/mxnet/dropout_ext.py deleted file mode 100644 index 63635aae8dbc1f..00000000000000 --- a/tools/mo/openvino/tools/mo/front/mxnet/dropout_ext.py +++ /dev/null @@ -1,16 +0,0 @@ -# Copyright (C) 2018-2024 Intel Corporation -# SPDX-License-Identifier: Apache-2.0 - -from openvino.tools.mo.ops.identity import Identity -from openvino.tools.mo.front.extractor import FrontExtractorOp -from openvino.tools.mo.graph.graph import Node - - -class DropoutExt(FrontExtractorOp): - op = 'Dropout' - enabled = True - - @classmethod - def extract(cls, node: Node): - Identity.update_node_stat(node, {}) - return cls.enabled diff --git a/tools/mo/openvino/tools/mo/front/mxnet/einsum_ext.py b/tools/mo/openvino/tools/mo/front/mxnet/einsum_ext.py deleted file mode 100644 index 6d74e5ad667da9..00000000000000 --- a/tools/mo/openvino/tools/mo/front/mxnet/einsum_ext.py +++ /dev/null @@ -1,20 +0,0 @@ -# Copyright (C) 2018-2024 Intel Corporation -# SPDX-License-Identifier: Apache-2.0 - -from openvino.tools.mo.ops.einsum import Einsum -from openvino.tools.mo.front.extractor import FrontExtractorOp -from openvino.tools.mo.front.mxnet.extractors.utils import get_mxnet_layer_attrs - - -class EinsumExtractor(FrontExtractorOp): - op = '_npi_einsum' - enabled = True - - @classmethod - def extract(cls, einsum_node): - einsum_name = einsum_node.soft_get('name', einsum_node.id) - attrs = get_mxnet_layer_attrs(einsum_node.symbol_dict) - equation = attrs.str('subscripts') - normalized_equation = Einsum.normalize_equation(einsum_name, equation) - Einsum.update_node_stat(einsum_node, {'equation': normalized_equation}) - return cls.enabled diff --git a/tools/mo/openvino/tools/mo/front/mxnet/elementwise_ext.py b/tools/mo/openvino/tools/mo/front/mxnet/elementwise_ext.py deleted file mode 100644 index 
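# Illustrative NumPy sketch of the formula documented by the deleted DivSqrtDim replacer above,
# out = data / sqrt(data.shape[-1]); the subgraph reads the last dimension via ShapeOf + Gather
# and raises it to the power 0.5. The helper below is hypothetical reference code.
import numpy as np

def div_sqrt_dim_ref(data):
    return data / np.sqrt(np.float32(data.shape[-1]))

x = np.ones((2, 64), dtype=np.float32)
print(div_sqrt_dim_ref(x)[0, 0])  # 0.125, i.e. 1 / sqrt(64)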
998a6dbda7c452..00000000000000 --- a/tools/mo/openvino/tools/mo/front/mxnet/elementwise_ext.py +++ /dev/null @@ -1,415 +0,0 @@ -# Copyright (C) 2018-2024 Intel Corporation -# SPDX-License-Identifier: Apache-2.0 - -import numpy as np - -from openvino.tools.mo.front.common.partial_infer.utils import mo_array -from openvino.tools.mo.ops.elementwise import Mul, Sub, Add, Maximum, Minimum, Div, Greater, GreaterEqual, Equal, Less, \ - LessEqual, Pow, NotEqual, LogicalAnd, LogicalOr, Round -from openvino.tools.mo.front.extractor import FrontExtractorOp -from openvino.tools.mo.front.mxnet.extractors.utils import get_mxnet_layer_attrs -from openvino.tools.mo.graph.graph import Node -from openvino.tools.mo.ops.eltwise_n import EltwiseNAdd -from openvino.tools.mo.ops.power import AttributedPower - - -class PlusExtractor(FrontExtractorOp): - op = '_Plus' - enabled = True - - @classmethod - def extract(cls, node: Node): - Add.update_node_stat(node) - return cls.enabled - - -class BroadcastAddFrontExtractor(FrontExtractorOp): - op = 'broadcast_add' - enabled = True - - @classmethod - def extract(cls, node): - Add.update_node_stat(node) - return cls.enabled - - -class BroadcastDivFrontExtractor(FrontExtractorOp): - op = 'broadcast_div' - enabled = True - - @classmethod - def extract(cls, node): - Div.update_node_stat(node) - return cls.enabled - - -class BroadcastSubFrontExtractor(FrontExtractorOp): - op = 'broadcast_sub' - enabled = True - - @classmethod - def extract(cls, node): - Sub.update_node_stat(node) - return cls.enabled - - -class ElementwiseAddExtractor(FrontExtractorOp): - op = 'elemwise_add' - enabled = True - - @classmethod - def extract(cls, node: Node): - Add.update_node_stat(node) - return cls.enabled - - -class ElementWiseSum(FrontExtractorOp): - op = 'ElementWiseSum' - enabled = True - - @classmethod - def extract(cls, node: Node): - EltwiseNAdd.update_node_stat(node) - return cls.enabled - - -class AddNExtractor(FrontExtractorOp): - op = 'add_n' - enabled = True - - @classmethod - def extract(cls, node: Node): - EltwiseNAdd.update_node_stat(node) - return cls.enabled - - -class ElementwiseMulExtractor(FrontExtractorOp): - op = 'elemwise_mul' - enabled = True - - @classmethod - def extract(cls, node: Node): - Mul.update_node_stat(node) - return cls.enabled - - -class BroadcastMulFrontExtractor(FrontExtractorOp): - op = 'broadcast_mul' - enabled = True - - @classmethod - def extract(cls, node): - Mul.update_node_stat(node) - return cls.enabled - - -class ElemwiseSubFrontExtractor(FrontExtractorOp): - op = 'elemwise_sub' - enabled = True - - @classmethod - def extract(cls, node): - Sub.update_node_stat(node, {}) - return cls.enabled - - -class ElemwiseDivFrontExtractor(FrontExtractorOp): - op = 'elemwise_div' - enabled = True - - @classmethod - def extract(cls, node): - Div.update_node_stat(node, {}) - return cls.enabled - - -class BroadcastMaximumFrontExtractor(FrontExtractorOp): - op = 'broadcast_maximum' - enabled = True - - @classmethod - def extract(cls, node): - Maximum.update_node_stat(node) - return cls.enabled - - -class BroadcastMinimumFrontExtractor(FrontExtractorOp): - op = 'broadcast_minimum' - enabled = True - - @classmethod - def extract(cls, node): - Minimum.update_node_stat(node) - return cls.enabled - - -class BroadcastGreaterFrontExtractor(FrontExtractorOp): - op = 'broadcast_greater' - enabled = True - - @classmethod - def extract(cls, node): - Greater.update_node_stat(node) - return cls.enabled - - -class BroadcastGreaterEqualFrontExtractor(FrontExtractorOp): - op = 
'broadcast_greater_equal' - enabled = True - - @classmethod - def extract(cls, node): - GreaterEqual.update_node_stat(node) - return cls.enabled - - -class BroadcastEqualFrontExtractor(FrontExtractorOp): - op = 'broadcast_equal' - enabled = True - - @classmethod - def extract(cls, node): - Equal.update_node_stat(node) - return cls.enabled - - -class BroadcastNotEqualFrontExtractor(FrontExtractorOp): - op = 'broadcast_not_equal' - enabled = True - - @classmethod - def extract(cls, node): - NotEqual.update_node_stat(node) - return cls.enabled - - -class BroadcastLesserFrontExtractor(FrontExtractorOp): - op = 'broadcast_lesser' - enabled = True - - @classmethod - def extract(cls, node): - Less.update_node_stat(node) - return cls.enabled - - -class BroadcastLesserEqualFrontExtractor(FrontExtractorOp): - op = 'broadcast_lesser_equal' - enabled = True - - @classmethod - def extract(cls, node): - LessEqual.update_node_stat(node) - return cls.enabled - - -class BroadcastPowerFrontExtractor(FrontExtractorOp): - op = 'broadcast_power' - enabled = True - - @classmethod - def extract(cls, node): - Pow.update_node_stat(node) - return cls.enabled - - -class BroadcastLogicalAndFrontExtractor(FrontExtractorOp): - op = 'broadcast_logical_and' - enabled = True - - @classmethod - def extract(cls, node): - LogicalAnd.update_node_stat(node) - return cls.enabled - - -class BroadcastLogicalOrFrontExtractor(FrontExtractorOp): - op = 'broadcast_logical_or' - enabled = True - - @classmethod - def extract(cls, node): - LogicalOr.update_node_stat(node) - return cls.enabled - - -class MaximumFrontExtractor(FrontExtractorOp): - op = '_maximum' - enabled = True - - @classmethod - def extract(cls, node): - Maximum.update_node_stat(node) - return cls.enabled - - -class MinimumFrontExtractor(FrontExtractorOp): - op = '_minimum' - enabled = True - - @classmethod - def extract(cls, node): - Minimum.update_node_stat(node) - return cls.enabled - - -class PlusScalarFrontExtractor(FrontExtractorOp): - op = '_plus_scalar' - enabled = True - - @classmethod - def extract(cls, node): - attrs = get_mxnet_layer_attrs(node.symbol_dict) - node['scalar'] = mo_array([attrs.float('scalar', 0.0)], dtype=np.float32) - return cls.enabled - - -class MinusScalarFrontExtractor(FrontExtractorOp): - op = '_minus_scalar' - enabled = True - - @classmethod - def extract(cls, node): - attrs = get_mxnet_layer_attrs(node.symbol_dict) - node['scalar'] = mo_array([attrs.float('scalar', 0.0)]) - return cls.enabled - - -class MulScalarFrontExtractor(FrontExtractorOp): - op = '_mul_scalar' - enabled = True - - @classmethod - def extract(cls, node): - attrs = get_mxnet_layer_attrs(node.symbol_dict) - node['scalar'] = mo_array([attrs.float('scalar', 1.0)], dtype=np.float32) - return cls.enabled - - -class DivScalarFrontExtractor(FrontExtractorOp): - op = '_div_scalar' - enabled = True - - @classmethod - def extract(cls, node): - attrs = get_mxnet_layer_attrs(node.symbol_dict) - node['scalar'] = attrs.float('scalar', 1.0) - return cls.enabled - - -class GreaterScalarFrontExtractor(FrontExtractorOp): - op = '_greater_scalar' - enabled = True - - @classmethod - def extract(cls, node): - attrs = get_mxnet_layer_attrs(node.symbol_dict) - node['scalar'] = mo_array([attrs.float('scalar', 1.0)]) - return cls.enabled - - -class GreaterEqualScalarFrontExtractor(FrontExtractorOp): - op = '_greater_equal_scalar' - enabled = True - - @classmethod - def extract(cls, node): - attrs = get_mxnet_layer_attrs(node.symbol_dict) - node['scalar'] = mo_array([attrs.float('scalar', 
1.0)]) - return cls.enabled - - -class EqualScalarFrontExtractor(FrontExtractorOp): - op = '_equal_scalar' - enabled = True - - @classmethod - def extract(cls, node): - attrs = get_mxnet_layer_attrs(node.symbol_dict) - node['scalar'] = mo_array([attrs.float('scalar', 1.0)]) - return cls.enabled - - -class NotEqualScalarFrontExtractor(FrontExtractorOp): - op = '_not_equal_scalar' - enabled = True - - @classmethod - def extract(cls, node): - attrs = get_mxnet_layer_attrs(node.symbol_dict) - node['scalar'] = mo_array([attrs.float('scalar', 1.0)]) - return cls.enabled - - -class LesserScalarFrontExtractor(FrontExtractorOp): - op = '_lesser_scalar' - enabled = True - - @classmethod - def extract(cls, node): - attrs = get_mxnet_layer_attrs(node.symbol_dict) - node['scalar'] = mo_array([attrs.float('scalar', 1.0)]) - return cls.enabled - - -class LesserEqualScalarFrontExtractor(FrontExtractorOp): - op = '_lesser_equal_scalar' - enabled = True - - @classmethod - def extract(cls, node): - attrs = get_mxnet_layer_attrs(node.symbol_dict) - node['scalar'] = mo_array([attrs.float('scalar', 1.0)]) - return cls.enabled - - -class MinimumScalarFrontExtractor(FrontExtractorOp): - op = '_minimum_scalar' - enabled = True - - @classmethod - def extract(cls, node): - attrs = get_mxnet_layer_attrs(node.symbol_dict) - node['scalar'] = attrs.float('scalar', 1.0) - return cls.enabled - - -class MaximumScalarFrontExtractor(FrontExtractorOp): - op = '_maximum_scalar' - enabled = True - - @classmethod - def extract(cls, node): - attrs = get_mxnet_layer_attrs(node.symbol_dict) - node['scalar'] = attrs.float('scalar', 1.0) - return cls.enabled - - -class ZerosFrontExtractor(FrontExtractorOp): - op = 'zeros_like' - enabled = True - - @classmethod - def extract(cls, node): - AttributedPower.update_node_stat(node, {'scale': 0}) - return cls.enabled - - -class OnesFrontExtractor(FrontExtractorOp): - op = 'ones_like' - enabled = True - - @classmethod - def extract(cls, node): - AttributedPower.update_node_stat(node, {'scale': 0, 'shift': 1}) - return cls.enabled - - -class RoundExtractor(FrontExtractorOp): - op = 'round' - enabled = True - - @classmethod - def extract(cls, node): - Round.update_node_stat(node, {'mode': 'half_away_from_zero'}) - return cls.enabled diff --git a/tools/mo/openvino/tools/mo/front/mxnet/eltwise_scalar_replacers.py b/tools/mo/openvino/tools/mo/front/mxnet/eltwise_scalar_replacers.py deleted file mode 100644 index b25b2e653ed6d8..00000000000000 --- a/tools/mo/openvino/tools/mo/front/mxnet/eltwise_scalar_replacers.py +++ /dev/null @@ -1,115 +0,0 @@ -# Copyright (C) 2018-2024 Intel Corporation -# SPDX-License-Identifier: Apache-2.0 - -from openvino.tools.mo.ops.elementwise import Div, Greater, GreaterEqual, Equal, NotEqual, Sub, Mul, Add, Less, LessEqual, Minimum, Maximum -from openvino.tools.mo.front.common.replacement import FrontReplacementOp -from openvino.tools.mo.front.mxnet.extractors.utils import scalar_ops_replacer -from openvino.tools.mo.graph.graph import Node, Graph - - -class DivScalarFrontReplacer(FrontReplacementOp): - op = '_div_scalar' - enabled = True - - def replace_op(self, graph: Graph, node: Node): - div_node = scalar_ops_replacer(graph, node, Div) - return [div_node.id] - - -class GreaterScalarFrontReplacer(FrontReplacementOp): - op = '_greater_scalar' - enabled = True - - def replace_op(self, graph: Graph, node: Node): - greater_node = scalar_ops_replacer(graph, node, Greater) - return [greater_node.id] - - -class GreaterEqualScalarFrontReplacer(FrontReplacementOp): - op = 
'_greater_equal_scalar' - enabled = True - - def replace_op(self, graph: Graph, node: Node): - greater_node = scalar_ops_replacer(graph, node, GreaterEqual) - return [greater_node.id] - - -class EqualScalarFrontReplacer(FrontReplacementOp): - op = '_equal_scalar' - enabled = True - - def replace_op(self, graph: Graph, node: Node): - equal_scalar_node = scalar_ops_replacer(graph, node, Equal) - return [equal_scalar_node.id] - - -class NotEqualScalarFrontReplacer(FrontReplacementOp): - op = '_not_equal_scalar' - enabled = True - - def replace_op(self, graph: Graph, node: Node): - not_equal_scalar_node = scalar_ops_replacer(graph, node, NotEqual) - return [not_equal_scalar_node.id] - - -class LesserScalarFrontReplacer(FrontReplacementOp): - op = '_lesser_scalar' - enabled = True - - def replace_op(self, graph: Graph, node: Node): - lesser_scalar_node = scalar_ops_replacer(graph, node, Less) - return [lesser_scalar_node.id] - - -class LesserEqualScalarFrontReplacer(FrontReplacementOp): - op = '_lesser_equal_scalar' - enabled = True - - def replace_op(self, graph: Graph, node: Node): - lesser_equal_scalar_node = scalar_ops_replacer(graph, node, LessEqual) - return [lesser_equal_scalar_node.id] - - -class MinusScalarFrontReplacer(FrontReplacementOp): - op = '_minus_scalar' - enabled = True - - def replace_op(self, graph: Graph, node: Node): - sub_node = scalar_ops_replacer(graph, node, Sub) - return [sub_node.id] - - -class MulScalarFrontReplacer(FrontReplacementOp): - op = '_mul_scalar' - enabled = True - - def replace_op(self, graph: Graph, node: Node): - mul_node = scalar_ops_replacer(graph, node, Mul) - return [mul_node.id] - - -class PlusScalarFrontReplacer(FrontReplacementOp): - op = '_plus_scalar' - enabled = True - - def replace_op(self, graph: Graph, node: Node): - add_node = scalar_ops_replacer(graph, node, Add) - return [add_node.id] - - -class MinimumScalarFrontReplacer(FrontReplacementOp): - op = '_minimum_scalar' - enabled = True - - def replace_op(self, graph: Graph, node: Node): - minimum_scalar_node = scalar_ops_replacer(graph, node, Minimum) - return [minimum_scalar_node.id] - - -class MaximumScalarFrontReplacer(FrontReplacementOp): - op = '_maximum_scalar' - enabled = True - - def replace_op(self, graph: Graph, node: Node): - maximum_scalar_node = scalar_ops_replacer(graph, node, Maximum) - return [maximum_scalar_node.id] diff --git a/tools/mo/openvino/tools/mo/front/mxnet/exp_ext.py b/tools/mo/openvino/tools/mo/front/mxnet/exp_ext.py deleted file mode 100644 index 10cb1da29975e9..00000000000000 --- a/tools/mo/openvino/tools/mo/front/mxnet/exp_ext.py +++ /dev/null @@ -1,15 +0,0 @@ -# Copyright (C) 2018-2024 Intel Corporation -# SPDX-License-Identifier: Apache-2.0 - -from openvino.tools.mo.ops.activation_ops import Exp -from openvino.tools.mo.front.extractor import FrontExtractorOp - - -class ExpExtractor(FrontExtractorOp): - op = 'exp' - enabled = True - - @classmethod - def extract(cls, node): - Exp.update_node_stat(node) - return cls.enabled diff --git a/tools/mo/openvino/tools/mo/front/mxnet/expand_dims_ext.py b/tools/mo/openvino/tools/mo/front/mxnet/expand_dims_ext.py deleted file mode 100644 index 46dfa68bb4579b..00000000000000 --- a/tools/mo/openvino/tools/mo/front/mxnet/expand_dims_ext.py +++ /dev/null @@ -1,18 +0,0 @@ -# Copyright (C) 2018-2024 Intel Corporation -# SPDX-License-Identifier: Apache-2.0 - -from openvino.tools.mo.front.extractor import FrontExtractorOp -from openvino.tools.mo.front.mxnet.extractors.utils import get_mxnet_layer_attrs -from 
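# Illustrative sketch of the rewrite performed by the deleted *_scalar extractor/replacer pairs
# above: each MXNet <op>_scalar node becomes the matching binary elementwise operation with the
# stored scalar as a constant second operand. The mapping and helper below are hypothetical NumPy
# equivalents of that rewrite, not MO code.
import numpy as np

SCALAR_OP_EQUIV = {
    '_plus_scalar': np.add,
    '_minus_scalar': np.subtract,
    '_mul_scalar': np.multiply,
    '_div_scalar': np.divide,
    '_greater_scalar': np.greater,
    '_maximum_scalar': np.maximum,
}

def apply_scalar_op(op_name, data, scalar):
    # e.g. _mul_scalar(x, scalar=3.0) is rewritten as Mul(x, Const(3.0))
    return SCALAR_OP_EQUIV[op_name](data, np.float32(scalar))

print(apply_scalar_op('_mul_scalar', np.array([1.0, 2.0]), 3.0))  # [3. 6.]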
openvino.tools.mo.ops.expand_dims import ExpandDims - - -class ExpandDimsExtractor(FrontExtractorOp): - op = 'expand_dims' - enabled = True - - @classmethod - def extract(cls, node): - attrs = get_mxnet_layer_attrs(node.symbol_dict) - expand_axis = attrs.int('axis', None) - ExpandDims.update_node_stat(node, {'expand_axis': expand_axis}) - return cls.enabled diff --git a/tools/mo/openvino/tools/mo/front/mxnet/extractor.py b/tools/mo/openvino/tools/mo/front/mxnet/extractor.py deleted file mode 100644 index 472e26856f1dca..00000000000000 --- a/tools/mo/openvino/tools/mo/front/mxnet/extractor.py +++ /dev/null @@ -1,56 +0,0 @@ -# Copyright (C) 2018-2024 Intel Corporation -# SPDX-License-Identifier: Apache-2.0 - -from openvino.tools.mo.front.mxnet.extractors.batchnorm import batch_norm_ext -from openvino.tools.mo.front.mxnet.extractors.concat import concat_ext -from openvino.tools.mo.front.mxnet.extractors.l2_normalization import l2_normalization_ext -from openvino.tools.mo.front.mxnet.extractors.multibox_prior import multi_box_prior_ext -from openvino.tools.mo.front.mxnet.extractors.scaleshift import scale_shift_ext -from openvino.tools.mo.front.mxnet.extractors.slice_axis import slice_axis_ext -from openvino.tools.mo.front.mxnet.extractors.utils import get_mxnet_layer_attrs -from openvino.tools.mo.graph.graph import Node -from openvino.tools.mo.utils.error import Error -from openvino.tools.mo.utils.utils import refer_to_faq_msg - - -def extractor_wrapper(mxnet_extractor): - return lambda node: mxnet_extractor(get_mxnet_layer_attrs(node.symbol_dict)) - - -mxnet_op_extractors = { - 'BatchNorm': extractor_wrapper(batch_norm_ext), - 'ScaleShift': extractor_wrapper(scale_shift_ext), - 'slice_axis': extractor_wrapper(slice_axis_ext), - 'Concat': extractor_wrapper(concat_ext), - 'L2Normalization': extractor_wrapper(l2_normalization_ext), - '_contrib_MultiBoxPrior': extractor_wrapper(multi_box_prior_ext), -} - - -def common_mxnet_fields(node: Node): - return { - 'kind': 'op', - 'name': node.id, - 'type': node['symbol_dict']['op'], - 'op': node['symbol_dict']['op'], - 'infer': None, - } - - -def mxnet_op_extractor(node: Node): - result = common_mxnet_fields(node) - op = result['op'] - if op not in mxnet_op_extractors: - raise Error( - "Operation '{}' not supported. Please register it as custom op. " + - refer_to_faq_msg(86), - op) - result_attr = mxnet_op_extractors[op](node) - - if result_attr is None: - raise Error('Model Optimizer does not support layer "{}". Please, implement extension. 
'.format(node.name) + - refer_to_faq_msg(45)) - - result.update(result_attr) - supported = bool(result_attr) - return supported, result diff --git a/tools/mo/openvino/tools/mo/front/mxnet/extractors/__init__.py b/tools/mo/openvino/tools/mo/front/mxnet/extractors/__init__.py deleted file mode 100644 index 8ba81a92b19c53..00000000000000 --- a/tools/mo/openvino/tools/mo/front/mxnet/extractors/__init__.py +++ /dev/null @@ -1,3 +0,0 @@ -# Copyright (C) 2018-2024 Intel Corporation -# SPDX-License-Identifier: Apache-2.0 - diff --git a/tools/mo/openvino/tools/mo/front/mxnet/extractors/add_n.py b/tools/mo/openvino/tools/mo/front/mxnet/extractors/add_n.py deleted file mode 100644 index 57b1e69338e6e2..00000000000000 --- a/tools/mo/openvino/tools/mo/front/mxnet/extractors/add_n.py +++ /dev/null @@ -1,5 +0,0 @@ -# Copyright (C) 2018-2024 Intel Corporation -# SPDX-License-Identifier: Apache-2.0 - -def add_n_ext(attrs): - return {} diff --git a/tools/mo/openvino/tools/mo/front/mxnet/extractors/batchnorm.py b/tools/mo/openvino/tools/mo/front/mxnet/extractors/batchnorm.py deleted file mode 100644 index ed6d7c66bd0bc0..00000000000000 --- a/tools/mo/openvino/tools/mo/front/mxnet/extractors/batchnorm.py +++ /dev/null @@ -1,17 +0,0 @@ -# Copyright (C) 2018-2024 Intel Corporation -# SPDX-License-Identifier: Apache-2.0 - -from openvino.tools.mo.front.common.extractors.utils import layout_attrs -from openvino.tools.mo.front.common.partial_infer.batch_norm import batch_norm_4_infer -from openvino.tools.mo.front.common.partial_infer.utils import reverse_bypass_infer - -def batch_norm_ext(attrs): - node_attrs = { - 'type': 'BatchNormalization', - 'eps': attrs.float('eps', 0.001), - 'infer': batch_norm_4_infer, - 'reverse_infer': lambda node: reverse_bypass_infer(node, in_ports=[0]), - 'fix_gamma': attrs.bool('fix_gamma', False) - } - node_attrs.update(layout_attrs()) - return node_attrs diff --git a/tools/mo/openvino/tools/mo/front/mxnet/extractors/concat.py b/tools/mo/openvino/tools/mo/front/mxnet/extractors/concat.py deleted file mode 100644 index 200a1f75c6f509..00000000000000 --- a/tools/mo/openvino/tools/mo/front/mxnet/extractors/concat.py +++ /dev/null @@ -1,13 +0,0 @@ -# Copyright (C) 2018-2024 Intel Corporation -# SPDX-License-Identifier: Apache-2.0 - -from openvino.tools.mo.front.common.partial_infer.concat import concat_infer - - -def concat_ext(attrs): - node_attrs = { - 'type': 'Concat', - 'axis': attrs.int("dim", 1), - 'infer': concat_infer - } - return node_attrs diff --git a/tools/mo/openvino/tools/mo/front/mxnet/extractors/l2_normalization.py b/tools/mo/openvino/tools/mo/front/mxnet/extractors/l2_normalization.py deleted file mode 100644 index 45e116c7635eb3..00000000000000 --- a/tools/mo/openvino/tools/mo/front/mxnet/extractors/l2_normalization.py +++ /dev/null @@ -1,18 +0,0 @@ -# Copyright (C) 2018-2024 Intel Corporation -# SPDX-License-Identifier: Apache-2.0 - -from openvino.tools.mo.front.common.partial_infer.elemental import copy_shape_infer - - -def l2_normalization_ext(attrs): - eps = attrs.float('eps', 1e-10) - - node_attrs = { - 'op': 'Normalize', - 'type': 'Normalize', - 'eps': eps, - 'across_spatial': 0, - 'channel_shared': 0, - 'infer': copy_shape_infer - } - return node_attrs diff --git a/tools/mo/openvino/tools/mo/front/mxnet/extractors/multibox_prior.py b/tools/mo/openvino/tools/mo/front/mxnet/extractors/multibox_prior.py deleted file mode 100644 index 97068f47ee15c7..00000000000000 --- a/tools/mo/openvino/tools/mo/front/mxnet/extractors/multibox_prior.py +++ /dev/null @@ -1,29 +0,0 
@@ -# Copyright (C) 2018-2024 Intel Corporation -# SPDX-License-Identifier: Apache-2.0 - -from openvino.tools.mo.front.common.partial_infer.multi_box_prior import multi_box_prior_infer_mxnet - - -def multi_box_prior_ext(attr): - min_size = list(attr.tuple("sizes", float, (1, 1))) - offset_y, offset_x = attr.tuple("offsets", float, (0.5, 0.5)) - clip = 0 if not attr.bool("clip", False) else 1 - aspect_ratio = attr.tuple("ratios", float, None) - step_y, step_x = attr.tuple("steps", float, (-1, -1)) - if len(aspect_ratio) == 0: - aspect_ratio = [1.0] - - node_attrs = { - 'type': 'PriorBox', - 'step': step_x, - 'offset': offset_x, - 'variance': '0.100000,0.100000,0.200000,0.200000', - 'flip': 0, - 'clip': clip, - 'min_size': min_size, - 'max_size': '', - 'aspect_ratio': list(aspect_ratio), - 'scale_all_sizes': 0, - 'infer': multi_box_prior_infer_mxnet - } - return node_attrs diff --git a/tools/mo/openvino/tools/mo/front/mxnet/extractors/relu.py b/tools/mo/openvino/tools/mo/front/mxnet/extractors/relu.py deleted file mode 100644 index 417cf71ad69bed..00000000000000 --- a/tools/mo/openvino/tools/mo/front/mxnet/extractors/relu.py +++ /dev/null @@ -1,15 +0,0 @@ -# Copyright (C) 2018-2024 Intel Corporation -# SPDX-License-Identifier: Apache-2.0 - -from openvino.tools.mo.ops.activation_ops import ReLU -from openvino.tools.mo.front.extractor import FrontExtractorOp - - -class ReLUFrontExtractor(FrontExtractorOp): - op = 'relu' - enabled = True - - @classmethod - def extract(cls, node): - ReLU.update_node_stat(node) - return ReLUFrontExtractor.enabled diff --git a/tools/mo/openvino/tools/mo/front/mxnet/extractors/scaleshift.py b/tools/mo/openvino/tools/mo/front/mxnet/extractors/scaleshift.py deleted file mode 100644 index bf2fb0deac4d43..00000000000000 --- a/tools/mo/openvino/tools/mo/front/mxnet/extractors/scaleshift.py +++ /dev/null @@ -1,15 +0,0 @@ -# Copyright (C) 2018-2024 Intel Corporation -# SPDX-License-Identifier: Apache-2.0 - -from openvino.tools.mo.front.common.extractors.utils import layout_attrs -from openvino.tools.mo.front.common.partial_infer.batch_norm import batch_norm_4_infer - - -def scale_shift_ext(attrs): - node_attrs = { - 'type': 'ScaleShift', - 'fix_gamma': attrs.bool("fix_gamma", True), - 'infer': batch_norm_4_infer - } - node_attrs.update(layout_attrs()) - return node_attrs diff --git a/tools/mo/openvino/tools/mo/front/mxnet/extractors/slice_axis.py b/tools/mo/openvino/tools/mo/front/mxnet/extractors/slice_axis.py deleted file mode 100644 index 16fab9e001078d..00000000000000 --- a/tools/mo/openvino/tools/mo/front/mxnet/extractors/slice_axis.py +++ /dev/null @@ -1,57 +0,0 @@ -# Copyright (C) 2018-2024 Intel Corporation -# SPDX-License-Identifier: Apache-2.0 - -from openvino.tools.mo.front.caffe.extractors.utils import get_canonical_axis_index -from openvino.tools.mo.utils.error import Error - - -def slice_axis_ext(attrs): - axis = attrs.int("axis", 0) - begin = attrs.int("begin", 0) - end = attrs.int("end", None) - - node_attrs = { - 'op': 'Crop', - 'axis': axis, - 'offset': begin, - 'dim': end, - 'infer': mxnet_slice_axis_infer - } - return node_attrs - - -def mxnet_slice_axis_infer(node): - in_shape = node.in_port(0).data.get_shape() - node.axis = get_canonical_axis_index(in_shape, node.axis) - slice_axis = node.axis - - new_shape = in_shape.copy() - new_shape[slice_axis] = new_shape[slice_axis] / len(node.out_nodes()) - - axis_size = in_shape[slice_axis] - if node.offset < 0: - node.offset += axis_size - - if not node.dim: - node.dim = axis_size - elif node.dim < 0: - 
node.dim += axis_size - - input_dim = in_shape.size - node.dim = (node.dim - node.offset) - if node.dim > in_shape[slice_axis]: - raise Error( - '{0} node dimension value is bigger than the corresponding value in the input shape {1}. ' + - '\nIn particular {2} is bigger than {3}. The Model Optimizer does not support this case. ' + - '\nTo overcome, try to edit the original model "end" property of the {0} layer.', - node.name, ','.join(str(i) for i in in_shape), str(node.dim), str(in_shape[slice_axis]) - ) - - for i in range(0, input_dim): - if i == slice_axis: - new_shape[i] = node.dim - else: - new_shape[i] = in_shape[i] - - for i in range(0, len(node.out_nodes())): - node.out_node(i)['shape'] = new_shape diff --git a/tools/mo/openvino/tools/mo/front/mxnet/extractors/utils.py b/tools/mo/openvino/tools/mo/front/mxnet/extractors/utils.py deleted file mode 100644 index 0d6f98a92c710b..00000000000000 --- a/tools/mo/openvino/tools/mo/front/mxnet/extractors/utils.py +++ /dev/null @@ -1,219 +0,0 @@ -# Copyright (C) 2018-2024 Intel Corporation -# SPDX-License-Identifier: Apache-2.0 - -import mxnet as mx -import numpy as np - -from openvino.tools.mo.ops.elementwise import Elementwise -from openvino.tools.mo.graph.graph import Node, Graph -from openvino.tools.mo.ops.const import Const -from openvino.tools.mo.utils.error import Error -from openvino.tools.mo.utils.str_to import StrTo -from openvino.tools.mo.utils.utils import refer_to_faq_msg - - -class AttrDictionary(object): - def __init__(self, dict): - self._dict = dict - - def is_valid(self): - return not self._dict is None - - def dict(self): - return self._dict - - def add_dict(self, dict): - self._dict.update(dict) - - def set(self, key, value): - self._dict[key] = value - - def remove(self, key): - if key in self._dict: - del self._dict[key] - - def str(self, key, default=None): - if not self.is_valid: - if default is None: - raise ValueError("Missing required parameter: " + key) - if key in self._dict: - return self._dict[key] - return default - - def dtype(self, key, default=None): - if self.is_valid and key in self._dict: - return mxnet_str_dtype_to_np(self._dict[key]) - return default - - def bool(self, key, default=None): - attr = self.str(key, default) - if isinstance(attr, str): - if attr.isdigit(): - return bool(int(attr)) - return StrTo.bool(attr) - else: - return attr - - def float(self, key, default=None): - return self.val(key, float, default) - - def int(self, key, default=None): - return self.val(key, int, default) - - def tuple(self, key, valtype=str, default=None): - attr = self.str(key, default) - if attr is None: - return default - if isinstance(attr, str): - if (not '(' in attr and not ')' in attr) and (not '[' in attr and not ']' in attr): - return (valtype(attr),) - if (not attr) or (not attr[1:-1].split(',')[0]): - return tuple([valtype(x) for x in default]) - return StrTo.tuple(valtype, attr) - else: - return tuple([valtype(x) for x in attr]) - - def list(self, key, valtype, default=None, sep=","): - attr = self.str(key, default) - if isinstance(attr, list): - attr = [valtype(x) for x in attr] - return attr - else: - return StrTo.list(attr, valtype, sep) - - def val(self, key, valtype, default=None): - attr = self.str(key, default) - attr = None if attr == 'None' else attr - if valtype is None: - return attr - else: - if not isinstance(attr, valtype) and attr is not None: - return valtype(attr) - else: - return attr - - def has(self, key): - if not self.is_valid: - return False - else: - return key in self._dict - - 
-def get_mxnet_node_edges(node: dict, node_id: [int, str], nodes_list: list, index_node_key: dict): - edge_list = [] - used_indices = set() - for in_port, src_node_id in enumerate(node['inputs']): - edge = create_mxnet_edge(index_node_key[src_node_id[0]], index_node_key[node_id], in_port, src_node_id[1], - nodes_list[src_node_id[0]]['name']) - edge_list.append(edge) - used_indices.add(src_node_id[0]) - return edge_list, used_indices - - -def create_mxnet_edge(src_node_id: str, dst_node_id: str, src_port: int, dst_port: int, framework_name: str): - edge_attrs = { - 'in': src_port, - 'out': dst_port, - # debug anchor for framework name and tensor name - 'fw_tensor_debug_info': [(framework_name, framework_name + ":" + str(dst_port))], - 'in_attrs': ['in'], - 'out_attrs': ['out'], - 'data_attrs': ['fw_tensor_debug_info'] - } - return src_node_id, dst_node_id, edge_attrs - - -def get_mxnet_layer_attrs(json_dic: dict): - attr = 'param' - if 'attr' in json_dic: - attr = 'attr' - elif 'attrs' in json_dic: - attr = 'attrs' - return AttrDictionary(json_dic[attr] if attr in json_dic else {}) - - -def get_json_layer_attrs(json_dic): - attr = 'param' - if 'attr' in json_dic: - attr = 'attr' - elif 'attrs' in json_dic: - attr = 'attrs' - return json_dic[attr] - - -def load_params(input_model, data_names=('data',)): - arg_params = {} - aux_params = {} - arg_keys = [] - aux_keys = [] - file_format = input_model.split('.')[-1] - loaded_weight = mx.nd.load(input_model) - if file_format == 'params': - for key in loaded_weight: - keys = key.split(':') - if len(keys) > 1 and 'aux' == keys[0]: - aux_keys.append(keys[1]) - aux_params[keys[1]] = loaded_weight[key] - elif len(keys) > 1 and 'arg' == keys[0]: - arg_keys.append(keys[1]) - arg_params[keys[1]] = loaded_weight[key] - else: - arg_keys.append(key) - arg_params[key] = loaded_weight[key] - elif file_format == 'nd': - for key in loaded_weight: - if 'auxs' in input_model: - aux_keys.append(key) - aux_params[key] = loaded_weight[key] - elif 'args' in input_model: - arg_keys.append(key) - arg_params[key] = loaded_weight[key] - else: - raise Error( - 'Unsupported Input model file type {}. Model Optimizer support only .params and .nd files format. 
' + - refer_to_faq_msg(85), file_format) - - data = mx.sym.Variable(data_names[0]) - model_params = mx.mod.Module(data, data_names=(data_names[0],), label_names=(data_names[0],)) - model_params._arg_params = arg_params - model_params._aux_params = aux_params - model_params._param_names = arg_keys - model_params._aux_names = aux_keys - return model_params - - -def init_rnn_states(model_nodes): - states = {} - for i, node in enumerate(model_nodes): - if node['op'] == 'RNN': - for i in node['inputs'][2:]: - attrs = get_mxnet_layer_attrs(model_nodes[i[0]]) - shape = attrs.tuple('__shape__', int, None) - if shape: - states.update({model_nodes[i[0]]['name']: shape}) - return states - - -def scalar_ops_replacer(graph: Graph, node: Node, elementwise_op_type=Elementwise): - scalar_value = Const(graph, dict(value=node.scalar, - symbol_dict={'name': node.id + '/const'})).create_node() - lin_node = elementwise_op_type(graph, dict(name=node.id + '/lin_', symbol_dict={'name': node.id + '/lin_'}) - ).create_node() - node.in_port(0).get_connection().set_destination(lin_node.in_port(0)) - lin_node.in_port(1).get_connection().set_source(scalar_value.out_port(0)) - node.out_port(0).get_connection().set_source(lin_node.out_port(0)) - return lin_node - - -MXNET_DATA_TYPES = { - 'float16': np.float16, - 'float32': np.float32, - 'float64': np.float64, - 'int8': np.int8, - 'int32': np.int32, - 'int64': np.int64, -} - - -def mxnet_str_dtype_to_np(dtype: str): - return MXNET_DATA_TYPES[dtype] diff --git a/tools/mo/openvino/tools/mo/front/mxnet/eye_ext.py b/tools/mo/openvino/tools/mo/front/mxnet/eye_ext.py deleted file mode 100644 index 82f52ef3b8d6f9..00000000000000 --- a/tools/mo/openvino/tools/mo/front/mxnet/eye_ext.py +++ /dev/null @@ -1,26 +0,0 @@ -# Copyright (C) 2018-2024 Intel Corporation -# SPDX-License-Identifier: Apache-2.0 - -import numpy as np - -from openvino.tools.mo.ops.eye import MXEye -from openvino.tools.mo.front.extractor import FrontExtractorOp -from openvino.tools.mo.front.mxnet.extractors.utils import get_mxnet_layer_attrs - - -class EyeExtractor(FrontExtractorOp): - op = '_eye' - enabled = True - - @classmethod - def extract(cls, node): - attrs = get_mxnet_layer_attrs(node.symbol_dict) - num_rows = attrs.int("N") - num_columns = attrs.int("M", num_rows) - if num_columns is None or num_columns == 0: - num_columns = num_rows - diagonal_index = attrs.int("k", 0) - out_type = attrs.dtype("dtype", np.float32) - new_attrs = {'num_rows': num_rows, 'num_columns': num_columns, 'diagonal_index': diagonal_index, 'output_type': out_type} - MXEye.update_node_stat(node, new_attrs) - return cls.enabled diff --git a/tools/mo/openvino/tools/mo/front/mxnet/eye_mx_to_eye.py b/tools/mo/openvino/tools/mo/front/mxnet/eye_mx_to_eye.py deleted file mode 100644 index 7e732e00e349b8..00000000000000 --- a/tools/mo/openvino/tools/mo/front/mxnet/eye_mx_to_eye.py +++ /dev/null @@ -1,47 +0,0 @@ -# Copyright (C) 2018-2024 Intel Corporation -# SPDX-License-Identifier: Apache-2.0 - -from openvino.tools.mo.front.common.replacement import FrontReplacementPattern -from openvino.tools.mo.front.common.partial_infer.utils import int64_array -from openvino.tools.mo.front.tf.graph_utils import create_op_with_const_inputs -from openvino.tools.mo.graph.graph import Graph -from openvino.tools.mo.ops.eye import Eye -from openvino.tools.mo.utils.error import Error - - -class EyeMXToEye(FrontReplacementPattern): - """ - This transformation converts MXEye operation (MXNet semantic) to Eye operation (OpenVINO semantic). 
- Refer to the Op implementation for the operations semantics description. - """ - enabled = True - - def find_and_replace_pattern(self, graph: Graph): - for mxeye in graph.get_op_nodes(op='MXEye'): - # save the original node name to use it in the new Eye op instance - original_name = mxeye.soft_get('name', mxeye.id) - mxeye['name'] = original_name + '/to_be_removed' - - if not mxeye.has_valid('num_rows'): - raise Error("MXEye should have valid ''num_rows'' attribute.") - num_rows = mxeye.soft_get('num_rows') - - if not mxeye.has_valid('num_columns'): - raise Error("MXEye should have valid ''num_columns'' attribute.") - num_columns = mxeye.soft_get('num_columns') - - if not mxeye.has_valid('diagonal_index'): - raise Error("MXEye should have valid ''diagonal_index'' attribute.") - diagonal_index = mxeye.soft_get('diagonal_index') - - if not mxeye.has_valid('output_type'): - raise Error("MXEye should have valid ''output_type'' attribute.") - output_type = mxeye.soft_get('output_type') - - new_eye = create_op_with_const_inputs(graph, Eye, {0: int64_array(num_rows), - 1: int64_array(num_columns), - 2: int64_array(diagonal_index)}, - {'name': original_name + '/Gathered', - 'output_type': output_type}) - mxeye.out_port(0).get_connection().set_source(new_eye.out_port(0)) - graph.remove_node(mxeye.id) diff --git a/tools/mo/openvino/tools/mo/front/mxnet/fft_ext.py b/tools/mo/openvino/tools/mo/front/mxnet/fft_ext.py deleted file mode 100644 index e9c6df282238a1..00000000000000 --- a/tools/mo/openvino/tools/mo/front/mxnet/fft_ext.py +++ /dev/null @@ -1,25 +0,0 @@ -# Copyright (C) 2018-2024 Intel Corporation -# SPDX-License-Identifier: Apache-2.0 - -from openvino.tools.mo.ops.mxfft import MXFFT -from openvino.tools.mo.front.extractor import FrontExtractorOp - - -class FFTFrontExtractor(FrontExtractorOp): - op = 'fft' - enabled = True - - @classmethod - def extract(cls, node): - MXFFT.update_node_stat(node, {'is_inverse': False}) - return cls.enabled - - -class IFFTFrontExtractor(FrontExtractorOp): - op = 'ifft' - enabled = True - - @classmethod - def extract(cls, node): - MXFFT.update_node_stat(node, {'is_inverse': True}) - return cls.enabled diff --git a/tools/mo/openvino/tools/mo/front/mxnet/flatten_ext.py b/tools/mo/openvino/tools/mo/front/mxnet/flatten_ext.py deleted file mode 100644 index 1c792f378df8ea..00000000000000 --- a/tools/mo/openvino/tools/mo/front/mxnet/flatten_ext.py +++ /dev/null @@ -1,20 +0,0 @@ -# Copyright (C) 2018-2024 Intel Corporation -# SPDX-License-Identifier: Apache-2.0 - -from openvino.tools.mo.front.extractor import FrontExtractorOp -from openvino.tools.mo.ops.flatten import Flatten - - -class FlattenFrontExtractor(FrontExtractorOp): - op = 'Flatten' - enabled = True - - @classmethod - def extract(cls, node): - attrs = { - 'axis': 1, - 'end_axis': -1, - } - - Flatten.update_node_stat(node, attrs) - return cls.enabled diff --git a/tools/mo/openvino/tools/mo/front/mxnet/fully_connected_ext.py b/tools/mo/openvino/tools/mo/front/mxnet/fully_connected_ext.py deleted file mode 100644 index 74d69ae0f57a0d..00000000000000 --- a/tools/mo/openvino/tools/mo/front/mxnet/fully_connected_ext.py +++ /dev/null @@ -1,24 +0,0 @@ -# Copyright (C) 2018-2024 Intel Corporation -# SPDX-License-Identifier: Apache-2.0 - -from openvino.tools.mo.ops.MatMul import FullyConnected -from openvino.tools.mo.front.extractor import FrontExtractorOp -from openvino.tools.mo.front.mxnet.extractors.utils import get_mxnet_layer_attrs - - -class FullyConnectedFrontExtractor(FrontExtractorOp): - op = 
'FullyConnected' - enabled = True - - @classmethod - def extract(cls, node): - attr = get_mxnet_layer_attrs(node.symbol_dict) - num_hidden = attr.int('num_hidden', None) - assert num_hidden is not None, "{} node with no `num_hidden` parameter found".format(cls.op) - attrs = { - 'out-size': num_hidden, - 'transpose_weights': True, - 'flatten': attr.bool('flatten', True) - } - FullyConnected.update_node_stat(node, attrs) - return cls.enabled diff --git a/tools/mo/openvino/tools/mo/front/mxnet/gather.py b/tools/mo/openvino/tools/mo/front/mxnet/gather.py deleted file mode 100644 index ff540b6f0d25e4..00000000000000 --- a/tools/mo/openvino/tools/mo/front/mxnet/gather.py +++ /dev/null @@ -1,23 +0,0 @@ -# Copyright (C) 2018-2024 Intel Corporation -# SPDX-License-Identifier: Apache-2.0 - -from openvino.tools.mo.ops.gather import Gather -from openvino.tools.mo.front.common.partial_infer.utils import int64_array -from openvino.tools.mo.front.common.replacement import FrontReplacementOp -from openvino.tools.mo.front.tf.graph_utils import create_op_with_const_inputs -from openvino.tools.mo.graph.graph import Graph - - -class GatherFrontReplacer(FrontReplacementOp): - op = 'Embedding' - enabled = True - - def replace_sub_graph(self, graph: Graph, match: dict): - node = match['op'] - - gather_node = create_op_with_const_inputs(graph, Gather, {2: int64_array(0)}, - {'name': node.soft_get('name', node.id) + '/embedding_'}) - - node.in_port(0).get_connection().set_destination(gather_node.in_port(1)) - node.in_port(1).get_connection().set_destination(gather_node.in_port(0)) - node.out_port(0).get_connection().set_source(gather_node.out_port(0)) diff --git a/tools/mo/openvino/tools/mo/front/mxnet/gather_ext.py b/tools/mo/openvino/tools/mo/front/mxnet/gather_ext.py deleted file mode 100644 index 536eecb7d30253..00000000000000 --- a/tools/mo/openvino/tools/mo/front/mxnet/gather_ext.py +++ /dev/null @@ -1,13 +0,0 @@ -# Copyright (C) 2018-2024 Intel Corporation -# SPDX-License-Identifier: Apache-2.0 - -from openvino.tools.mo.front.extractor import FrontExtractorOp - - -class GatherFrontExtractor(FrontExtractorOp): - op = 'Embedding' - enabled = True - - @classmethod - def extract(cls, node): - return cls.enabled diff --git a/tools/mo/openvino/tools/mo/front/mxnet/gluoncv_ssd_anchors.py b/tools/mo/openvino/tools/mo/front/mxnet/gluoncv_ssd_anchors.py deleted file mode 100644 index da2dac419d800a..00000000000000 --- a/tools/mo/openvino/tools/mo/front/mxnet/gluoncv_ssd_anchors.py +++ /dev/null @@ -1,112 +0,0 @@ -# Copyright (C) 2018-2024 Intel Corporation -# SPDX-License-Identifier: Apache-2.0 - -from typing import Dict - -from openvino.tools.mo.front.mxnet.mx_reshape_to_reshape import MXReshapeToReshape -from openvino.tools.mo.front.mxnet.ssd_detection_output_replacer import SsdPatternDetectionOutputReplacer -from openvino.tools.mo.ops.elementwise import Div, Add, Sub -from openvino.tools.mo.ops.split import Split -from openvino.tools.mo.front.common.partial_infer.utils import int64_array, mo_array -from openvino.tools.mo.front.common.replacement import FrontReplacementPattern -from openvino.tools.mo.front.tf.graph_utils import create_op_node_with_second_input -from openvino.tools.mo.graph.graph import Graph, Node -from openvino.tools.mo.graph.port import Port -from openvino.tools.mo.middle.passes.convert_data_type import data_type_str_to_np -from openvino.tools.mo.ops.concat import Concat -from openvino.tools.mo.ops.reshape import Reshape - - -def calculate_prior_box_value(value: Node, value_to_div: Port, 
value_to_add: Port): - """ - :param value: Node with value. Here is supposed the node with op='Split' - :param value_to_div: Output port with values to be divided by 2 - :param value_to_add: Output port with values to be added to values from value_to_div port - :return: Sub and Add nodes - - The sub-graph can be described by formulas: - min = value[value_to_add] - (value[value_to_div] / 2) - max = value[value_to_add] + (value[value_to_div] / 2) - """ - graph = value.graph - dtype = data_type_str_to_np(graph.graph['cmd_params'].data_type) - _min = Sub(graph, dict(name=value.name + '/Sub')).create_node() - div = create_op_node_with_second_input(graph, Div, mo_array([2], dtype=dtype), op_attrs=dict(name=value.name + '/Div')) - div.in_port(0).connect(value_to_div) - _min.in_port(0).connect(value_to_add) - _min.in_port(1).connect(div.out_port(0)) - - _max = Add(graph, dict(name=value.name + '/Add')).create_node() - _max.in_port(0).connect(div.out_port(0)) - _max.in_port(1).connect(value_to_add) - - return _min, _max - - -class SsdAnchorsReplacer(FrontReplacementPattern): - """ - Replacing sub-graph with all anchors to sub-graph which calculates prior boxes values by formulas: - - value[i] = xmin = value[i] - (value[i + 2] / 2) - value[i + 1] = ymin = value[i + 1] - (value[i + 3] / 2) - value[i + 2] = xmax = value[i] + (value[i + 2] / 2) - value[i + 3] = ymax = value[i + 1] + (value[i + 3] / 2) - """ - - enabled = True - graph_condition = [lambda graph: graph.graph['cmd_params'].enable_ssd_gluoncv] - - def run_after(self): - return [SsdPatternDetectionOutputReplacer, MXReshapeToReshape] - - def pattern(self): - return dict( - nodes=[ - ('slice_like', dict(op='slice_like')), - ('reshape0', dict(op='Reshape')), - ('reshape1', dict(op='Reshape')), - ('reshape2', dict(op='Reshape')), - ('reshape3', dict(op='Reshape')), - ('concat', dict(op='Concat')), - ('detection_output', dict(op='DetectionOutput')) - ], - edges=[ - ('slice_like', 'reshape0'), - ('reshape0', 'reshape1'), - ('reshape1', 'reshape2'), - ('reshape2', 'reshape3'), - ('reshape3', 'concat', {'in': 0}), - ('concat', 'detection_output', {'in': 2}) - ]) - - def replace_pattern(self, graph: Graph, match: Dict[str, Node]): - concat_node = match['concat'] - concat_node['axis'] = 1 - concat_name = concat_node.soft_get('name', concat_node.id) - - concat_reshape = create_op_node_with_second_input(graph, Reshape, int64_array([1, 2, -1]), op_attrs=dict( - name=concat_name + '/Reshape')) - split_node = create_op_node_with_second_input(graph, Split, int64_array(1), op_attrs=dict( - name=concat_name + '/Split', num_splits=2), input_node=concat_reshape) - split_node_reshape = create_op_node_with_second_input(graph, Reshape, int64_array([-1, 4]), op_attrs=dict( - name=split_node.name + '/Reshape')) - split_node.out_port(0).connect(split_node_reshape.in_port(0)) - value = create_op_node_with_second_input(graph, Split, int64_array(1), op_attrs=dict( - name=split_node_reshape.name + '/Split', num_splits=4), input_node=split_node_reshape) - - xmin, xmax = calculate_prior_box_value(value, value_to_div=value.out_port(2), value_to_add=value.out_port(0)) - ymin, ymax = calculate_prior_box_value(value, value_to_div=value.out_port(3), value_to_add=value.out_port(1)) - - concat_slice_value = Concat(graph, dict(name=value.name + '/Concat', in_ports_count=4, axis=1)).create_node() - for ind, node in enumerate([xmin, ymin, xmax, ymax]): - concat_slice_value.in_port(ind).connect(node.out_port(0)) - - reshape_concat_values = create_op_node_with_second_input(graph, 
Reshape, int64_array([1, 1, -1]), - op_attrs=dict(name=concat_slice_value.name + '/Reshape'), - input_node=concat_slice_value) - concat = Concat(graph, dict(name=reshape_concat_values.name + '/Concat', in_ports_count=2, axis=1)).create_node() - concat.in_port(0).connect(reshape_concat_values.out_port(0)) - concat.in_port(1).connect(split_node.out_port(1)) - - match['detection_output'].in_port(2).get_connection().set_source(concat.out_port(0)) - concat_node.out_port(0).get_connection().set_destination(concat_reshape.in_port(0)) diff --git a/tools/mo/openvino/tools/mo/front/mxnet/instance_norm_ext.py b/tools/mo/openvino/tools/mo/front/mxnet/instance_norm_ext.py deleted file mode 100644 index 898a6ad0416b49..00000000000000 --- a/tools/mo/openvino/tools/mo/front/mxnet/instance_norm_ext.py +++ /dev/null @@ -1,22 +0,0 @@ -# Copyright (C) 2018-2024 Intel Corporation -# SPDX-License-Identifier: Apache-2.0 - -from openvino.tools.mo.ops.instance_normalization import InstanceNormalization -from openvino.tools.mo.front.extractor import FrontExtractorOp -from openvino.tools.mo.front.mxnet.extractors.utils import get_mxnet_layer_attrs -from openvino.tools.mo.graph.graph import Node - - -class InstanceNormFrontExtractor(FrontExtractorOp): - op = 'InstanceNorm' - enabled = True - - @classmethod - def extract(cls, node: Node): - attr = get_mxnet_layer_attrs(node.symbol_dict) - node_attrs = { - 'epsilon': attr.float('eps', 0.001) - } - - InstanceNormalization.update_node_stat(node, node_attrs) - return cls.enabled diff --git a/tools/mo/openvino/tools/mo/front/mxnet/layer_norm_ext.py b/tools/mo/openvino/tools/mo/front/mxnet/layer_norm_ext.py deleted file mode 100644 index 33669cb28da350..00000000000000 --- a/tools/mo/openvino/tools/mo/front/mxnet/layer_norm_ext.py +++ /dev/null @@ -1,23 +0,0 @@ -# Copyright (C) 2018-2024 Intel Corporation -# SPDX-License-Identifier: Apache-2.0 -from openvino.tools.mo.front.extractor import FrontExtractorOp -from openvino.tools.mo.front.mxnet.extractors.utils import get_mxnet_layer_attrs -from openvino.tools.mo.graph.graph import Node -from openvino.tools.mo.ops.layer_norm import LayerNorm - - -class LayerNormFrontExtractor(FrontExtractorOp): - op = 'LayerNorm' - enabled = True - - @classmethod - def extract(cls, node: Node): - attr = get_mxnet_layer_attrs(node.symbol_dict) - - node_attrs = { - 'epsilon': attr.float('eps', 9.99999975e-06), - 'axis': attr.int('axis', -1), - 'output_mean_var': attr.bool('output_mean_var', False) - } - LayerNorm.update_node_stat(node, node_attrs) - return cls.enabled diff --git a/tools/mo/openvino/tools/mo/front/mxnet/leaky_relu.py b/tools/mo/openvino/tools/mo/front/mxnet/leaky_relu.py deleted file mode 100644 index 1489def4bccef3..00000000000000 --- a/tools/mo/openvino/tools/mo/front/mxnet/leaky_relu.py +++ /dev/null @@ -1,49 +0,0 @@ -# Copyright (C) 2018-2024 Intel Corporation -# SPDX-License-Identifier: Apache-2.0 - -from openvino.tools.mo.ops.activation_ops import Elu, LeakyReLU, ReLU -from openvino.tools.mo.ops.gelu import GeLUOP -from openvino.tools.mo.ops.prelu import PReLU -from openvino.tools.mo.front.extractor import FrontExtractorOp -from openvino.tools.mo.front.mxnet.extractors.utils import get_mxnet_layer_attrs -from openvino.tools.mo.utils.error import Error -from openvino.tools.mo.utils.utils import refer_to_faq_msg - - -class LeakyReLUFrontExtractor(FrontExtractorOp): - op = 'LeakyReLU' - enabled = True - - @classmethod - def extract(cls, node): - attrs = get_mxnet_layer_attrs(node.symbol_dict) - act_type = attrs.str('act_type', 
'leaky') - if act_type == 'prelu': - prelu_attrs = {'channel_shared': 1, - 'filler_type': 'constant', - 'filler_value': 0, - 'min': 0, - 'max': 1, - 'mean': 0, - 'std': 0, - 'sparse': -1, - 'variance_norm': "caffe.FillerParameter.FAN_IN"} - PReLU.update_node_stat(node, prelu_attrs) - elif act_type == 'elu': - alpha = attrs.float('slope', 0.25) - Elu.update_node_stat(node, {'alpha': alpha}) - elif act_type == 'leaky': - negative_slope = attrs.float('slope', 0.25) - if negative_slope == 0: - ReLU.update_node_stat(node) - else: - LeakyReLU.update_node_stat(node, {'negative_slope': negative_slope}) - elif act_type == 'gelu': - GeLUOP.update_node_stat(node, {'approximation_mode': 'erf'}) - else: - raise Error( - "Operation '{}' not supported. Please register it as custom op. " + - refer_to_faq_msg(86), - act_type) - - return LeakyReLUFrontExtractor.enabled diff --git a/tools/mo/openvino/tools/mo/front/mxnet/loader.py b/tools/mo/openvino/tools/mo/front/mxnet/loader.py deleted file mode 100644 index 9185d6d34b2045..00000000000000 --- a/tools/mo/openvino/tools/mo/front/mxnet/loader.py +++ /dev/null @@ -1,148 +0,0 @@ -# Copyright (C) 2018-2024 Intel Corporation -# SPDX-License-Identifier: Apache-2.0 - -import json -import logging as log -import os - -import mxnet as mx -import numpy as np - -from openvino.tools.mo.front.common.partial_infer.utils import mo_array -from openvino.tools.mo.front.extractor import add_outputs_identity -from openvino.tools.mo.front.mxnet.extractor import common_mxnet_fields -from openvino.tools.mo.front.mxnet.extractors.utils import get_mxnet_node_edges, load_params, init_rnn_states, create_mxnet_edge -from openvino.tools.mo.front.mxnet.nd_to_params import build_params_file -from openvino.tools.mo.graph.graph import Node, Graph -from openvino.tools.mo.utils.error import Error -from openvino.tools.mo.utils.utils import refer_to_faq_msg - - -def load_symbol_nodes(model_name, input_symbol: str = None, legacy_mxnet_model: bool = False): - if input_symbol: - json_name = input_symbol - if legacy_mxnet_model: - log.warning('If you use --input_symbol with legacy MXNet models be sure that symbol and param names ' + - 'have correct format supported by MXNet') - else: - json_name = '%s-symbol.json' % model_name - input_symbol = json_name - - if legacy_mxnet_model and (input_symbol == json_name): - log.warning('For legacy MXNet models Model Optimizer does not support conversion of old MXNet models' + - '(trained with 1.0.0 version of MXNet and lower) with custom layers. ' + - refer_to_faq_msg(93)) - sym = mx.symbol.load(json_name) - model_nodes = json.loads(sym.tojson()) - else: - if os.path.isfile(json_name): - with open(json_name, 'r') as fd: - model_nodes = json.load(fd) - else: - raise Error('Specified input json {} does not exist. 
' + - refer_to_faq_msg(84), json_name) - - return model_nodes['nodes'] - - -def parse_input_model(input_model): - path_wo_ext = '.'.join(input_model.split('.')[:-1]) - model_name_w_iter = path_wo_ext.split(os.sep)[-1] - iteration_number = int(model_name_w_iter.split('-')[-1]) - model_name = '-'.join(path_wo_ext.split('-')[:-1]) - return model_name, iteration_number - - -def load_symbol_def(input_model_name, input_symbol, input_names: str = '', nd_prefix_name: str = '', - pretrained_model_name: str = '', legacy_mxnet_model: bool = False): - if not nd_prefix_name and not pretrained_model_name: - # model name always has extension 'param' - try: - model_name, iteration_number = parse_input_model(input_model_name) - except ValueError as err: - raise Error( - 'Input model name {} is not in an expected format, cannot extract iteration number. ' + - refer_to_faq_msg(48), - input_model_name) - - if input_names: - model_params = load_params(input_model_name, data_names=input_names.split(',')) - else: - model_params = load_params(input_model_name) - - elif nd_prefix_name and pretrained_model_name and input_symbol: - model_name, iteration_number = parse_input_model(pretrained_model_name) - model_name = '-'.join(input_symbol.split('-')[:-1]) - model_params = build_params_file(nd_prefix_name, pretrained_model_name, input_names) - else: - raise Error( - "Arguments --nd_prefix_name, --pretrained_model_name and --input_symbol should be provided. Please provide all or do not use any. " + - refer_to_faq_msg(81)) - - model_nodes = load_symbol_nodes(model_name, input_symbol, legacy_mxnet_model) - - return model_nodes, model_params, model_name, iteration_number - - -def symbol_attrs(symbol_node): - return {'symbol_dict': symbol_node} - - -def symbol2nx(graph, model_nodes, model_params, input_names: str = ''): - if not input_names: - input_names = ('data',) - else: - input_names = input_names.split(',') - - graph.inputs_order = input_names - - rnn_states = init_rnn_states(model_nodes) - names_rnn_states = list(rnn_states.keys()) - - # as mxnet contain input layers as index of layer, for correct set up edges, we need provide index of layer with name of graph node - index_node_keys = {} - fw_name_map = {} - for i, node in enumerate(model_nodes): - if node['name'] in model_params._arg_params and node['name'] not in input_names: - node['value'] = mo_array(model_params._arg_params[node['name']].asnumpy(), dtype=np.float32) - elif node['name'] in model_params._aux_params and node['name'] not in input_names: - node['value'] = mo_array(model_params._aux_params[node['name']].asnumpy(), dtype=np.float32) - elif node['name'] in names_rnn_states: - node['value'] = np.zeros(rnn_states[node['name']], dtype=np.float32) - node_name = graph.unique_id(node['name']) - graph.add_node(node_name, **symbol_attrs(node)) - if hasattr(graph, 'op_names_statistic') and 'op' in node: - if node['op'] != 'null': - graph.op_names_statistic[node['op']] += 1 - graph.node[node_name].update(common_mxnet_fields(Node(graph, node_name))) - index_node_keys[i] = node_name - fw_name_map[node_name] = node['name'] - - used_indices_set = set() - for i, attrs in enumerate(model_nodes): - node = attrs - edges, used_indices = get_mxnet_node_edges(node, i, list(model_nodes), index_node_keys) - if len(edges) > 0: - graph.add_edges_from(edges) - used_indices_set = used_indices_set.union(used_indices) - - output_ids = [index_node_keys[node_id] for node_id in set(range(len(model_nodes))) - used_indices_set] - - graph.outputs_order = output_ids - - # Tensor names 
information corresponding to a node is stored on outgoing edges. - # As output nodes do not have outgoing edges, fake outputs are required. In the following code - # for each output Identity node is added, and tensor name for the output is kept - # on (output, fake output) edge. After Result nodes adding transformation fake outputs - # are deleted from graph. - add_outputs_identity(graph, output_ids, lambda g, output_id, fake_node_id, fw_name: g.add_edges_from([ - create_mxnet_edge(output_id, fake_node_id, 0, 0, fw_name[output_id])]), {'fw_name': fw_name_map}) - - return graph - - -def find_output_node(graph: Graph, src_input_index): - for i, attrs in (list(graph.nodes(data=True))[src_input_index + 1:]): - for input_index in attrs['symbol_dict']['inputs']: - if input_index[0] == src_input_index: - return i diff --git a/tools/mo/openvino/tools/mo/front/mxnet/lrn_ext.py b/tools/mo/openvino/tools/mo/front/mxnet/lrn_ext.py deleted file mode 100644 index 656ec9101a37a3..00000000000000 --- a/tools/mo/openvino/tools/mo/front/mxnet/lrn_ext.py +++ /dev/null @@ -1,28 +0,0 @@ -# Copyright (C) 2018-2024 Intel Corporation -# SPDX-License-Identifier: Apache-2.0 - -from openvino.tools.mo.front.extractor import FrontExtractorOp -from openvino.tools.mo.front.mxnet.extractors.utils import get_mxnet_layer_attrs -from openvino.tools.mo.ops.lrn import AttributedLRN - - -class LRNExtractor(FrontExtractorOp): - op = 'LRN' - enabled = True - - @classmethod - def extract(cls, node): - attrs = get_mxnet_layer_attrs(node.symbol_dict) - - alpha = attrs.float("alpha", 0.0001) - beta = attrs.float("beta", 0.75) - knorm = attrs.float("knorm", 2.0) - nsize = attrs.int("nsize", None) - - AttributedLRN.update_node_stat(node, { - 'alpha': alpha, - 'beta': beta, - 'bias': knorm, - 'local_size': nsize, - }) - return cls.enabled diff --git a/tools/mo/openvino/tools/mo/front/mxnet/max_ext.py b/tools/mo/openvino/tools/mo/front/mxnet/max_ext.py deleted file mode 100644 index 80508fb62d6049..00000000000000 --- a/tools/mo/openvino/tools/mo/front/mxnet/max_ext.py +++ /dev/null @@ -1,18 +0,0 @@ -# Copyright (C) 2018-2024 Intel Corporation -# SPDX-License-Identifier: Apache-2.0 - -from openvino.tools.mo.ops.ReduceOps import ReduceMax -from openvino.tools.mo.front.common.partial_infer.utils import int64_array -from openvino.tools.mo.front.extractor import FrontExtractorOp -from openvino.tools.mo.front.mxnet.extractors.utils import get_mxnet_layer_attrs - - -class MaxFrontExtractor(FrontExtractorOp): - op = 'max' - enabled = True - - @classmethod - def extract(cls, node): - attrs = get_mxnet_layer_attrs(node.symbol_dict) - ReduceMax.update_node_stat(node, {'axis': int64_array([attrs.int('axis', 0)]), 'keep_dims': False}) - return cls.enabled diff --git a/tools/mo/openvino/tools/mo/front/mxnet/modulated_deformable_conv_ext.py b/tools/mo/openvino/tools/mo/front/mxnet/modulated_deformable_conv_ext.py deleted file mode 100644 index bedcf967c089e9..00000000000000 --- a/tools/mo/openvino/tools/mo/front/mxnet/modulated_deformable_conv_ext.py +++ /dev/null @@ -1,62 +0,0 @@ -# Copyright (C) 2018-2024 Intel Corporation -# SPDX-License-Identifier: Apache-2.0 - -import numpy as np - -from openvino.tools.mo.front.common.partial_infer.utils import int64_array -from openvino.tools.mo.front.extractor import FrontExtractorOp -from openvino.tools.mo.front.mxnet.extractors.utils import get_mxnet_layer_attrs -from openvino.tools.mo.ops.deformable_convolution import DeformableConvolution - - -class ModulatedDeformableConvolutionExtractor(FrontExtractorOp): 
- op = '_contrib_ModulatedDeformableConvolution' - enabled = True - - @classmethod - def extract(cls, node): - attr = get_mxnet_layer_attrs(node.symbol_dict) - - kernel = attr.tuple("kernel", int, None) - stride = attr.tuple("stride", int, tuple(np.ones(len(kernel), dtype=np.int64))) - padding = attr.tuple("pad", int, tuple(np.zeros(len(kernel), dtype=np.int64))) - dilate = attr.tuple("dilate", int, tuple(np.ones(len(kernel), dtype=np.int64))) - num_deformable_group = attr.int("num_deformable_group", 1) - num_group = attr.int("num_group", 1) - output = attr.int("num_filter", None) - bias_term = attr.str("no_bias", 'False') == 'False' - - final_dilations = int64_array([1, 1, *[d for d in dilate]]) if dilate is not None else None - - node_attrs = { - 'op': __class__.op, - 'bias_addable': True, - 'bias_term': bias_term, - 'pad': int64_array([[0, 0], [0, 0], *[[pad, pad] for pad in padding]]), - 'pad_spatial_shape': int64_array([[pad, pad] for pad in padding]), - 'dilation': final_dilations, - 'output_spatial_shape': None, - 'output_shape': None, - 'stride': int64_array([1, 1, *[s for s in stride]]), - 'group': num_group, - 'deformable_group': num_deformable_group, - 'output': output, - 'kernel_spatial': int64_array([k for k in kernel]), - 'bilinear_interpolation_pad': True, - - 'input_feature_channel': 1, - 'output_feature_channel': 0, - 'kernel_spatial_idx': None, - 'reshape_kernel': True, - 'weights_index': 2, - 'in_ports_count': 4, - - 'spatial_dims': None, - 'channel_dims': int64_array([1]), - 'batch_dims': int64_array([0]), - 'layout': 'NCHW', - } - - # update the attributes of the node - DeformableConvolution.update_node_stat(node, node_attrs) - return cls.enabled diff --git a/tools/mo/openvino/tools/mo/front/mxnet/modulated_deformable_conv_replacer.py b/tools/mo/openvino/tools/mo/front/mxnet/modulated_deformable_conv_replacer.py deleted file mode 100644 index 7c1ff3311bfaa3..00000000000000 --- a/tools/mo/openvino/tools/mo/front/mxnet/modulated_deformable_conv_replacer.py +++ /dev/null @@ -1,22 +0,0 @@ -# Copyright (C) 2018-2024 Intel Corporation -# SPDX-License-Identifier: Apache-2.0 - -from openvino.tools.mo.front.common.replacement import FrontReplacementPattern -from openvino.tools.mo.graph.graph import Graph - - -class DeformableConvolutionReplacer(FrontReplacementPattern): - # swap mask and weights inputs for ModulatedDeformableConvolution according to the specification - enabled = True - - def find_and_replace_pattern(self, graph: Graph): - - for deform_conv in graph.get_op_nodes(type='DeformableConvolution'): - if len(deform_conv.get_inputs()) != 4: - return - - m_source = deform_conv.in_port(2).get_source() - deform_conv.in_port(2).disconnect() - - deform_conv.in_port(3).get_connection().set_destination(deform_conv.in_port(2)) - m_source.connect(deform_conv.in_port(3)) diff --git a/tools/mo/openvino/tools/mo/front/mxnet/multibox_detection_ext.py b/tools/mo/openvino/tools/mo/front/mxnet/multibox_detection_ext.py deleted file mode 100644 index d790f4e60c6af4..00000000000000 --- a/tools/mo/openvino/tools/mo/front/mxnet/multibox_detection_ext.py +++ /dev/null @@ -1,39 +0,0 @@ -# Copyright (C) 2018-2024 Intel Corporation -# SPDX-License-Identifier: Apache-2.0 - -from openvino.tools.mo.ops.DetectionOutput import DetectionOutput -from openvino.tools.mo.front.extractor import FrontExtractorOp -from openvino.tools.mo.front.mxnet.extractors.utils import get_mxnet_layer_attrs - - -class MultiBoxDetectionOutputExtractor(FrontExtractorOp): - op = '_contrib_MultiBoxDetection' - enabled = 
True - - @classmethod - def extract(cls, node): - attrs = get_mxnet_layer_attrs(node.symbol_dict) - top_k = attrs.int("nms_topk", -1) - nms_threshold = attrs.float("nms_threshold", 0.5) - confidence_threshold = attrs.float("threshold", 0.01) - clip = 0 if not attrs.bool("clip", True) else 1 - - node_attrs = { - 'type': 'DetectionOutput', - 'op': __class__.op, - 'keep_top_k': top_k, - 'variance_encoded_in_target': 0, - 'code_type': "caffe.PriorBoxParameter.CENTER_SIZE", - 'share_location': 1, - 'confidence_threshold': confidence_threshold, - 'background_label_id': 0, - 'nms_threshold': nms_threshold, - 'top_k': top_k, - 'decrease_label_id': 1, - 'clip_before_nms': clip, - 'normalized': 1, - } - - DetectionOutput.update_node_stat(node, node_attrs) - - return cls.enabled diff --git a/tools/mo/openvino/tools/mo/front/mxnet/mx_reshape_reverse.py b/tools/mo/openvino/tools/mo/front/mxnet/mx_reshape_reverse.py deleted file mode 100644 index eca8a2b10c6982..00000000000000 --- a/tools/mo/openvino/tools/mo/front/mxnet/mx_reshape_reverse.py +++ /dev/null @@ -1,77 +0,0 @@ -# Copyright (C) 2018-2024 Intel Corporation -# SPDX-License-Identifier: Apache-2.0 - -import numpy as np - -from openvino.tools.mo.front.mxnet.mx_reshape_to_reshape import MXReshapeToReshape -from openvino.tools.mo.ops.Reverse import Reverse -from openvino.tools.mo.ops.mxreshape import MXReshape -from openvino.tools.mo.front.common.partial_infer.utils import int64_array -from openvino.tools.mo.front.common.replacement import FrontReplacementOp -from openvino.tools.mo.front.tf.graph_utils import create_op_node_with_second_input -from openvino.tools.mo.graph.graph import Graph -from openvino.tools.mo.ops.reshape import Reshape -from openvino.tools.mo.ops.shape import Shape -from openvino.tools.mo.ops.squeeze import Squeeze -from openvino.tools.mo.ops.unsqueeze import Unsqueeze - - -class MXReshapeReverse(FrontReplacementOp): - """ - If reshape layer with reverse True, special values will inferred from right to left. - The Replacer simulate the behavior. The replaced subgraph reverse input data and special dims, - and after reshape reverse output result to backward. - Resulting subgraph: reshape(reverse=True) -> reverse - reshape(reverse=False) -reverse subgraph. 
- """ - op = 'MXReshape' - enabled = True - - def run_before(self): - return [MXReshapeToReshape] - - def replace_sub_graph(self, graph: Graph, match: dict): - mxreshape = match['op'] - if not mxreshape.reverse: - return - - shape_node = Shape(graph, dict(name=mxreshape.id + '/Shape')).create_node() - forward_reverse_unsqueeze_node = create_op_node_with_second_input(graph, Unsqueeze, int64_array([0]), - dict(name=str(mxreshape.id) + '/ForwardUnsqueeze')) - forward_reverse_node = Reverse(graph, dict(name=mxreshape.id + '/ForwardReverse', axis=1)).create_node() - - forward_reverse_squeeze_node = create_op_node_with_second_input(graph, Squeeze, int64_array([0]), - dict(name=str(mxreshape.id) + '/ForwardSqueeze')) - reshape_node = Reshape(graph, dict(name=mxreshape.id + '/Reshape')).create_node() - shape_node.in_port(0).connect(mxreshape.in_port(0).get_source()) - mxreshape.in_port(0).get_connection().set_destination(reshape_node.in_port(0)) - - forward_reverse_unsqueeze_node.in_port(0).connect(shape_node.out_port(0)) - forward_reverse_node.in_port(0).connect(forward_reverse_unsqueeze_node.out_port(0)) - forward_reverse_squeeze_node.in_port(0).connect(forward_reverse_node.out_port(0)) - reshape_node.in_port(1).connect(forward_reverse_squeeze_node.out_port(0)) - - reshape_shape_node = create_op_node_with_second_input(graph, Reshape, int64_array(np.flip(mxreshape.dim, 0)), - dict(name=str(mxreshape.id) + '/ReshapeShape')) - if np.sum(np.in1d([-2, -3, -4], mxreshape.dim), axis=0): - reshape_shape_node = MXReshape(graph, dict(name=mxreshape.id + '/Reshape', - dim=int64_array(np.flip(mxreshape.dim, 0)))).create_node() - - reshape_shape_node.in_port(0).connect(reshape_node.out_port(0)) - - backward_shape_node = Shape(graph, dict(name=mxreshape.id + '/BackwardShape')).create_node() - backward_reverse_unsqueeze_node = create_op_node_with_second_input(graph, Unsqueeze, int64_array([0]), - dict(name=str(mxreshape.id) + '/BackwardUnsqueeze')) - backward_reverse_node = Reverse(graph, dict(name=mxreshape.id + '/BackwardReverse', axis=1)).create_node() - backward_reverse_squeeze_node = create_op_node_with_second_input(graph, Squeeze, int64_array([0]), - dict(name=str(mxreshape.id) + '/BackwardSqueeze')) - backward_reshape_node = Reshape(graph, dict(name=mxreshape.id + '/BackwardReshape')).create_node() - - backward_shape_node.in_port(0).connect(reshape_shape_node.out_port(0)) - backward_reverse_unsqueeze_node.in_port(0).connect(backward_shape_node.out_port(0)) - backward_reverse_node.in_port(0).connect(backward_reverse_unsqueeze_node.out_port(0)) - backward_reverse_squeeze_node.in_port(0).connect(backward_reverse_node.out_port(0)) - - backward_reshape_node.in_port(0).connect(reshape_shape_node.out_port(0)) - backward_reshape_node.in_port(1).connect(backward_reverse_squeeze_node.out_port(0)) - - mxreshape.out_port(0).get_connection().set_source(backward_reshape_node.out_port(0)) diff --git a/tools/mo/openvino/tools/mo/front/mxnet/mx_reshape_to_reshape.py b/tools/mo/openvino/tools/mo/front/mxnet/mx_reshape_to_reshape.py deleted file mode 100644 index 7bb48b8c699d2c..00000000000000 --- a/tools/mo/openvino/tools/mo/front/mxnet/mx_reshape_to_reshape.py +++ /dev/null @@ -1,115 +0,0 @@ -# Copyright (C) 2018-2024 Intel Corporation -# SPDX-License-Identifier: Apache-2.0 - -import numpy as np - -from openvino.tools.mo.ops.elementwise import Mul -from openvino.tools.mo.front.common.partial_infer.utils import int64_array -from openvino.tools.mo.front.common.replacement import FrontReplacementSubgraph -from 
openvino.tools.mo.graph.graph import Graph -from openvino.tools.mo.ops.concat import Concat -from openvino.tools.mo.ops.const import Const -from openvino.tools.mo.ops.reshape import Reshape -from openvino.tools.mo.ops.shape import Shape -from openvino.tools.mo.utils.shape import get_shape_values_by_indices_node, get_shape_values_by_range_idxs - - -class MXReshapeToReshape(FrontReplacementSubgraph): - """ - Generate subgraph which is equivalent for transform of -2 -3 or -4 options in reshape dims attribute. - -2 copy all/remainder of the input dimensions to the output shape. - Example: input shape = (2,3,4), shape = (2,-2,1,1), output shape = (2,3,4,1,1) - -3 use the product of two consecutive dimensions of the input shape as the output dimension. - Example: input shape = (2,3,4), shape = (-3,4), output shape = (6,4) - -4 split one dimension of the input into two dimensions passed subsequent to -4 in shape (can contain -1). - Example: input shape = (2,3,4), shape = (-4,1,2,-2), output shape =(1,2,3,4) - """ - enabled = True - - def pattern(self): - return dict( - nodes=[ - ('mxreshape', dict(op='MXReshape', dim=lambda node_dim: node_dim is not None and len(set(node_dim).intersection({-2, -3, -4})) != 0, reverse=False)) - ], - edges=[] - ) - - def resolve_handlers(self, case): - return { - -2: self.resolve_minus2, - -3: self.resolve_minus3, - -4: self.resolve_minus4, - }.get(case, self.resolve_const_shape) - - def resolve_minus2(self, shape_node, input_index, reshape_index, dims): - rank_node = Shape(shape_node.graph, dict(name=shape_node.id + '/RankShapeMXReshapeMinus2')).create_node() - rank_node.in_port(0).connect(shape_node.out_port(0)) - shape_values_node = get_shape_values_by_range_idxs(shape=shape_node, rank=rank_node, - begin=input_index, end=-1, - include_begin=True, include_end=True) - input_index = None - reshape_index = reshape_index + 1 - return input_index, reshape_index, dims, shape_values_node - - def resolve_minus3(self, shape_node, input_index, reshape_index, dims): - shape_indexes_node1 = Const(shape_node.graph, dict(name=shape_node.id + '/ShapeMinus3_index_const1_' + str(input_index), - value=int64_array([input_index]))).create_node() - dims_node1 = get_shape_values_by_indices_node(shape_node, shape_indexes_node1) - - shape_indexes_node2 = Const(shape_node.graph, dict(name=shape_node.id + '/ShapeMinus3_index_const2_' + str(input_index), - value=int64_array([input_index + 1]))).create_node() - dims_node2 = get_shape_values_by_indices_node(shape_node, shape_indexes_node2) - - mul_node = Mul(shape_node.graph, dict(name=shape_node.id + '/MulMinus3_' + str(input_index))).create_node() - - mul_node.in_port(0).connect(dims_node1.out_port(0)) - mul_node.in_port(1).connect(dims_node2.out_port(0)) - - input_index = input_index + 2 - reshape_index = reshape_index + 1 - return input_index, reshape_index, dims, mul_node - - def resolve_minus4(self, shape_node, input_index, reshape_index, dims): - shape_const_node = Const(shape_node.graph, dict(name=shape_node.id + '/ShapeMinus4_index_const_' + str(input_index), - value=np.take(dims, [reshape_index + 1, reshape_index + 2]))).create_node() - input_index = input_index + 2 - reshape_index = reshape_index + 3 - return input_index, reshape_index, dims, shape_const_node - - def resolve_const_shape(self, shape_node, input_index, reshape_index, dims): - dim_const_node = Const(shape_node.graph, dict(name=shape_node.id + '/DimConst_' + str(reshape_index), - value=[dims[reshape_index]])).create_node() - input_index = input_index + 1 if 
input_index != None else None - reshape_index = reshape_index + 1 - return input_index, reshape_index, dims, dim_const_node - - def resolve(self, input_index, reshape_index, dims, input_shape_node, output_dims_nodes): - - resolve_handler = self.resolve_handlers(dims[reshape_index]) - input_index, reshape_index, dims, dims_node = resolve_handler(input_shape_node, input_index, - reshape_index, dims) - output_dims_nodes.append(dims_node) - return input_index, reshape_index, output_dims_nodes - - def replace_sub_graph(self, graph: Graph, match: dict): - node = match['mxreshape'] - - input_index = 0 - reshape_index = 0 - shape_node = Shape(graph, dict(name=node.id + '/ShapeMXReshape')).create_node() - shape_node.in_port(0).connect(node.in_port(0).get_source()) - output_dims_nodes = [] - for d in node.dim: - if reshape_index < len(node.dim): - input_index, reshape_index, output_dims_nodes = self.resolve(input_index, reshape_index, node.dim, shape_node, output_dims_nodes) - - concat_node = Concat(shape_node.graph, dict(name=shape_node.id + '/ConcatMXReshape_', axis=0, - in_ports_count=len(output_dims_nodes))).create_node() - - for in_port_index, dim_node in enumerate(output_dims_nodes): - concat_node.in_port(in_port_index).connect(dim_node.out_port(0)) - - reshape_node = Reshape(graph, dict(name=node.id + '/Reshape_')).create_node() - reshape_node.in_port(1).connect(concat_node.out_port(0)) - node.in_port(0).get_connection().set_destination(reshape_node.in_port(0)) - node.out_port(0).get_connection().set_source(reshape_node.out_port(0)) diff --git a/tools/mo/openvino/tools/mo/front/mxnet/nd_to_params.py b/tools/mo/openvino/tools/mo/front/mxnet/nd_to_params.py deleted file mode 100644 index dcf27533411a5a..00000000000000 --- a/tools/mo/openvino/tools/mo/front/mxnet/nd_to_params.py +++ /dev/null @@ -1,73 +0,0 @@ -# Copyright (C) 2018-2024 Intel Corporation -# SPDX-License-Identifier: Apache-2.0 - -import os - -import mxnet as mx - -from openvino.tools.mo.front.mxnet.extractors.utils import load_params - - -def save_params_file(model_name: str, args: dict, auxs: dict, iteration_number: int = 0): - pretrained = {} - for key in args: - pretrained["arg:" + key] = args[key] - - for key in auxs: - pretrained["aux:" + key] = auxs[key] - - save_model_path = '{}-{:04}.params'.format(model_name, iteration_number) - save_model_path = os.path.expanduser(save_model_path) - if os.path.isfile(save_model_path): - os.remove(save_model_path) - mx.nd.save(save_model_path, pretrained) - - -def add_pretrained_model(pretrained_params: dict, args: dict, pretrained_model: str, iteration_number: int, - input_names: str): - if input_names: - input_names = input_names.split(',') - else: - input_names = 'data' - - arg_dict = args - if pretrained_params: - symbol, arg_params, aux_params = mx.model.load_checkpoint(pretrained_model, iteration_number) - arg_names = symbol.list_arguments() - arg_dict = {} - - for name in arg_names: - if name in input_names: - continue - key = "arg:" + name - if key in pretrained_params: - arg_dict[name] = pretrained_params[key].copyto(mx.cpu()) - del pretrained_params - arg_dict.update(args) - return arg_dict - - -def build_params_file(nd_prefix_name: str = '', pretrained_model: str = '', input_names: str = ''): - path_wo_ext = '.'.join(pretrained_model.split('.')[:-1]) - pretrained_model_name_w_iter = path_wo_ext.split(os.sep)[-1] - pretrained_model_name = '-'.join(path_wo_ext.split('-')[:-1]) - iteration_number = int(pretrained_model_name_w_iter.split('-')[-1]) - files_dir = 
os.path.dirname(pretrained_model) - - if input_names: - model_params = load_params(pretrained_model, data_names=input_names.split(',')) - else: - model_params = load_params(pretrained_model) - - pretrained_params = mx.nd.load(pretrained_model) if pretrained_model_name else None - nd_args = mx.nd.load(os.path.join(files_dir, '%s_args.nd' % nd_prefix_name)) if nd_prefix_name else None - nd_auxs = mx.nd.load(os.path.join(files_dir, '%s_auxs.nd' % nd_prefix_name)) if nd_prefix_name else None - nd_args = add_pretrained_model(pretrained_params, nd_args, pretrained_model_name, - iteration_number, - input_names) - - model_params._arg_params = nd_args - model_params._aux_params = nd_auxs - model_params._param_names = list(nd_args.keys()) - model_params._aux_names = list(nd_auxs.keys()) - return model_params diff --git a/tools/mo/openvino/tools/mo/front/mxnet/null_ext.py b/tools/mo/openvino/tools/mo/front/mxnet/null_ext.py deleted file mode 100644 index ccf82400b43f86..00000000000000 --- a/tools/mo/openvino/tools/mo/front/mxnet/null_ext.py +++ /dev/null @@ -1,19 +0,0 @@ -# Copyright (C) 2018-2024 Intel Corporation -# SPDX-License-Identifier: Apache-2.0 - -from openvino.tools.mo.ops.parameter import Parameter -from openvino.tools.mo.front.extractor import FrontExtractorOp -from openvino.tools.mo.ops.const import Const - - -class NullFrontExtractor(FrontExtractorOp): - op = 'null' - enabled = True - - @classmethod - def extract(cls, node): - if 'value' in node.symbol_dict: - Const.update_node_stat(node, {'value': node.symbol_dict['value']}) - else: - Parameter.update_node_stat(node, {}) - return cls.enabled diff --git a/tools/mo/openvino/tools/mo/front/mxnet/pad_ext.py b/tools/mo/openvino/tools/mo/front/mxnet/pad_ext.py deleted file mode 100644 index 6472c070d79427..00000000000000 --- a/tools/mo/openvino/tools/mo/front/mxnet/pad_ext.py +++ /dev/null @@ -1,28 +0,0 @@ -# Copyright (C) 2018-2024 Intel Corporation -# SPDX-License-Identifier: Apache-2.0 - -from openvino.tools.mo.front.common.partial_infer.utils import mo_array -from openvino.tools.mo.front.extractor import FrontExtractorOp -from openvino.tools.mo.front.mxnet.extractors.utils import get_mxnet_layer_attrs -from openvino.tools.mo.ops.pad import AttributedPad - - -class PadFrontExtractor(FrontExtractorOp): - op = 'Pad' - enabled = True - - @classmethod - def extract(cls, node): - attrs = get_mxnet_layer_attrs(node.symbol_dict) - pads = mo_array(list(attrs.tuple('pad_width', int, None))) - pads = pads.reshape([-1, 2]) - value = attrs.float('constant_value', 0.0) - - node_attrs = { - 'pads': pads, - 'mode': attrs.str('mode', None), - 'fill_value': value, - } - - AttributedPad.update_node_stat(node, node_attrs) - return cls.enabled diff --git a/tools/mo/openvino/tools/mo/front/mxnet/pooling_ext.py b/tools/mo/openvino/tools/mo/front/mxnet/pooling_ext.py deleted file mode 100644 index 8a592f70530fcb..00000000000000 --- a/tools/mo/openvino/tools/mo/front/mxnet/pooling_ext.py +++ /dev/null @@ -1,53 +0,0 @@ -# Copyright (C) 2018-2024 Intel Corporation -# SPDX-License-Identifier: Apache-2.0 - -import numpy as np - -from openvino.tools.mo.front.common.partial_infer.utils import int64_array -from openvino.tools.mo.front.extractor import FrontExtractorOp -from openvino.tools.mo.front.mxnet.extractors.utils import get_mxnet_layer_attrs -from openvino.tools.mo.ops.pooling import Pooling - - -class PoolingFrontExtractor(FrontExtractorOp): - op = 'Pooling' - enabled = True - - @classmethod - def extract(cls, node): - attrs = 
get_mxnet_layer_attrs(node.symbol_dict) - - kernel = attrs.tuple("kernel", int, None) - stride = attrs.tuple("stride", int, tuple(np.ones(len(kernel), dtype=np.int64))) - padding = attrs.tuple("pad", int, tuple(np.zeros(len(kernel), dtype=np.int64))) - method = attrs.str("pool_type", None) - rt = 'floor' - - data = { - 'window': int64_array([1, 1, *[k for k in kernel]]), - 'stride': int64_array([1, 1, *[s for s in stride]]), - 'pad': int64_array([[0, 0], [0, 0], *[[pad, pad] for pad in padding]]), - 'pad_spatial_shape': int64_array([[pad, pad] for pad in padding]), - 'pool_method': method, - 'exclude_pad': False, - 'output_spatial_shape': None, - 'spatial_dims': None, - 'channel_dims': int64_array([1]), - 'batch_dims': int64_array([0]), - 'layout': 'NCHW', - 'rounding_type': rt, - } - - pooling_conv = attrs.str("pooling_convention", 'valid') - if pooling_conv: - data["pooling_convention"] = pooling_conv - if pooling_conv == 'full': - data["rounding_type"] = 'ceil' - - global_pool = attrs.bool("global_pool", False) - if global_pool: - data["global_pool"] = global_pool - - # update the attributes of the node - Pooling.update_node_stat(node, data) - return cls.enabled diff --git a/tools/mo/openvino/tools/mo/front/mxnet/proposal_ext.py b/tools/mo/openvino/tools/mo/front/mxnet/proposal_ext.py deleted file mode 100644 index 2739e589d014a8..00000000000000 --- a/tools/mo/openvino/tools/mo/front/mxnet/proposal_ext.py +++ /dev/null @@ -1,38 +0,0 @@ -# Copyright (C) 2018-2024 Intel Corporation -# SPDX-License-Identifier: Apache-2.0 - -from openvino.tools.mo.front.common.partial_infer.utils import mo_array -from openvino.tools.mo.ops.proposal import ProposalOp -from openvino.tools.mo.front.extractor import FrontExtractorOp -from openvino.tools.mo.front.mxnet.extractors.utils import get_mxnet_layer_attrs - - -class ProposalFrontExtractor(FrontExtractorOp): - op = '_contrib_Proposal' - enabled = True - - @classmethod - def extract(cls, node): - attrs = get_mxnet_layer_attrs(node.symbol_dict) - pre_nms_topn = attrs.int('rpn_pre_nms_top_n', 6000) - post_nms_topn = attrs.int('rpn_post_nms_top_n', 300) - nms_thresh = attrs.float('threshold', 0.7) - min_size = attrs.int('rpn_min_size', 16) - scale = attrs.tuple("scales", float, (4, 8, 16, 32)) - ratio = attrs.tuple("ratios", float, (0.5, 1, 2)) - feat_stride = attrs.int('feature_stride', 16) - - update_attrs = { - 'feat_stride': feat_stride, - 'ratio': mo_array(ratio), - 'min_size': min_size, - 'scale': mo_array(scale), - 'pre_nms_topn': pre_nms_topn, - 'post_nms_topn': post_nms_topn, - 'nms_thresh': nms_thresh, - 'base_size': feat_stride - } - - # update the attributes of the node - ProposalOp.update_node_stat(node, update_attrs) - return cls.enabled diff --git a/tools/mo/openvino/tools/mo/front/mxnet/psroi_pooling_ext.py b/tools/mo/openvino/tools/mo/front/mxnet/psroi_pooling_ext.py deleted file mode 100644 index 3778953ee03cd7..00000000000000 --- a/tools/mo/openvino/tools/mo/front/mxnet/psroi_pooling_ext.py +++ /dev/null @@ -1,33 +0,0 @@ -# Copyright (C) 2018-2024 Intel Corporation -# SPDX-License-Identifier: Apache-2.0 - -from openvino.tools.mo.ops.psroipooling import PSROIPoolingOp -from openvino.tools.mo.front.extractor import FrontExtractorOp -from openvino.tools.mo.front.mxnet.extractors.utils import get_mxnet_layer_attrs - - -class PSROIPoolingFrontExtractor(FrontExtractorOp): - op = '_contrib_PSROIPooling' - enabled = True - - @classmethod - def extract(cls, node): - attrs = get_mxnet_layer_attrs(node.symbol_dict) - - spatial_scale = 
attrs.float("spatial_scale", None) - pooled_size = attrs.int("pooled_size", None) - output_dim = attrs.int("output_dim", None) - group_size = attrs.int("group_size", 0) - - if group_size == 0: - group_size = pooled_size - - data = { - 'spatial_scale': spatial_scale, - 'output_dim': output_dim, - 'group_size': group_size, - } - - # update the attributes of the node - PSROIPoolingOp.update_node_stat(node, data) - return cls.enabled diff --git a/tools/mo/openvino/tools/mo/front/mxnet/random_uniform_ext.py b/tools/mo/openvino/tools/mo/front/mxnet/random_uniform_ext.py deleted file mode 100644 index e22231cc650095..00000000000000 --- a/tools/mo/openvino/tools/mo/front/mxnet/random_uniform_ext.py +++ /dev/null @@ -1,24 +0,0 @@ -# Copyright (C) 2018-2024 Intel Corporation -# SPDX-License-Identifier: Apache-2.0 - -import numpy as np - -from openvino.tools.mo.ops.random_uniform import AttributedRandomUniform -from openvino.tools.mo.front.extractor import FrontExtractorOp -from openvino.tools.mo.front.mxnet.extractors.utils import get_mxnet_layer_attrs - - -class RandomUniformExtractor(FrontExtractorOp): - op = '_random_uniform' - enabled = True - - @classmethod - def extract(cls, node): - attrs = get_mxnet_layer_attrs(node.symbol_dict) - shape = list(attrs.tuple("shape", int, None)) - high = attrs.float("high", 1.0) - low = attrs.float("low", 0.0) - out_type = attrs.dtype("dtype", np.float32) - new_attrs = {'shape': shape, 'min_val': out_type(low), 'max_val': out_type(high), 'output_type': out_type} - AttributedRandomUniform.update_node_stat(node, new_attrs) - return cls.enabled diff --git a/tools/mo/openvino/tools/mo/front/mxnet/register_custom_ops.py b/tools/mo/openvino/tools/mo/front/mxnet/register_custom_ops.py deleted file mode 100644 index d294ec17e98cf9..00000000000000 --- a/tools/mo/openvino/tools/mo/front/mxnet/register_custom_ops.py +++ /dev/null @@ -1,11 +0,0 @@ -# Copyright (C) 2018-2024 Intel Corporation -# SPDX-License-Identifier: Apache-2.0 - -from openvino.tools.mo.front.common.replacement import FrontReplacementOp, FrontReplacementSubgraph, FrontReplacementPattern -from openvino.tools.mo.front.extractor import FrontExtractorOp, MXNetCustomFrontExtractorOp -from openvino.tools.mo.front.tf.replacement import FrontReplacementFromConfigFileGeneral - -def get_front_classes(): - front_classes = [FrontExtractorOp, FrontReplacementOp, FrontReplacementSubgraph, MXNetCustomFrontExtractorOp, - FrontReplacementPattern, FrontReplacementFromConfigFileGeneral] - return front_classes diff --git a/tools/mo/openvino/tools/mo/front/mxnet/repeat_ext.py b/tools/mo/openvino/tools/mo/front/mxnet/repeat_ext.py deleted file mode 100644 index bd0d277d1767b4..00000000000000 --- a/tools/mo/openvino/tools/mo/front/mxnet/repeat_ext.py +++ /dev/null @@ -1,26 +0,0 @@ -# Copyright (C) 2018-2024 Intel Corporation -# SPDX-License-Identifier: Apache-2.0 - -from openvino.tools.mo.ops.mxrepeat import MXRepeat -from openvino.tools.mo.front.extractor import FrontExtractorOp -from openvino.tools.mo.front.mxnet.extractors.utils import get_mxnet_layer_attrs -from openvino.tools.mo.graph.graph import Node - - -class RepeatExt(FrontExtractorOp): - op = 'repeat' - enabled = True - - @classmethod - def extract(cls, node: Node): - attrs = get_mxnet_layer_attrs(node.symbol_dict) - axis = attrs.int('axis', 0) - repeats = attrs.int('repeats', None) - assert repeats is not None and repeats > 0, \ - '`repeat` op requires positive `repeats` attribute, but it is {} for node {}'.format(repeats, node.name) - - 
MXRepeat.update_node_stat(node, { - 'axis': axis, - 'repeats': repeats, - }) - return cls.enabled diff --git a/tools/mo/openvino/tools/mo/front/mxnet/reshape_ext.py b/tools/mo/openvino/tools/mo/front/mxnet/reshape_ext.py deleted file mode 100644 index fd8e8fb6d2fd39..00000000000000 --- a/tools/mo/openvino/tools/mo/front/mxnet/reshape_ext.py +++ /dev/null @@ -1,31 +0,0 @@ -# Copyright (C) 2018-2024 Intel Corporation -# SPDX-License-Identifier: Apache-2.0 - -from openvino.tools.mo.ops.mxreshape import MXReshape -from openvino.tools.mo.front.common.partial_infer.utils import int64_array -from openvino.tools.mo.front.extractor import FrontExtractorOp -from openvino.tools.mo.front.mxnet.extractors.utils import get_mxnet_layer_attrs -from openvino.tools.mo.ops.reshape import Reshape - - -class ReshapeFrontExtractor(FrontExtractorOp): - op = 'Reshape' - enabled = True - - @classmethod - def extract(cls, node): - attrs = get_mxnet_layer_attrs(node.symbol_dict) - dim = attrs.tuple("shape", int, None) - reverse = attrs.bool("reverse", False) - update_attrs = { - 'dim': int64_array(dim), - 'reverse': reverse - } - for d in dim: - if d in [-2, -3, -4] or reverse: - MXReshape.update_node_stat(node, update_attrs) - return cls.enabled - - # update the attributes of the node - Reshape.update_node_stat(node, update_attrs) - return cls.enabled diff --git a/tools/mo/openvino/tools/mo/front/mxnet/rnn_param_concat.py b/tools/mo/openvino/tools/mo/front/mxnet/rnn_param_concat.py deleted file mode 100644 index d2b99b1bdb4c70..00000000000000 --- a/tools/mo/openvino/tools/mo/front/mxnet/rnn_param_concat.py +++ /dev/null @@ -1,22 +0,0 @@ -# Copyright (C) 2018-2024 Intel Corporation -# SPDX-License-Identifier: Apache-2.0 - -from openvino.tools.mo.front.extractor import FrontExtractorOp -from openvino.tools.mo.front.mxnet.extractors.utils import get_mxnet_layer_attrs -from openvino.tools.mo.ops.concat import Concat - - -class RNNParamConcatFrontExtractor(FrontExtractorOp): - op = '_rnn_param_concat' - enabled = True - - @classmethod - def extract(cls, node): - attrs = get_mxnet_layer_attrs(node.symbol_dict) - data = { - 'axis': attrs.int("dim", 1), - } - - # update the attributes of the node - Concat.update_node_stat(node, data) - return cls.enabled diff --git a/tools/mo/openvino/tools/mo/front/mxnet/roi_pooling_ext.py b/tools/mo/openvino/tools/mo/front/mxnet/roi_pooling_ext.py deleted file mode 100644 index f25432fa99e39f..00000000000000 --- a/tools/mo/openvino/tools/mo/front/mxnet/roi_pooling_ext.py +++ /dev/null @@ -1,31 +0,0 @@ -# Copyright (C) 2018-2024 Intel Corporation -# SPDX-License-Identifier: Apache-2.0 - -from openvino.tools.mo.front.common.extractors.utils import layout_attrs -from openvino.tools.mo.front.extractor import FrontExtractorOp -from openvino.tools.mo.front.mxnet.extractors.utils import get_mxnet_layer_attrs -from openvino.tools.mo.ops.roipooling import ROIPooling - - -class ROIPoolingFrontExtractor(FrontExtractorOp): - op = 'ROIPooling' - enabled = True - - @classmethod - def extract(cls, node): - attrs = get_mxnet_layer_attrs(node.symbol_dict) - - spatial_scale = attrs.float("spatial_scale", None) - pooled_size = attrs.tuple("pooled_size", int, (0, 0)) - data = { - 'type': 'ROIPooling', - 'spatial_scale': spatial_scale, - 'pooled_w': pooled_size[1], - 'pooled_h': pooled_size[0] - } - - data.update(layout_attrs()) - - # update the attributes of the node - ROIPooling.update_node_stat(node, data) - return cls.enabled diff --git a/tools/mo/openvino/tools/mo/front/mxnet/roll_ext.py 
b/tools/mo/openvino/tools/mo/front/mxnet/roll_ext.py deleted file mode 100644 index 23d58e08ea15dc..00000000000000 --- a/tools/mo/openvino/tools/mo/front/mxnet/roll_ext.py +++ /dev/null @@ -1,21 +0,0 @@ -# Copyright (C) 2018-2024 Intel Corporation -# SPDX-License-Identifier: Apache-2.0 - -from openvino.tools.mo.ops.roll import AttributedRoll -from openvino.tools.mo.front.extractor import FrontExtractorOp -from openvino.tools.mo.front.mxnet.extractors.utils import get_mxnet_layer_attrs - - -class RollExtractor(FrontExtractorOp): - op = '_np_roll' - enabled = True - - @classmethod - def extract(cls, node): - attrs = get_mxnet_layer_attrs(node.symbol_dict) - shift = list(attrs.tuple("shift", int, None)) - axis = None - if attrs.has("axis"): - axis = list(attrs.tuple("axis", int, None)) - AttributedRoll.update_node_stat(node, {'axes': axis, 'shift': shift}) - return cls.enabled diff --git a/tools/mo/openvino/tools/mo/front/mxnet/shape_array_ext.py b/tools/mo/openvino/tools/mo/front/mxnet/shape_array_ext.py deleted file mode 100644 index 754545caec0146..00000000000000 --- a/tools/mo/openvino/tools/mo/front/mxnet/shape_array_ext.py +++ /dev/null @@ -1,16 +0,0 @@ -# Copyright (C) 2018-2024 Intel Corporation -# SPDX-License-Identifier: Apache-2.0 - -from openvino.tools.mo.front.extractor import FrontExtractorOp -from openvino.tools.mo.ops.shape import Shape - - -class ShapeArrayExtractor(FrontExtractorOp): - op = 'shape_array' - enabled = True - - @classmethod - def extract(cls, node): - Shape.update_node_stat(node, {}) - return cls.enabled - diff --git a/tools/mo/openvino/tools/mo/front/mxnet/sigmoid.py b/tools/mo/openvino/tools/mo/front/mxnet/sigmoid.py deleted file mode 100644 index 0ec89701d3d372..00000000000000 --- a/tools/mo/openvino/tools/mo/front/mxnet/sigmoid.py +++ /dev/null @@ -1,15 +0,0 @@ -# Copyright (C) 2018-2024 Intel Corporation -# SPDX-License-Identifier: Apache-2.0 - -from openvino.tools.mo.ops.activation_ops import Sigmoid -from openvino.tools.mo.front.extractor import FrontExtractorOp - - -class SigmoidFrontExtractor(FrontExtractorOp): - op = 'sigmoid' - enabled = True - - @classmethod - def extract(cls, node): - Sigmoid.update_node_stat(node) - return cls.enabled diff --git a/tools/mo/openvino/tools/mo/front/mxnet/slice_channel_ext.py b/tools/mo/openvino/tools/mo/front/mxnet/slice_channel_ext.py deleted file mode 100644 index ff1f465b41f63f..00000000000000 --- a/tools/mo/openvino/tools/mo/front/mxnet/slice_channel_ext.py +++ /dev/null @@ -1,28 +0,0 @@ -# Copyright (C) 2018-2024 Intel Corporation -# SPDX-License-Identifier: Apache-2.0 - -from openvino.tools.mo.ops.split import AttributedSplit -from openvino.tools.mo.front.extractor import FrontExtractorOp -from openvino.tools.mo.front.mxnet.extractors.utils import get_mxnet_layer_attrs - - -class SliceChannelFrontExtractor(FrontExtractorOp): - op = 'SliceChannel' - enabled = True - - @classmethod - def extract(cls, node): - attrs = get_mxnet_layer_attrs(node.symbol_dict) - axis = attrs.int("axis", 1) - num_outputs = attrs.int("num_outputs", 0) - squeeze_axis = attrs.bool('squeeze_axis', False) - - node_attrs = { - 'axis': axis, - 'squeeze_axis': squeeze_axis, - 'num_splits': num_outputs, - } - - # update the attributes of the node - AttributedSplit.update_node_stat(node, node_attrs) - return cls.enabled diff --git a/tools/mo/openvino/tools/mo/front/mxnet/slice_ext.py b/tools/mo/openvino/tools/mo/front/mxnet/slice_ext.py deleted file mode 100644 index ef2206c237c468..00000000000000 --- 
a/tools/mo/openvino/tools/mo/front/mxnet/slice_ext.py +++ /dev/null @@ -1,24 +0,0 @@ -# Copyright (C) 2018-2024 Intel Corporation -# SPDX-License-Identifier: Apache-2.0 - -from openvino.tools.mo.front.common.partial_infer.utils import mo_array -from openvino.tools.mo.front.extractor import FrontExtractorOp -from openvino.tools.mo.front.mxnet.extractors.utils import get_mxnet_layer_attrs -from openvino.tools.mo.ops.slice import MXSlice - - -class SliceFrontExtractor(FrontExtractorOp): - op = 'slice' - enabled = True - - @classmethod - def extract(cls, node): - attrs = get_mxnet_layer_attrs(node.symbol_dict) - node_attrs = { - 'crop_begin': mo_array(attrs.tuple("begin", int, ())), - 'crop_end': mo_array(attrs.tuple("end", int, ())), - 'step': mo_array(attrs.tuple("step", int, ())), - } - - MXSlice.update_node_stat(node, node_attrs) - return cls.enabled diff --git a/tools/mo/openvino/tools/mo/front/mxnet/slice_like_ext.py b/tools/mo/openvino/tools/mo/front/mxnet/slice_like_ext.py deleted file mode 100644 index 82c0db1099b3c0..00000000000000 --- a/tools/mo/openvino/tools/mo/front/mxnet/slice_like_ext.py +++ /dev/null @@ -1,23 +0,0 @@ -# Copyright (C) 2018-2024 Intel Corporation -# SPDX-License-Identifier: Apache-2.0 - -from openvino.tools.mo.ops.slice_like import SliceLike -from openvino.tools.mo.front.extractor import FrontExtractorOp -from openvino.tools.mo.front.mxnet.extractors.utils import get_mxnet_layer_attrs - - -class SliceLikeFrontExtractor(FrontExtractorOp): - op = 'slice_like' - enabled = True - - @classmethod - def extract(cls, node): - attrs = get_mxnet_layer_attrs(node.symbol_dict) - axes = list(attrs.tuple("axes", int, [])) - node_attrs = { - 'axes': axes - } - - # update the attributes of the node - SliceLike.update_node_stat(node, node_attrs) - return cls.enabled diff --git a/tools/mo/openvino/tools/mo/front/mxnet/slice_replacers.py b/tools/mo/openvino/tools/mo/front/mxnet/slice_replacers.py deleted file mode 100644 index 0d9430cbf2a4cc..00000000000000 --- a/tools/mo/openvino/tools/mo/front/mxnet/slice_replacers.py +++ /dev/null @@ -1,38 +0,0 @@ -# Copyright (C) 2018-2024 Intel Corporation -# SPDX-License-Identifier: Apache-2.0 - -import numpy as np - -from openvino.tools.mo.front.common.replacement import FrontReplacementOp -from openvino.tools.mo.graph.graph import Graph -from openvino.tools.mo.ops.const import Const -from openvino.tools.mo.ops.strided_slice import StridedSlice - - -class MXSliceToStridedSliceReplacer(FrontReplacementOp): - op = 'MXSlice' - enabled = True - - def replace_sub_graph(self, graph: Graph, match: dict): - node = match['op'] - - strided_slice_node = StridedSlice(graph, dict(name=node.id + '/strided_slice_', - shrink_axis_mask=np.zeros(len(node.crop_begin), dtype=np.int64), - new_axis_mask=np.zeros(len(node.crop_begin), dtype=np.int64), - ellipsis_mask=np.zeros(len(node.crop_begin), dtype=np.int64), - begin_mask=np.ones(len(node.crop_begin), dtype=np.int64), - end_mask=np.ones(len(node.crop_end), dtype=np.int64))).create_node() - node.in_port(0).get_connection().set_destination(strided_slice_node.in_port(0)) - node.out_port(0).get_connection().set_source(strided_slice_node.out_port(0)) - - crop_begin_node = Const(graph, dict(value=node.crop_begin, - symbol_dict={'name': node.id + '/crop_begin_const'})).create_node() - crop_end_node = Const(graph, dict(value=node.crop_end, - symbol_dict={'name': node.id + '/crop_end_const'})).create_node() - strided_slice_node.in_port(1).get_connection().set_source(crop_begin_node.out_port(0)) - 
strided_slice_node.in_port(2).get_connection().set_source(crop_end_node.out_port(0)) - - if len(node.step) > 0: - stride_node = Const(graph, dict(value=node.step, - symbol_dict={'name': node.id + '/steps_const'})).create_node() - strided_slice_node.in_port(3).get_connection().set_source(stride_node.out_port(0)) diff --git a/tools/mo/openvino/tools/mo/front/mxnet/softmax.py b/tools/mo/openvino/tools/mo/front/mxnet/softmax.py deleted file mode 100644 index de17575b1baa99..00000000000000 --- a/tools/mo/openvino/tools/mo/front/mxnet/softmax.py +++ /dev/null @@ -1,35 +0,0 @@ -# Copyright (C) 2018-2024 Intel Corporation -# SPDX-License-Identifier: Apache-2.0 - -from openvino.tools.mo.front.common.partial_infer.utils import mo_array -from openvino.tools.mo.ops.elementwise import Mul -from openvino.tools.mo.front.common.replacement import FrontReplacementSubgraph -from openvino.tools.mo.graph.graph import Graph -from openvino.tools.mo.ops.const import Const - - -class SoftmaxFrontReplacementSubgraph(FrontReplacementSubgraph): - enabled = True - - def pattern(self): - return dict( - nodes=[ - ('softmax', dict(type='SoftMax')) - ], - edges=[] - ) - - def replace_sub_graph(self, graph: Graph, match: dict): - node = match['softmax'] - if 'temperature' in node and node['temperature'] != 1.0: - in_node = node.in_node() - out_nodes = [node for node in node.out_nodes().values()] - graph.remove_edge(node.in_node().id, node.id) - temperature = mo_array([1.0 / node.temperature]) - scalar_value_op = Const(graph, dict(value=temperature, shape=temperature.shape, - symbol_dict={'name': node.id + '/const'})) - mul_op = Mul(graph, dict(name=node.id + '/mul_', symbol_dict={'name': node.id + '/mul_'})) - mul_node = mul_op.create_node(inputs=[in_node, scalar_value_op.create_node()]) - edge_attrs = graph.get_edge_data(node.id, out_nodes[0].id)[0] - graph.add_edges_from([(mul_node.id, node.id, edge_attrs)]) - diff --git a/tools/mo/openvino/tools/mo/front/mxnet/softmax_activation_ext.py b/tools/mo/openvino/tools/mo/front/mxnet/softmax_activation_ext.py deleted file mode 100644 index 366269ad0f2bde..00000000000000 --- a/tools/mo/openvino/tools/mo/front/mxnet/softmax_activation_ext.py +++ /dev/null @@ -1,29 +0,0 @@ -# Copyright (C) 2018-2024 Intel Corporation -# SPDX-License-Identifier: Apache-2.0 - -from openvino.tools.mo.front.extractor import FrontExtractorOp -from openvino.tools.mo.front.mxnet.extractors.utils import get_mxnet_layer_attrs -from openvino.tools.mo.ops.softmax import Softmax - - -class SoftmaxActivationExtractor(FrontExtractorOp): - op = 'SoftmaxActivation' - enabled = True - - @classmethod - def extract(cls, node): - attr = get_mxnet_layer_attrs(node.symbol_dict) - mode = attr.str("mode", "instance") - - if mode == "channel": - axis = 1 - else: - axis = -1 - - update_attrs = { - 'axis': axis, - } - - # update the attributes of the node - Softmax.update_node_stat(node, update_attrs) - return cls.enabled diff --git a/tools/mo/openvino/tools/mo/front/mxnet/softmax_ext.py b/tools/mo/openvino/tools/mo/front/mxnet/softmax_ext.py deleted file mode 100644 index 8256c61a37f60a..00000000000000 --- a/tools/mo/openvino/tools/mo/front/mxnet/softmax_ext.py +++ /dev/null @@ -1,25 +0,0 @@ -# Copyright (C) 2018-2024 Intel Corporation -# SPDX-License-Identifier: Apache-2.0 - -from openvino.tools.mo.front.extractor import FrontExtractorOp -from openvino.tools.mo.front.mxnet.extractors.utils import get_mxnet_layer_attrs -from openvino.tools.mo.ops.softmax import Softmax - - -class SoftmaxFrontExtractor(FrontExtractorOp): - 
op = 'softmax' - enabled = True - - @classmethod - def extract(cls, node): - attrs = get_mxnet_layer_attrs(node.symbol_dict) - - update_attrs = { - 'type': 'SoftMax', - 'axis': attrs.int("axis", -1), - 'temperature': attrs.float('temperature', 1.0) - } - - # update the attributes of the node - Softmax.update_node_stat(node, update_attrs) - return cls.enabled diff --git a/tools/mo/openvino/tools/mo/front/mxnet/softmax_output_ext.py b/tools/mo/openvino/tools/mo/front/mxnet/softmax_output_ext.py deleted file mode 100644 index 6634eec6631dc6..00000000000000 --- a/tools/mo/openvino/tools/mo/front/mxnet/softmax_output_ext.py +++ /dev/null @@ -1,33 +0,0 @@ -# Copyright (C) 2018-2024 Intel Corporation -# SPDX-License-Identifier: Apache-2.0 - -from openvino.tools.mo.front.extractor import FrontExtractorOp -from openvino.tools.mo.front.mxnet.extractors.utils import get_mxnet_layer_attrs -from openvino.tools.mo.ops.softmax import Softmax - - -class SoftmaxOutputExtractor(FrontExtractorOp): - op = 'SoftmaxOutput' - enabled = True - - @classmethod - def extract(cls, node): - attrs = get_mxnet_layer_attrs(node.symbol_dict) - - axis = 1 - preserve_shape = attrs.str('preserve_shape', 'False') - multi_output = attrs.str('multi_output', 'False') - - if preserve_shape == 'True': - axis = -1 - - if multi_output == 'True': - axis = 1 - - update_attrs = { - 'axis': axis, - } - - # update the attributes of the node - Softmax.update_node_stat(node, update_attrs) - return cls.enabled diff --git a/tools/mo/openvino/tools/mo/front/mxnet/softsign_ext.py b/tools/mo/openvino/tools/mo/front/mxnet/softsign_ext.py deleted file mode 100644 index a1ef7dfc184149..00000000000000 --- a/tools/mo/openvino/tools/mo/front/mxnet/softsign_ext.py +++ /dev/null @@ -1,15 +0,0 @@ -# Copyright (C) 2018-2024 Intel Corporation -# SPDX-License-Identifier: Apache-2.0 - -from openvino.tools.mo.front.extractor import FrontExtractorOp -from openvino.tools.mo.ops.activation_ops import SoftSign - - -class SoftSignExtractor(FrontExtractorOp): - op = 'softsign' - enabled = True - - @classmethod - def extract(cls, node): - SoftSign.update_node_stat(node, {}) - return cls.enabled diff --git a/tools/mo/openvino/tools/mo/front/mxnet/squeeze_ext.py b/tools/mo/openvino/tools/mo/front/mxnet/squeeze_ext.py deleted file mode 100644 index 603be68c83a050..00000000000000 --- a/tools/mo/openvino/tools/mo/front/mxnet/squeeze_ext.py +++ /dev/null @@ -1,18 +0,0 @@ -# Copyright (C) 2018-2024 Intel Corporation -# SPDX-License-Identifier: Apache-2.0 - -from openvino.tools.mo.front.extractor import FrontExtractorOp -from openvino.tools.mo.front.mxnet.extractors.utils import get_mxnet_layer_attrs -from openvino.tools.mo.ops.squeeze import Squeeze - - -class SqueezeExtractor(FrontExtractorOp): - op = 'squeeze' - enabled = True - - @classmethod - def extract(cls, node): - attrs = get_mxnet_layer_attrs(node.symbol_dict) - - Squeeze.update_node_stat(node, {'squeeze_dims': attrs.int("axis", None), 'keep_at_least_1d': True}) - return cls.enabled diff --git a/tools/mo/openvino/tools/mo/front/mxnet/ssd_anchor_reshape.py b/tools/mo/openvino/tools/mo/front/mxnet/ssd_anchor_reshape.py deleted file mode 100644 index a83805b1748543..00000000000000 --- a/tools/mo/openvino/tools/mo/front/mxnet/ssd_anchor_reshape.py +++ /dev/null @@ -1,90 +0,0 @@ -# Copyright (C) 2018-2024 Intel Corporation -# SPDX-License-Identifier: Apache-2.0 - -from openvino.tools.mo.front.common.partial_infer.utils import mo_array -from openvino.tools.mo.front.mxnet.eltwise_scalar_replacers import 
MulScalarFrontReplacer -from openvino.tools.mo.front.mxnet.ssd_detection_output_replacer import SsdPatternDetectionOutputReplacer -from openvino.tools.mo.front.split_normalizer import AttributedSplitToSplit -from openvino.tools.mo.ops.slice_like import SliceLike -from openvino.tools.mo.front.common.replacement import FrontReplacementSubgraph -from openvino.tools.mo.graph.graph import Graph, Node -from openvino.tools.mo.middle.pattern_match import find_pattern_matches -from openvino.tools.mo.ops.const import Const - - -class SsdPatternAnchorReshape(FrontReplacementSubgraph): - """ - Find ssd anchors and setup variants values. - Need to provide compatibility with OV DetectionOutput layer. - """ - enabled = True - graph_condition = [lambda graph: graph.graph['fw'] == 'mxnet' and graph.graph['cmd_params'].enable_ssd_gluoncv] - variants_pattern = dict( - nodes=[ - ('concat', dict(op='Concat')), - ('reshape', dict(op='Reshape')), - ('slice_channel', dict(op='Split')), - ('mul_scalar1x', dict(op='Mul')), - ('mul_scalar1y', dict(op='Mul')), - ('mul_scalar2x', dict(op='Mul')), - ('mul_scalar2y', dict(op='Mul')), - ], - edges=[ - ('concat', 'reshape'), - ('reshape', 'slice_channel'), - ('slice_channel', 'mul_scalar1x', {'out': 0}), - ('slice_channel', 'mul_scalar1y', {'out': 1}), - ('slice_channel', 'mul_scalar2x', {'out': 2}), - ('slice_channel', 'mul_scalar2y', {'out': 3}), - ] - ) - - def run_after(self): - return [MulScalarFrontReplacer, AttributedSplitToSplit] - - def run_before(self): - return [SsdPatternDetectionOutputReplacer] - - def pattern(self): - return dict( - nodes=[ - ('power', dict(op='Mul')), - ('anchor', dict(op='Const')), - ('slice_like', dict(op='slice_like')), - ('reshape1', dict(op='Reshape')), - ('reshape2', dict(op='Reshape')), - ('reshape3', dict(op='Reshape')) - ], - edges=[ - ('anchor', 'slice_like', {'in': 0}), - ('power', 'slice_like', {'in': 1}), - ('slice_like', 'reshape1', {'in': 0}), - ('reshape1', 'reshape2', {'in': 0}), - ('reshape2', 'reshape3', {'in': 0}), - ] - ) - - def replace_sub_graph(self, graph: Graph, match: dict): - slice_like = match['slice_like'] - const = slice_like.in_nodes()[0] - crop_shape = slice_like.in_nodes()[1] - - variants_dict = {'mul_scalar1x': 0.1, 'mul_scalar2x': 0.2, 'mul_scalar1y': 0.1, 'mul_scalar2y': 0.2} - for matches in find_pattern_matches(graph, self.variants_pattern['nodes'], self.variants_pattern['edges'], None, None): - for k, v in matches.items(): - if v in variants_dict.keys(): - variants_dict[v] = Node(graph, k).in_nodes()[1].value[0] - - variants = mo_array([variants_dict['mul_scalar1x'], variants_dict['mul_scalar1y'], - variants_dict['mul_scalar2x'], variants_dict['mul_scalar2y']] * int(const.value.size / 4)).reshape(const.value.shape) - priorbox_variants = Const(graph, dict(value=variants, name=const.id + '/priorbox_variants')).create_node() - variants_slice_like = SliceLike(graph, dict(axes=slice_like.axes, - name=slice_like.id + '/variants_slice_like')).create_node() - variants_slice_like.in_port(0).connect(priorbox_variants.out_port(0)) - variants_slice_like.in_port(1).connect(crop_shape.out_port(0)) - - concat = match['reshape3'].out_port(0).get_destination().node - assert concat.op == 'Concat' - concat_nodes_count = len(concat.in_nodes()) - concat.add_input_port(concat_nodes_count) - concat.in_port(concat_nodes_count).get_connection().set_source(variants_slice_like.out_port(0)) diff --git a/tools/mo/openvino/tools/mo/front/mxnet/ssd_detection_output_replacer.py 
b/tools/mo/openvino/tools/mo/front/mxnet/ssd_detection_output_replacer.py deleted file mode 100644 index e1855234cc166a..00000000000000 --- a/tools/mo/openvino/tools/mo/front/mxnet/ssd_detection_output_replacer.py +++ /dev/null @@ -1,143 +0,0 @@ -# Copyright (C) 2018-2024 Intel Corporation -# SPDX-License-Identifier: Apache-2.0 - -from openvino.tools.mo.front.flatten_to_reshape import FlattenToReshape -from openvino.tools.mo.front.split_normalizer import AttributedSplitToSplit -from openvino.tools.mo.ops.DetectionOutput import DetectionOutput -from openvino.tools.mo.front.common.partial_infer.utils import int64_array -from openvino.tools.mo.front.common.replacement import FrontReplacementSubgraph -from openvino.tools.mo.front.tf.graph_utils import create_op_node_with_second_input -from openvino.tools.mo.graph.graph import Node, Graph -from openvino.tools.mo.middle.pattern_match import find_pattern_matches -from openvino.tools.mo.ops.reshape import Reshape -from openvino.tools.mo.ops.result import Result - - -class SsdPatternDetectionOutputReplacer(FrontReplacementSubgraph): - """ - Detecting and replacing atomic operations subgraph to DetectionOutput layer. - """ - enabled = True - force_clean_up = True - graph_condition = [lambda graph: graph.graph['fw'] == 'mxnet' and graph.graph['cmd_params'].enable_ssd_gluoncv] - - concats_pattern = [ - dict( - nodes=[ - ('conv', dict(op='Convolution')), - ('transpose', dict(op='Transpose')), - ('flatten', dict(op='Flatten')), - ('concat', dict(op='Concat')), - ('reshape', dict(op='Reshape')), - ('slice_channel', dict(op='Split')), - ], - edges=[('conv', 'transpose', {'in': 0}), - ('transpose', 'flatten', {'in': 0}), - ('flatten', 'concat', {'in': 0}), - ('concat', 'reshape', {'in': 0}), - ('reshape', 'slice_channel', {'in': 0}), ] - ), - dict( - nodes=[ - ('conv', dict(op='Convolution')), - ('transpose', dict(op='Transpose')), - ('flatten', dict(op='Flatten')), - ('concat', dict(op='Concat')), - ('reshape', dict(op='Reshape')), - ('softmax', dict(op='SoftMax')), - ], - edges=[('conv', 'transpose', {'in': 0}), - ('transpose', 'flatten', {'in': 0}), - ('flatten', 'concat', {'in': 0}), - ('concat', 'reshape', {'in': 0}), - ('reshape', 'softmax', {'in': 0}), ] - ), - dict( - nodes=[ - ('power', dict(op='Mul')), - ('anchor', dict(op='Const')), - ('slice_like', dict(op='slice_like')), - ('reshape1', dict(op='Reshape')), - ('reshape2', dict(op='Reshape')), - ('reshape3', dict(op='Reshape')), - ('concat', dict(op='Concat')), - ('reshape4', dict(op='Reshape')), - ], - edges=[ - ('anchor', 'slice_like', {'in': 0}), - ('power', 'slice_like', {'in': 1}), - ('slice_like', 'reshape1', {'in': 0}), - ('reshape1', 'reshape2', {'in': 0}), - ('reshape2', 'reshape3', {'in': 0}), - ('reshape3', 'concat', {'in': 0}), - ('concat', 'reshape4', {'in': 0}), - ] - ) - ] - - def run_before(self): - return [FlattenToReshape] - - def run_after(self): - return [AttributedSplitToSplit] - - def pattern(self): - return dict( - nodes=[ - ('box_nms', dict(op='_contrib_box_nms')) - ], - edges=[] - ) - - def reshape_priorboxes(self, concat): - for i, node in concat.in_nodes().items(): - reshape_node = create_op_node_with_second_input(concat.graph, Reshape, int64_array([1, -1]), - dict(name=concat.name + str(i) + '/PriorBoxReshape_')) - node.out_port(0).disconnect() - node.out_port(0).connect(reshape_node.in_port(0)) - concat.in_port(i).connect(reshape_node.out_port(0)) - - def replace_sub_graph(self, graph: Graph, match: dict): - box_nms = match['box_nms'] - top_k = box_nms.topk - 
nms_threshold = box_nms.overlap_thresh - - ssd_concats = {} - concat_names = ['ssd_concat1', 'ssd_concat0', 'ssd_concat2'] - - for i, concat_match in enumerate(self.concats_pattern): - for matches in find_pattern_matches(graph, concat_match['nodes'], concat_match['edges'], None, None): - for match in matches: - if graph.has_node(match): - n = Node(graph, match) - if n.op == 'Concat': - ssd_concats.update({concat_names[i]: n}) - break - - assert concat_names[0] in ssd_concats - assert concat_names[1] in ssd_concats - assert concat_names[2] in ssd_concats - - graph.remove_nodes_from(graph.get_nodes_with_attributes(op='Result')) - detection_output_node = DetectionOutput(graph, dict(name=graph.unique_id() + '/DetectionOutput_', - top_k=top_k, keep_top_k=top_k, nms_threshold=nms_threshold, - background_label_id=0, clip=0, decrease_label_id=1, - code_type="caffe.PriorBoxParameter.CENTER_SIZE", - confidence_threshold=0.01, share_location=1, - variance_encoded_in_target=0, normalized=1)).create_node() - - reshape_node = create_op_node_with_second_input(graph, Reshape, int64_array([0, -1]), - dict(name=graph.unique_id() + '/DetectionOutput_')) - - ssd_softmax_node = ssd_concats['ssd_concat0'].out_node().out_node() - ssd_softmax_node.out_port(0).disconnect() - ssd_softmax_node.out_port(0).connect(reshape_node.in_port(0)) - reshape_node.out_port(0).connect(detection_output_node.in_port(1)) - - ssd_concats['ssd_concat2'].axis = 2 - self.reshape_priorboxes(ssd_concats['ssd_concat2']) - - ssd_concats['ssd_concat1'].out_port(0).get_connection().set_destination(detection_output_node.in_port(0)) - ssd_concats['ssd_concat2'].out_port(0).get_connection().set_destination(detection_output_node.in_port(2)) - - Result(graph, {'name': detection_output_node.id + '/Result'}).create_node([detection_output_node]) diff --git a/tools/mo/openvino/tools/mo/front/mxnet/ssd_pattern_flatten_softmax_activation.py b/tools/mo/openvino/tools/mo/front/mxnet/ssd_pattern_flatten_softmax_activation.py deleted file mode 100644 index 2dca5e0af8bf38..00000000000000 --- a/tools/mo/openvino/tools/mo/front/mxnet/ssd_pattern_flatten_softmax_activation.py +++ /dev/null @@ -1,54 +0,0 @@ -# Copyright (C) 2018-2024 Intel Corporation -# SPDX-License-Identifier: Apache-2.0 - -from openvino.tools.mo.front.mxnet.ssd_pattern_remove_flatten import SsdPatternRemoveFlatten -from openvino.tools.mo.front.mxnet.ssd_pattern_remove_reshape import SsdPatternRemoveReshape -from openvino.tools.mo.front.common.partial_infer.utils import int64_array -from openvino.tools.mo.front.common.replacement import FrontReplacementSubgraph -from openvino.tools.mo.front.tf.graph_utils import create_op_node_with_second_input -from openvino.tools.mo.graph.graph import Graph -from openvino.tools.mo.ops.reshape import Reshape - - -class SsdPatternFlattenSoftmaxActivation(FrontReplacementSubgraph): - enabled = True - - def run_before(self): - return [SsdPatternRemoveFlatten, SsdPatternRemoveReshape] - - def pattern(self): - return dict( - nodes=[ - ('softmax_activation', dict(op='SoftMax')), - ('multi_box_detection', dict(op='_contrib_MultiBoxDetection')) - ], - edges=[ - ('softmax_activation', 'multi_box_detection', {'in': 1}) - ] - ) - - def replace_sub_graph(self, graph: Graph, match: dict): - """ - Need to find the pattern: SoftmaxActivation -> DetectionOutput - DetectionOutput in OV expects flattened input from SoftMax, that is why there is the need to add - Flatten layer - - Parameters - ---------- - graph : Graph - Graph with loaded model. 
- match : dict - Patterns which were found in graph structure. - """ - softmax_activation = match['softmax_activation'] - multi_box_detection = match['multi_box_detection'] - softmax_activation['axis'] = -1 - edge_data = graph.get_edge_data(softmax_activation.id, multi_box_detection.id) - out_port = edge_data[0]['out'] - in_port = edge_data[0]['in'] - graph.remove_edge(softmax_activation.id, multi_box_detection.id) - new_reshape_node = create_op_node_with_second_input(graph, Reshape, int64_array([0, -1]), - dict(op='Reshape', - name=multi_box_detection.name + '/Reshape_'), - softmax_activation) - graph.create_edge(new_reshape_node, multi_box_detection, in_port=in_port, out_port=out_port) diff --git a/tools/mo/openvino/tools/mo/front/mxnet/ssd_pattern_remove_flatten.py b/tools/mo/openvino/tools/mo/front/mxnet/ssd_pattern_remove_flatten.py deleted file mode 100644 index 4b99fc5d273c1f..00000000000000 --- a/tools/mo/openvino/tools/mo/front/mxnet/ssd_pattern_remove_flatten.py +++ /dev/null @@ -1,39 +0,0 @@ -# Copyright (C) 2018-2024 Intel Corporation -# SPDX-License-Identifier: Apache-2.0 - -from openvino.tools.mo.front.flatten_to_reshape import FlattenToReshape -from openvino.tools.mo.front.mxnet.ssd_pattern_remove_reshape import SsdPatternRemoveReshape -from openvino.tools.mo.front.common.replacement import FrontReplacementSubgraph -from openvino.tools.mo.graph.graph import Graph - - -class SsdPatternRemoveFlatten(FrontReplacementSubgraph): - enabled = True - - def run_before(self): - return [SsdPatternRemoveReshape, FlattenToReshape] - - def pattern(self): - return dict( - nodes=[ - ('multi_box_prior', dict(op='_contrib_MultiBoxPrior')), - ('flatten', dict(op='Flatten')) - ], - edges=[ - ('multi_box_prior', 'flatten', {'in': 0}) - ] - ) - - def replace_sub_graph(self, graph: Graph, match: dict): - """ - Need to find each occurrence of pattern: _contrib_MultiBoxPrior -> Flatten - remove Flatten layer - OV does not expect outputs to be flattened - - Parameters - ---------- - graph : Graph - Graph with loaded model. - match : dict - Patterns which were found in graph structure. - """ - graph.erase_node(match['flatten']) diff --git a/tools/mo/openvino/tools/mo/front/mxnet/ssd_pattern_remove_reshape.py b/tools/mo/openvino/tools/mo/front/mxnet/ssd_pattern_remove_reshape.py deleted file mode 100644 index 33a6e8c24db565..00000000000000 --- a/tools/mo/openvino/tools/mo/front/mxnet/ssd_pattern_remove_reshape.py +++ /dev/null @@ -1,46 +0,0 @@ -# Copyright (C) 2018-2024 Intel Corporation -# SPDX-License-Identifier: Apache-2.0 - -from openvino.tools.mo.front.common.replacement import FrontReplacementSubgraph -from openvino.tools.mo.front.mxnet.extractors.utils import get_json_layer_attrs -from openvino.tools.mo.graph.graph import Graph - - -class SsdPatternRemoveReshape(FrontReplacementSubgraph): - enabled = True - - def pattern(self): - return dict( - nodes=[ - ('multi_box_prior', dict(op='_contrib_MultiBoxPrior')), - ('concat', dict(op='Concat')), - ('reshape', dict(op='Reshape')) - ], - edges=[ - ('multi_box_prior', 'concat', {'in': 0}), - ('concat', 'reshape', {'in': 0}) - ] - ) - - def replace_sub_graph(self, graph: Graph, match: dict): - """ - Need to find each occurrence of pattern: _contrib_MultiBoxPrior(s) -> Concat -> Reshape - remove Reshape layer - OV does not expect outputs from concatenation of _contrib_MultiBoxPrior to be reshaped - - Parameters - ---------- - graph : Graph - Graph with loaded model. - match : dict - Patterns which were found in graph structure. 
- """ - reshape_node = match['reshape'] - reshape_node.out_port(0).get_connection().set_source(reshape_node.in_port(0).get_connection().get_source()) - graph.remove_node(reshape_node.id) - - # concat should be performed for the third axis - concat_node = match['concat'] - attr = get_json_layer_attrs(concat_node.graph.node[concat_node.id]['symbol_dict']) - if 'dim' in attr: - attr['dim'] = 2 - concat_node['axis'] = 2 diff --git a/tools/mo/openvino/tools/mo/front/mxnet/ssd_pattern_remove_transpose.py b/tools/mo/openvino/tools/mo/front/mxnet/ssd_pattern_remove_transpose.py deleted file mode 100644 index 49e1652d80e2b0..00000000000000 --- a/tools/mo/openvino/tools/mo/front/mxnet/ssd_pattern_remove_transpose.py +++ /dev/null @@ -1,52 +0,0 @@ -# Copyright (C) 2018-2024 Intel Corporation -# SPDX-License-Identifier: Apache-2.0 - -from openvino.tools.mo.front.mxnet.ssd_pattern_flatten_softmax_activation import SsdPatternFlattenSoftmaxActivation -from openvino.tools.mo.front.mxnet.ssd_pattern_remove_flatten import SsdPatternRemoveFlatten -from openvino.tools.mo.front.mxnet.ssd_pattern_remove_reshape import SsdPatternRemoveReshape -from openvino.tools.mo.front.common.replacement import FrontReplacementSubgraph -from openvino.tools.mo.graph.graph import Graph - - -class SsdPatternRemoveTranspose(FrontReplacementSubgraph): - enabled = True - - def run_before(self): - return [SsdPatternFlattenSoftmaxActivation, SsdPatternRemoveFlatten, SsdPatternRemoveReshape] - - def pattern(self): - return dict( - nodes=[ - ('transpose', dict(op='Transpose')), - ('softmax_activation', dict(op='SoftMax')), - ('multi_box_detection', dict(op='_contrib_MultiBoxDetection')) - ], - edges=[ - ('transpose', 'softmax_activation', {'in': 0}), - ('softmax_activation', 'multi_box_detection', {'in': 1}), - ] - ) - - def replace_sub_graph(self, graph: Graph, match: dict): - """ - Need to find each occurrence of pattern: - transpose -> SoftmaxActivation -> _contrib_MultiBoxDetection - remove transpose layer to secure the order of weights in SoftMax to be the same as - OV expects weights to be in following order: class-wise values for each priorbox. - priorboxes change the quickest - - Parameters - ---------- - graph : Graph - Graph with loaded model. - match : dict - Patterns which were found in graph structure. 
- """ - transpose_node = match['transpose'] - softmax_activation = match['softmax_activation'] - transpose_in_node = transpose_node.in_node(0) - - graph.remove_edge(transpose_in_node.id, transpose_node.id) - graph.remove_edge(transpose_node.id, softmax_activation.id) - graph.remove_node(transpose_node.id) - graph.create_edge(transpose_in_node, softmax_activation) diff --git a/tools/mo/openvino/tools/mo/front/mxnet/ssd_reorder_detection_out_inputs.py b/tools/mo/openvino/tools/mo/front/mxnet/ssd_reorder_detection_out_inputs.py deleted file mode 100644 index 1090134f3a0679..00000000000000 --- a/tools/mo/openvino/tools/mo/front/mxnet/ssd_reorder_detection_out_inputs.py +++ /dev/null @@ -1,53 +0,0 @@ -# Copyright (C) 2018-2024 Intel Corporation -# SPDX-License-Identifier: Apache-2.0 - -from openvino.tools.mo.front.mxnet.ssd_pattern_flatten_softmax_activation import SsdPatternFlattenSoftmaxActivation -from openvino.tools.mo.front.mxnet.ssd_pattern_remove_transpose import SsdPatternRemoveTranspose -from openvino.tools.mo.front.common.replacement import FrontReplacementPattern -from openvino.tools.mo.graph.graph import Graph - - -class SsdReorderDetectionOutInputs(FrontReplacementPattern): - - enabled = True - - def run_before(self): - return [SsdPatternFlattenSoftmaxActivation, SsdPatternRemoveTranspose] - - @staticmethod - def pattern(): - return dict( - nodes=[ - ('multi_box_detection', dict(op='_contrib_MultiBoxDetection')) - ], - edges=[]) - - @staticmethod - def replace_pattern(graph: Graph, match: dict): - """ - DetectionOutput layer has another order of inputs unlike mxnet. - Need to reorder _contrib_MultiBoxDetection inputs - for correct conversion to DetectionOutput layer. - - Parameters - ---------- - graph : Graph - Graph with loaded model. - """ - multi_box_detection_node = match['multi_box_detection'] - conf_node = multi_box_detection_node.in_node(0) - loc_node = multi_box_detection_node.in_node(1) - - conf_edge_data = graph.get_edge_data(conf_node.id, multi_box_detection_node.id) - conf_out_port = conf_edge_data[0]['out'] - conf_in_port = conf_edge_data[0]['in'] - - loc_edge_data = graph.get_edge_data(loc_node.id, multi_box_detection_node.id) - loc_out_port = loc_edge_data[0]['out'] - loc_in_port = loc_edge_data[0]['in'] - - graph.remove_edge(conf_node.id, multi_box_detection_node.id) - graph.remove_edge(loc_node.id, multi_box_detection_node.id) - - graph.create_edge(loc_node, multi_box_detection_node, in_port=conf_in_port, out_port=conf_out_port) - graph.create_edge(conf_node, multi_box_detection_node, in_port=loc_in_port, out_port=loc_out_port) diff --git a/tools/mo/openvino/tools/mo/front/mxnet/stack_ext.py b/tools/mo/openvino/tools/mo/front/mxnet/stack_ext.py deleted file mode 100644 index 1d1f00c9c75933..00000000000000 --- a/tools/mo/openvino/tools/mo/front/mxnet/stack_ext.py +++ /dev/null @@ -1,24 +0,0 @@ -# Copyright (C) 2018-2024 Intel Corporation -# SPDX-License-Identifier: Apache-2.0 - -from openvino.tools.mo.ops.pack import PackOp -from openvino.tools.mo.front.extractor import FrontExtractorOp -from openvino.tools.mo.front.mxnet.extractors.utils import get_mxnet_layer_attrs - - -class StackFrontExtractor(FrontExtractorOp): - op = 'stack' - enabled = True - - @classmethod - def extract(cls, node): - attrs = get_mxnet_layer_attrs(node.symbol_dict) - - update_attrs = { - 'axis': attrs.int('axis', 0) - } - - # update the attributes of the node - PackOp.update_node_stat(node, update_attrs) - - return cls.enabled diff --git 
a/tools/mo/openvino/tools/mo/front/mxnet/swapaxis_ext.py b/tools/mo/openvino/tools/mo/front/mxnet/swapaxis_ext.py deleted file mode 100644 index 3831cf87713acb..00000000000000 --- a/tools/mo/openvino/tools/mo/front/mxnet/swapaxis_ext.py +++ /dev/null @@ -1,35 +0,0 @@ -# Copyright (C) 2018-2024 Intel Corporation -# SPDX-License-Identifier: Apache-2.0 - -from openvino.tools.mo.ops.swapaxis import SwapAxis -from openvino.tools.mo.front.extractor import FrontExtractorOp -from openvino.tools.mo.front.mxnet.extractors.utils import get_mxnet_layer_attrs - - -def extract(node): - attrs = get_mxnet_layer_attrs(node.symbol_dict) - dim1 = attrs.int("dim1", 0) - dim2 = attrs.int("dim2", 0) - - update_attrs = { - 'dim1': dim1, - 'dim2': dim2, - } - - # update the attributes of the node - SwapAxis.update_node_stat(node, update_attrs) - return True - - -class SwapAxisFrontExtractor(FrontExtractorOp): - op = 'SwapAxis' - enabled = True - - extract = staticmethod(extract) - - -class SwapAxesFrontExtractor(FrontExtractorOp): - op = 'swapaxes' - enabled = True - - extract = staticmethod(extract) diff --git a/tools/mo/openvino/tools/mo/front/mxnet/take_ext.py b/tools/mo/openvino/tools/mo/front/mxnet/take_ext.py deleted file mode 100644 index 0c557a35b61306..00000000000000 --- a/tools/mo/openvino/tools/mo/front/mxnet/take_ext.py +++ /dev/null @@ -1,20 +0,0 @@ -# Copyright (C) 2018-2024 Intel Corporation -# SPDX-License-Identifier: Apache-2.0 - -from openvino.tools.mo.ops.gather import AttributedGather -from openvino.tools.mo.front.extractor import FrontExtractorOp -from openvino.tools.mo.front.mxnet.extractors.utils import get_mxnet_layer_attrs -from openvino.tools.mo.graph.graph import Node - - -class TakeExtractor(FrontExtractorOp): - op = 'take' - enabled = True - - @classmethod - def extract(cls, node: Node): - attrs = get_mxnet_layer_attrs(node.symbol_dict) - AttributedGather.update_node_stat(node, { - 'axis': attrs.int('axis', 0), - }) - return cls.enabled diff --git a/tools/mo/openvino/tools/mo/front/mxnet/tile_ext.py b/tools/mo/openvino/tools/mo/front/mxnet/tile_ext.py deleted file mode 100644 index 781b1e99b38a74..00000000000000 --- a/tools/mo/openvino/tools/mo/front/mxnet/tile_ext.py +++ /dev/null @@ -1,20 +0,0 @@ -# Copyright (C) 2018-2024 Intel Corporation -# SPDX-License-Identifier: Apache-2.0 - -from openvino.tools.mo.front.extractor import FrontExtractorOp -from openvino.tools.mo.front.mxnet.extractors.utils import get_mxnet_layer_attrs -from openvino.tools.mo.graph.graph import Node -from openvino.tools.mo.ops.tile import Tile - - -class TileExt(FrontExtractorOp): - op = 'tile' - enabled = True - - @classmethod - def extract(cls, node: Node): - attrs = get_mxnet_layer_attrs(node.symbol_dict) - Tile.update_node_stat(node, { - 'reps': attrs.tuple('reps', int, None), - }) - return cls.enabled diff --git a/tools/mo/openvino/tools/mo/front/mxnet/tile_replacer.py b/tools/mo/openvino/tools/mo/front/mxnet/tile_replacer.py deleted file mode 100644 index 79be16ca5ab7d0..00000000000000 --- a/tools/mo/openvino/tools/mo/front/mxnet/tile_replacer.py +++ /dev/null @@ -1,19 +0,0 @@ -# Copyright (C) 2018-2024 Intel Corporation -# SPDX-License-Identifier: Apache-2.0 - -from openvino.tools.mo.front.common.partial_infer.utils import int64_array -from openvino.tools.mo.front.common.replacement import FrontReplacementOp -from openvino.tools.mo.graph.graph import Graph -from openvino.tools.mo.ops.const import Const - - -class TileReplacer(FrontReplacementOp): - op = 'Tile' - enabled = True - - def 
replace_sub_graph(self, graph: Graph, match: dict): - node = match['op'] - if node.has_valid('reps'): - tile_array = Const(graph, dict(value=int64_array(node.reps), - symbol_dict={'name': node.id + '/tile_array'})).create_node() - node.in_port(1).get_connection().set_source(tile_array.out_port(0)) diff --git a/tools/mo/openvino/tools/mo/front/mxnet/transpose_ext.py b/tools/mo/openvino/tools/mo/front/mxnet/transpose_ext.py deleted file mode 100644 index 26e832105367d8..00000000000000 --- a/tools/mo/openvino/tools/mo/front/mxnet/transpose_ext.py +++ /dev/null @@ -1,21 +0,0 @@ -# Copyright (C) 2018-2024 Intel Corporation -# SPDX-License-Identifier: Apache-2.0 - -import numpy as np - -from openvino.tools.mo.front.common.partial_infer.utils import mo_array -from openvino.tools.mo.ops.transpose import Transpose -from openvino.tools.mo.front.extractor import FrontExtractorOp -from openvino.tools.mo.front.mxnet.extractors.utils import get_mxnet_layer_attrs - - -class TransposeFrontExtractor(FrontExtractorOp): - op = 'transpose' - enabled = True - - @classmethod - def extract(cls, node): - attrs = get_mxnet_layer_attrs(node.symbol_dict) - order = list(attrs.tuple("axes", int, None)) - Transpose.update_node_stat(node, {'order': mo_array(order, dtype=np.int32)}) - return cls.enabled diff --git a/tools/mo/openvino/tools/mo/front/mxnet/up_sampling_ext.py b/tools/mo/openvino/tools/mo/front/mxnet/up_sampling_ext.py deleted file mode 100644 index 3ba5c539195ef6..00000000000000 --- a/tools/mo/openvino/tools/mo/front/mxnet/up_sampling_ext.py +++ /dev/null @@ -1,70 +0,0 @@ -# Copyright (C) 2018-2024 Intel Corporation -# SPDX-License-Identifier: Apache-2.0 - -import math - -from openvino.tools.mo.front.mxnet.conv_ext import DeconvFrontExtractor -from openvino.tools.mo.ops.interpolate import Interpolate -from openvino.tools.mo.front.common.partial_infer.utils import int64_array -from openvino.tools.mo.front.extractor import FrontExtractorOp -from openvino.tools.mo.front.mxnet.extractors.utils import get_mxnet_layer_attrs -from openvino.tools.mo.ops.convolution import Convolution - - -class UpSamplingFrontExtractor(FrontExtractorOp): - op = 'UpSampling' - enabled = True - - @classmethod - def extract(cls, node): - attrs = get_mxnet_layer_attrs(node.symbol_dict) - scale = attrs.int("scale", 1) - num_filter = attrs.int("num_filter", 0) - mode = attrs.str("sample_type", None) - if mode == 'nearest': - node_attrs = { - 'factor': attrs.int("scale", 1), - 'mode': mode, - 'antialias': 0, - 'axes': int64_array([2, 3]), - } - Interpolate.update_node_stat(node, node_attrs) - elif mode == 'bilinear': - """ - Bilinear UpSampling uses deconvolution algorithm under the hood. - For MXNet Bilinear UpSampling op just wrapper over Deconvolution op. 
- Inputs data: - input1 - input data - input2 - deconvolution weight - """ - kernel = 2 * scale - scale % 2 - stride = scale - pad = math.ceil((scale - 1) / 2) - num_group = num_filter - - node_attrs = { - 'op': __class__.op, - 'type': 'Deconvolution', - 'bias_addable': True, - 'bias_term': False, - 'pad': int64_array([[0, 0], [0, 0], [pad, pad], [pad, pad]]), - 'pad_spatial_shape': int64_array([[pad, pad], [pad, pad]]), - 'dilation': None, - 'output_spatial_shape': None, - 'output_shape': None, - 'stride': int64_array([1, 1, stride, stride]), - 'group': num_group, - 'output': num_filter, - 'kernel_spatial': int64_array([kernel, kernel]), - 'input_feature_channel': 0, - 'output_feature_channel': 1, - 'kernel_spatial_idx': None, - 'reshape_kernel': True, - 'spatial_dims': None, - 'channel_dims': int64_array([1]), - 'batch_dims': int64_array([0]), - 'layout': 'NCHW', - 'get_pad': DeconvFrontExtractor.get_pad, - } - Convolution.update_node_stat(node, node_attrs) - return cls.enabled diff --git a/tools/mo/openvino/tools/mo/front/mxnet/where_ext.py b/tools/mo/openvino/tools/mo/front/mxnet/where_ext.py deleted file mode 100644 index 0498c94a07b7d6..00000000000000 --- a/tools/mo/openvino/tools/mo/front/mxnet/where_ext.py +++ /dev/null @@ -1,15 +0,0 @@ -# Copyright (C) 2018-2024 Intel Corporation -# SPDX-License-Identifier: Apache-2.0 - -from openvino.tools.mo.ops.select import Select -from openvino.tools.mo.front.extractor import FrontExtractorOp - - -class WhereFrontExtractor(FrontExtractorOp): - op = 'where' - enabled = True - - @classmethod - def extract(cls, node): - Select.update_node_stat(node, {}) - return cls.enabled diff --git a/tools/mo/openvino/tools/mo/front/mxnet/yolo_v3_mobilenet1_voc.json b/tools/mo/openvino/tools/mo/front/mxnet/yolo_v3_mobilenet1_voc.json deleted file mode 100644 index bc5aebe3f441b5..00000000000000 --- a/tools/mo/openvino/tools/mo/front/mxnet/yolo_v3_mobilenet1_voc.json +++ /dev/null @@ -1,14 +0,0 @@ -[ - { - "id": "TFYOLOV3", - "match_kind": "general", - "custom_attributes": { - "classes": 20, - "anchors": [10, 13, 16, 30, 33, 23, 30, 61, 62, 45, 59, 119, 116, 90, 156, 198, 373, 326], - "coords": 4, - "num": 9, - "masks":[[6, 7, 8], [3, 4, 5], [0, 1, 2]], - "entry_points": ["yolov30_yolooutputv30_reshape0", "yolov30_yolooutputv31_reshape0", "yolov30_yolooutputv32_reshape0"] - } - } -] \ No newline at end of file diff --git a/tools/mo/openvino/tools/mo/front/mxnet/zeros_ext.py b/tools/mo/openvino/tools/mo/front/mxnet/zeros_ext.py deleted file mode 100644 index b24ea66c170958..00000000000000 --- a/tools/mo/openvino/tools/mo/front/mxnet/zeros_ext.py +++ /dev/null @@ -1,38 +0,0 @@ -# Copyright (C) 2018-2024 Intel Corporation -# SPDX-License-Identifier: Apache-2.0 - -import numpy as np - -from openvino.tools.mo.front.extractor import FrontExtractorOp -from openvino.tools.mo.front.mxnet.extractors.utils import get_mxnet_layer_attrs -from openvino.tools.mo.ops.const import Const - - -class ZerosFrontExtractor(FrontExtractorOp): - op = '_zeros' - enabled = True - - @classmethod - def extract(cls, node): - attrs = get_mxnet_layer_attrs(node.symbol_dict) - shape = list(attrs.tuple('shape', int, None)) - dtype = attrs.tuple('dtype', str, None) - if dtype and len(dtype) == 1: - dtype = dtype[0] - else: - dtype = np.float32 - zero_shapes = [] - for i, s in enumerate(shape): - if s == 0: - shape[i] = 1 - zero_shapes.append(i) - - update_attrs = { - 'shape': np.ndarray(shape), - 'value': np.zeros(shape, dtype=dtype), - 'zero_shapes': zero_shapes - } - - # update the attributes 
of the node - Const.update_node_stat(node, update_attrs) - return cls.enabled diff --git a/tools/mo/openvino/tools/mo/load/mxnet/__init__.py b/tools/mo/openvino/tools/mo/load/mxnet/__init__.py deleted file mode 100644 index 8ba81a92b19c53..00000000000000 --- a/tools/mo/openvino/tools/mo/load/mxnet/__init__.py +++ /dev/null @@ -1,3 +0,0 @@ -# Copyright (C) 2018-2024 Intel Corporation -# SPDX-License-Identifier: Apache-2.0 - diff --git a/tools/mo/openvino/tools/mo/load/mxnet/loader.py b/tools/mo/openvino/tools/mo/load/mxnet/loader.py deleted file mode 100644 index 5ab7d9fbe0561f..00000000000000 --- a/tools/mo/openvino/tools/mo/load/mxnet/loader.py +++ /dev/null @@ -1,56 +0,0 @@ -# Copyright (C) 2018-2024 Intel Corporation -# SPDX-License-Identifier: Apache-2.0 - -from openvino.tools.mo.utils.error import FrameworkError, Error -from openvino.tools.mo.utils.utils import refer_to_faq_msg - -try: - import mxnet -except ImportError: - raise Error('Module mxnet was not found. Please install appropriate version of mxnet with requirements via \n' - 'pip install openvino-dev[mxnet]') - -from openvino.tools.mo.load.loader import Loader -from openvino.tools.mo.front.common.register_custom_ops import update_extractors_with_extensions -from openvino.tools.mo.front.extractor import extract_node_attrs -from openvino.tools.mo.front.mxnet.extractor import mxnet_op_extractors, mxnet_op_extractor -from openvino.tools.mo.front.mxnet.loader import symbol2nx, load_symbol_def -from openvino.tools.mo.front.mxnet.nd_to_params import save_params_file -from openvino.tools.mo.graph.graph import Graph -from openvino.tools.mo.utils.telemetry_utils import send_shapes_info, send_op_names_info - - -class MxNetLoader(Loader): - enabled = True - - def load(self, graph: Graph): - argv = graph.graph['cmd_params'] - try: - model_nodes, model_params, model_name, iteration_number = load_symbol_def(argv.input_model, - argv.input_symbol, - argv.input, - argv.nd_prefix_name, - argv.pretrained_model_name, - argv.legacy_mxnet_model) - except (ValueError, mxnet.base.MXNetError) as e: - raise FrameworkError( - 'The following error happened while loading mxnet model {}: {}. ' + - refer_to_faq_msg(53), - argv.input_model, - str(e) - ) from e - - if argv.nd_prefix_name and argv.pretrained_model_name and argv.save_params_from_nd: - save_params_file(model_name, model_params._arg_params, model_params._aux_params, iteration_number) - - update_extractors_with_extensions(mxnet_op_extractors) - symbol2nx(graph, model_nodes, model_params, argv.input) - graph.check_empty_graph('symbol2nx. 
It may happen due to problems with loaded model')
-
-    graph.graph['layout'] = 'NCHW'
-    graph.graph['fw'] = 'mxnet'
-    graph.graph['feature_dim'] = 1 if graph.graph['layout'] == 'NCHW' else 3
-
-    extract_node_attrs(graph, mxnet_op_extractor)
-    send_op_names_info('mxnet', graph)
-    send_shapes_info('mxnet', graph)
diff --git a/tools/mo/openvino/tools/mo/main_mxnet.py b/tools/mo/openvino/tools/mo/main_mxnet.py
deleted file mode 100644
index e924f47690eb32..00000000000000
--- a/tools/mo/openvino/tools/mo/main_mxnet.py
+++ /dev/null
@@ -1,10 +0,0 @@
-# Copyright (C) 2018-2024 Intel Corporation
-# SPDX-License-Identifier: Apache-2.0
-
-import sys
-
-from openvino.tools.mo.utils.cli_parser import get_mxnet_cli_parser  # pylint: disable=no-name-in-module,import-error
-
-if __name__ == "__main__":
-    from openvino.tools.mo.main import main
-    sys.exit(main(get_mxnet_cli_parser(), 'mxnet'))
diff --git a/tools/mo/openvino/tools/mo/middle/passes/fusing/decomposition.py b/tools/mo/openvino/tools/mo/middle/passes/fusing/decomposition.py
index b12170afe0d6c8..d0afff7b9ca2ad 100644
--- a/tools/mo/openvino/tools/mo/middle/passes/fusing/decomposition.py
+++ b/tools/mo/openvino/tools/mo/middle/passes/fusing/decomposition.py
@@ -75,7 +75,7 @@ def convert_batch_norm(graph: Graph):
 def _fused_batch_norm_decomposition(graph: Graph, tinput: Port, toutput: Port, gamma: Port, beta: Port,
                                     mean: np.ndarray, variance: np.ndarray, can_be_fused=True):
     """
-    This is common function for TF, Caffe and MXNet
+    This is a common function for TF and Caffe
     It creates Mul->Add->Mul->Add sub graph
     """
     batch_norm_name = tinput.get_connection().get_destination().node.name
diff --git a/tools/mo/openvino/tools/mo/mo_mxnet.py b/tools/mo/openvino/tools/mo/mo_mxnet.py
deleted file mode 100755
index d39b75ef488ace..00000000000000
--- a/tools/mo/openvino/tools/mo/mo_mxnet.py
+++ /dev/null
@@ -1,11 +0,0 @@
-#!/usr/bin/env python3
-
-# Copyright (C) 2018-2024 Intel Corporation
-# SPDX-License-Identifier: Apache-2.0
-
-
-if __name__ == "__main__":
-    from openvino.tools.mo.subprocess_main import subprocess_main
-    from openvino.tools.mo.utils.telemetry_utils import init_mo_telemetry
-    init_mo_telemetry()
-    subprocess_main(framework='mxnet')
diff --git a/tools/mo/openvino/tools/mo/ops/LSTM.py b/tools/mo/openvino/tools/mo/ops/LSTM.py
index 618b1083dc4c67..a135ceda832545 100644
--- a/tools/mo/openvino/tools/mo/ops/LSTM.py
+++ b/tools/mo/openvino/tools/mo/ops/LSTM.py
@@ -22,7 +22,7 @@ def __init__(self, graph: Graph, attrs: dict):
             'gate_order': None,
             'normalized': False,
             'multilayers': False,
-            'format': None,  # format type of input blobs for different frameworks (onnx, tf, mxnet),
+            'format': None,  # format type of input blobs for different frameworks (onnx, tf),

             'activation_alpha': None,
             'activation_beta': None,
diff --git a/tools/mo/openvino/tools/mo/ops/RNN.py b/tools/mo/openvino/tools/mo/ops/RNN.py
index 89ff2ae84db901..ea73368cc1fa10 100644
--- a/tools/mo/openvino/tools/mo/ops/RNN.py
+++ b/tools/mo/openvino/tools/mo/ops/RNN.py
@@ -79,7 +79,7 @@ def reverse_infer(node: Node):
         input_size = get_rnn_input_size(node)
         batch_size, seq_len = get_rnn_batch_size_and_seq_len(node)

-        # MXNet, ONNX has the same input layout
+        # ONNX has the same input layout
         input_shape = shape_array([seq_len, batch_size, input_size])
         if node.format == 'tf':
             input_shape = shape_array([batch_size, seq_len, input_size])
@@ -127,12 +127,8 @@ def rnn_infer(node: Node, out_ports=None):

     num_directions = 2 if node.direction in ['bidirectional'] else 1
     if node.has_num_directions:
-        if node.format == 'mxnet' and node.normalized is False:
-            # In MXNet RNN layer return output with shape [seq_len, batch_size, hidden_size * num_directions]
-            out_shape[-1] *= num_directions
-        else:
-            # ONNX-like, insert extra dimension to output shape for num_directions
-            out_shape = shape_insert(out_shape, 1, np.int64(num_directions))
+        # ONNX-like, insert extra dimension to output shape for num_directions
+        out_shape = shape_insert(out_shape, 1, np.int64(num_directions))

     # 0 output is required creating it if doesn't exist
     if 0 not in node.out_nodes():
@@ -193,12 +189,7 @@ def get_rnn_batch_size_and_seq_len(node: Node):

     if node.batch_dim == 1:
         seq_len = out_shape[0]
-        if node.format == 'mxnet':
-            assert len(out_shape) == 3, 'incorrect out_shape rank for node {}'.format(node_name)
-            # for MXNet out_shape = [seq_len, batch_size, hidden_size]
-            batch_size = out_shape[1]
-            in_port_with_initial_states = 2
-        elif node.format == 'onnx':
+        if node.format == 'onnx':
             assert len(out_shape) == 4, 'incorrect out_shape rank for node {}'.format(node_name)
             # even for ONNX in extractor 'batch_dim': 1 (front/onnx/lstm_ext.py:26) despite the fact that
             # out_shape = [seq_len, num_directions, batch_size, hidden_size]
@@ -241,29 +232,9 @@ def get_rnn_input_size(node: Node):
         # ONNX weights on input 1 contain only W part, R, and B are connected separately
         # weights_shape = `[num_directions, 4 * hidden_size, input_size]`
         weights_size = node.in_port(1).data.get_shape()
-        assert len(weights_size) == 3, 'incorrect weights ranks for MXNet {} node {}'.format(node.op, node_name)
+        assert len(weights_size) == 3, 'incorrect weights ranks for ONNX {} node {}'.format(node.op, node_name)
         input_size = weights_size[2]
         return input_size
-    elif node.format == 'mxnet':
-        multiplier = node.multiplier
-        hidden_size = node.hidden_size
-        num_layers = node.num_layers
-        direction = 2 if node.has_num_directions else 1
-
-        # for MXNet models we always get flattened weights which contains WRB
-        weights_size = node.in_port(1).data.get_shape()
-        assert len(weights_size) == 1, 'incorrect weights ranks for MXNet {} node {}'.format(node.op, node_name)
-        weights_size = weights_size[0]
-
-        size = hidden_size * direction * multiplier
-        other_layer_params_size = (hidden_size * direction + hidden_size + 2) * size
-        first_layer_params_size = weights_size - (num_layers - 1) * other_layer_params_size
-        # lhe lines above to find first_layer_params_size was taken from MXNetSplitMultiLayers.py:79
-        # input_size can be calculated from the first_layer_params_size
-        # if first_layer_params_size = (input_size + hidden_size + 2) * size
-        # then input_size = first_layer_params_size / size - 2 - hidden_size
-        input_size = first_layer_params_size / size - 2 - hidden_size
-        return input_size
     elif node.format == 'tf':
         log.error('reverse infer for TensorFlow RNN operation {} is not implemented yet'.format(node_name),
                   extra={'is_warning': True})
diff --git a/tools/mo/openvino/tools/mo/ops/arange_like.py b/tools/mo/openvino/tools/mo/ops/arange_like.py
deleted file mode 100644
index fd78a6da8ffe91..00000000000000
--- a/tools/mo/openvino/tools/mo/ops/arange_like.py
+++ /dev/null
@@ -1,29 +0,0 @@
-# Copyright (C) 2018-2021 Intel Corporation
-# SPDX-License-Identifier: Apache-2.0
-from openvino.tools.mo.graph.graph import Graph
-from openvino.tools.mo.ops.op import Op
-
-
-class ArangeLikeOp(Op):
-    """
-    MXNet operation which returns a sequence of numbers. If axis attribute is None, the output has the
-    same shape as the input.
Otherwise, the output is a 1D array with size of the specified axis. - - Attributes: - start - Start of interval - step - Spacing between values - repeat - The repeating time of all elements. Now we can support only default value (= 1) - axis - Arange elements according to the size of a certain axis of input array. Defualt value is None - - """ - op = 'arange_like' - - def __init__(self, graph: Graph, attrs: dict): - mandatory_props = { - 'type': None, - 'op': self.op, - 'infer': None, - 'in_ports_count': 1, - 'out_ports_count': 1, - } - super().__init__(graph, mandatory_props, attrs) \ No newline at end of file diff --git a/tools/mo/openvino/tools/mo/ops/convolution.py b/tools/mo/openvino/tools/mo/ops/convolution.py index 20a402aa1a59bd..6a75ad1d45b39f 100644 --- a/tools/mo/openvino/tools/mo/ops/convolution.py +++ b/tools/mo/openvino/tools/mo/ops/convolution.py @@ -106,7 +106,7 @@ def infer(node: Node): weights_index = node.weights_index if node.has_valid('weights_index') else 1 # Reshape weights kernel to original shape - # In case of caffe or MXNet framework, values for weights have no structured shape like OIHW + # In case of Caffe framework, values for weights have no structured shape like OIHW # so we have to reshape weights to normal shape # For this case, Convolution node should have attribute reshape_kernel = True if node.has_valid('reshape_kernel') and node.reshape_kernel: diff --git a/tools/mo/openvino/tools/mo/ops/cumsum.py b/tools/mo/openvino/tools/mo/ops/cumsum.py index 413cc4d904ffdc..0cd72270a6153e 100644 --- a/tools/mo/openvino/tools/mo/ops/cumsum.py +++ b/tools/mo/openvino/tools/mo/ops/cumsum.py @@ -57,19 +57,3 @@ def infer(node: Node): reverse = node.reverse if node.has_valid('reverse') else False exclusive = node.exclusive if node.has_valid('exclusive') else False node.out_port(0).data.set_value(cumsum(input_value, axis=axis, reverse=reverse, exclusive=exclusive)) - - -class MXNetCumSum(Op): - enabled = False - op = 'MXNetCumSum' - - def __init__(self, graph: Graph, attrs: dict): - super().__init__(graph, { - 'op': self.op, - 'type': None, - - 'infer': None, - - 'in_ports_count': 1, - 'out_ports_count': 1, - }, attrs) diff --git a/tools/mo/openvino/tools/mo/ops/div_sqrt_dim.py b/tools/mo/openvino/tools/mo/ops/div_sqrt_dim.py deleted file mode 100644 index e8c4ef3720c60f..00000000000000 --- a/tools/mo/openvino/tools/mo/ops/div_sqrt_dim.py +++ /dev/null @@ -1,22 +0,0 @@ -# Copyright (C) 2018-2024 Intel Corporation -# SPDX-License-Identifier: Apache-2.0 -from openvino.tools.mo.graph.graph import Graph -from openvino.tools.mo.ops.op import Op - - -class DivSqrtDimOp(Op): - """ - MXNet operation that matches the formula out = (data / sqrt(data.shape[-1])). - Will be replaced with the corresponding sub-graph - """ - op = '_contrib_div_sqrt_dim' - - def __init__(self, graph: Graph, attrs: dict): - mandatory_props = { - 'type': None, - 'op': self.op, - 'infer': None, - 'in_ports_count': 1, - 'out_ports_count': 1, - } - super().__init__(graph, mandatory_props, attrs) diff --git a/tools/mo/openvino/tools/mo/ops/eye.py b/tools/mo/openvino/tools/mo/ops/eye.py index 2eb384c21ad10e..73e52da5ea0d6f 100644 --- a/tools/mo/openvino/tools/mo/ops/eye.py +++ b/tools/mo/openvino/tools/mo/ops/eye.py @@ -110,24 +110,3 @@ def __init__(self, graph: Graph, attrs: dict): 'out_ports_count': 1, 'output_type': np.float32, }, attrs) - - -class MXEye(Op): - """ Eye operation that that generates shift matrix or a batch of matrices. - Eye operation from MXNet doesn't have inputs. 
Only attributes: row number, column number and diagonal index
-    """
-    op = 'MXEye'
-    enabled = False
-
-    def __init__(self, graph: Graph, attrs: dict):
-        super().__init__(graph, {
-            'type': None,
-            'op': self.op,
-            'infer': None,
-            'in_ports_count': 0,
-            'out_ports_count': 1,
-            'num_rows': 1,
-            'num_columns': 1,
-            'diagonal_index': 0,
-            'output_type': np.float32,
-        }, attrs)
diff --git a/tools/mo/openvino/tools/mo/ops/lstm_sequence.py b/tools/mo/openvino/tools/mo/ops/lstm_sequence.py
index 993e66459333e4..8db2e629c9af92 100644
--- a/tools/mo/openvino/tools/mo/ops/lstm_sequence.py
+++ b/tools/mo/openvino/tools/mo/ops/lstm_sequence.py
@@ -54,7 +54,7 @@ def supported_attrs(self):
             'sequence_dim',  # sequence dimension index in input shape
             'blobs_wrb',  # input blobs have three separate components W, R and B like in ONNX/LSTM
             'has_num_directions',  # if True, output shape has 4 dimensions; 3D otherwise
-            'format',  # format type of input blobs for different frameworks (onnx, tf, mxnet)
+            'format',  # format type of input blobs for different frameworks (onnx, tf)
         ]

     def backend_attrs(self):
diff --git a/tools/mo/openvino/tools/mo/ops/mxreshape.py b/tools/mo/openvino/tools/mo/ops/mxreshape.py
deleted file mode 100644
index 1d318b0a518fe8..00000000000000
--- a/tools/mo/openvino/tools/mo/ops/mxreshape.py
+++ /dev/null
@@ -1,24 +0,0 @@
-# Copyright (C) 2018-2024 Intel Corporation
-# SPDX-License-Identifier: Apache-2.0
-
-from openvino.tools.mo.graph.graph import Graph
-from openvino.tools.mo.ops.op import Op
-
-
-class MXReshape(Op):
-    '''
-    The is internal op which use for resolving different reshape mxnet cases
-    '''
-    op = 'MXReshape'
-    enabled = False
-
-    def __init__(self, graph: Graph, attrs: dict):
-        super().__init__(graph, {
-            'op': self.op,
-            'type': None,
-            'dim': None,
-            'reverse': False,
-            'in_ports_count': 2,
-            'out_ports_count': 1,
-            'infer': None,
-        }, attrs)
diff --git a/tools/mo/openvino/tools/mo/ops/slice.py b/tools/mo/openvino/tools/mo/ops/slice.py
index a237f9f6e40f99..c59aba17799a82 100644
--- a/tools/mo/openvino/tools/mo/ops/slice.py
+++ b/tools/mo/openvino/tools/mo/ops/slice.py
@@ -16,7 +16,6 @@
 A number of transformations take place on the front phase to convert framework slicing:
  - AttributedSlice, TFSlice -> Slice
  - CaffeSlice -> Split
- - MXSlice -> StridedSlice
 """
@@ -59,7 +58,7 @@ def __init__(self, graph: Graph, attrs: dict):

 class TFSlice(Op):
     """
-    TFSlice differs from Slice in ONNX, Caffe and MXNet.
+    TFSlice differs from Slice in ONNX and Caffe.
     TFSlice has 'begin' and 'size' inputs while Slice has 'start', 'end', 'step', and 'axis' inputs.
     https://www.tensorflow.org/api_docs/python/tf/slice
     Is replaced with internal Slice op on the front phase.
@@ -77,26 +76,6 @@ def __init__(self, graph: Graph, attrs: dict):
         }, attrs)


-class MXSlice(Op):
-    """
-    Slice operation in MXNet is different from ONNX, Caffe, Tensorflow. It has begin, end & step attributes
-    https://mxnet.apache.org/versions/1.6/api/python/docs/api/symbol/op/index.html#mxnet.symbol.op.slice
-    Is replaced with the StridedSlice from opset on the front phase.
- """ - op = 'MXSlice' - enabled = False - - def __init__(self, graph: Graph, attrs: dict): - super().__init__(graph, { - 'kind': 'op', - 'type': None, - 'op': self.op, - 'in_ports_count': 1, - 'out_ports_count': 1, - 'infer': None - }, attrs) - - def slice_infer(node: Node, steps_idx: int, axes_idx: int): input_value = node.in_port(0).data.get_value() input_shape = node.in_port(0).data.get_shape() diff --git a/tools/mo/openvino/tools/mo/utils/cli_parser.py b/tools/mo/openvino/tools/mo/utils/cli_parser.py index c3def8d2b7892a..edea30d077878b 100644 --- a/tools/mo/openvino/tools/mo/utils/cli_parser.py +++ b/tools/mo/openvino/tools/mo/utils/cli_parser.py @@ -898,7 +898,7 @@ def get_common_cli_parser(parser: argparse.ArgumentParser = None): action='store_true', default=False) common_group.add_argument("--use_legacy_frontend", help='Force the usage of legacy Frontend for model conversion into IR. ' - 'The legacy Frontend is Python based and is available for TensorFlow*, ONNX*, MXNet*, ' + 'The legacy Frontend is Python based and is available for TensorFlow*, ONNX*, ' 'Caffe*, and Kaldi* models.', action='store_true', default=False) add_args_by_description(common_group, mo_convert_params_common) @@ -958,18 +958,6 @@ def get_tf_cli_options(): return OrderedDict(sorted(d.items(), key=lambda t: t[0])) -def get_mxnet_cli_options(): - d = { - 'input_symbol': '- Deploy-ready symbol file', - 'nd_prefix_name': '- Prefix name for args.nd and argx.nd files', - 'pretrained_model_name': '- Pretrained model to be merged with the .nd files', - 'save_params_from_nd': '- Enable saving built parameters file from .nd files', - 'legacy_mxnet_model': '- Enable MXNet loader for models trained with MXNet version lower than 1.0.0', - } - - return OrderedDict(sorted(d.items(), key=lambda t: t[0])) - - def get_kaldi_cli_options(): d = { 'counts': '- A file name with full path to the counts file or empty string if you want to use counts from model', @@ -1032,28 +1020,9 @@ def get_tf_cli_parser(parser: argparse.ArgumentParser = None): return parser -def get_mxnet_cli_parser(parser: argparse.ArgumentParser = None): - """ - Specifies cli arguments for Model Conversion for MXNet* - - Returns - ------- - ArgumentParser instance - """ - if not parser: - parser = argparse.ArgumentParser(usage='%(prog)s [options]') - get_common_cli_parser(parser=parser) - - mx_group = parser.add_argument_group('MXNet-specific parameters') - mo_convert_params_mxnet = get_mo_convert_params()['MXNet-specific parameters:'] - add_args_by_description(mx_group, mo_convert_params_mxnet) - - return parser - - def get_kaldi_cli_parser(parser: argparse.ArgumentParser = None): """ - Specifies cli arguments for Model Conversion for MXNet* + Specifies cli arguments for Model Conversion for Kaldi* Returns ------- @@ -1099,7 +1068,6 @@ def get_all_cli_parser(): get_common_cli_parser(parser=parser) get_tf_cli_parser(parser=parser) get_caffe_cli_parser(parser=parser) - get_mxnet_cli_parser(parser=parser) get_kaldi_cli_parser(parser=parser) get_onnx_cli_parser(parser=parser) diff --git a/tools/mo/openvino/tools/mo/utils/guess_framework.py b/tools/mo/openvino/tools/mo/utils/guess_framework.py index 39a6c6ed9967ad..d9058bed878e76 100644 --- a/tools/mo/openvino/tools/mo/utils/guess_framework.py +++ b/tools/mo/openvino/tools/mo/utils/guess_framework.py @@ -11,8 +11,6 @@ def deduce_legacy_frontend_by_namespace(argv: Namespace): if not hasattr(argv, 'framework') or not argv.framework: if getattr(argv, 'saved_model_dir', None) or getattr(argv, 'input_meta_graph', None): 
argv.framework = 'tf' - elif getattr(argv, 'input_symbol', None) or getattr(argv, 'pretrained_model_name', None): - argv.framework = 'mxnet' elif getattr(argv, 'input_proto', None): argv.framework = 'caffe' elif argv.input_model is None: @@ -20,7 +18,7 @@ def deduce_legacy_frontend_by_namespace(argv: Namespace): else: argv.framework = guess_framework_by_ext(argv.input_model) - return map(lambda x: argv.framework == x, ['tf', 'caffe', 'mxnet', 'kaldi', 'onnx']) + return map(lambda x: argv.framework == x, ['tf', 'caffe', 'kaldi', 'onnx']) def guess_framework_by_ext(input_model_path: str) -> int: @@ -30,8 +28,6 @@ def guess_framework_by_ext(input_model_path: str) -> int: return 'tf' elif re.match(r'^.*\.pbtxt$', input_model_path): return 'tf' - elif re.match(r'^.*\.params$', input_model_path): - return 'mxnet' elif re.match(r'^.*\.nnet$', input_model_path): return 'kaldi' elif re.match(r'^.*\.mdl', input_model_path): diff --git a/tools/mo/openvino/tools/mo/utils/import_extensions.py b/tools/mo/openvino/tools/mo/utils/import_extensions.py index a08640f97a7315..f408b50ff2dd5b 100644 --- a/tools/mo/openvino/tools/mo/utils/import_extensions.py +++ b/tools/mo/openvino/tools/mo/utils/import_extensions.py @@ -75,7 +75,7 @@ def load_dir(framework: str, path: str, get_front_classes: callable): internal_dirs = get_internal_dirs(framework, get_front_classes) prefix = 'openvino.tools.' if ext == 'mo' else '' - exclude_modules = {'tf', 'onnx', 'kaldi', 'mxnet', 'caffe'} + exclude_modules = {'tf', 'onnx', 'kaldi', 'caffe'} exclude_modules.remove(framework) for p in internal_dirs.keys(): diff --git a/tools/mo/openvino/tools/mo/utils/ir_reader/extenders/priorbox_extender.py b/tools/mo/openvino/tools/mo/utils/ir_reader/extenders/priorbox_extender.py index 923a4df2dd5d35..ad301efeb3d865 100644 --- a/tools/mo/openvino/tools/mo/utils/ir_reader/extenders/priorbox_extender.py +++ b/tools/mo/openvino/tools/mo/utils/ir_reader/extenders/priorbox_extender.py @@ -17,11 +17,6 @@ def extend(op: Node): for attr in attrs: PriorBox_extender.attr_restore(op, attr) - if 'framework' in op.graph.graph['cmd_params'] and op.graph.graph['cmd_params'].framework == 'mxnet': - # Need to use separate shape inference function as done in MO pipeline. 
- op['infer'] = multi_box_prior_infer_mxnet - op['stop_attr_upd'] = True - @staticmethod def attr_restore(node: Node, attribute: str, value=None): # Function to restore some specific attr for PriorBox & PriorBoxClustered layers diff --git a/tools/mo/requirements_mxnet.txt b/tools/mo/requirements_mxnet.txt deleted file mode 100644 index bb4ec290ed5912..00000000000000 --- a/tools/mo/requirements_mxnet.txt +++ /dev/null @@ -1,8 +0,0 @@ --c ../constraints.txt -numpy>=1.16.6,<1.27 -mxnet -networkx -defusedxml -urllib3 -requests -fastjsonschema \ No newline at end of file diff --git a/tools/mo/setup.py b/tools/mo/setup.py index 19fc797cf157b0..c2b50ac656dfd2 100644 --- a/tools/mo/setup.py +++ b/tools/mo/setup.py @@ -101,7 +101,7 @@ def read_requirements(path: str) -> List[str]: requirements_txt = [] py_modules = [] for item in os.listdir(): - if re.match(r'requirements_?(tf|tf2|onnx|mxnet|kaldi|caffe)?\.txt', item): + if re.match(r'requirements_?(tf|tf2|onnx|kaldi|caffe)?\.txt', item): requirements_txt.append(item) for item in os.listdir(prefix): if re.match(r'mo(.*)\.py|main(.*)\.py', item): @@ -155,7 +155,6 @@ def find_package_modules(self, package, package_dir): }, package_data={ 'openvino.tools.mo.front.caffe.proto': ['*.proto'], - 'openvino.tools.mo.front.mxnet': ['*.json'], 'openvino.tools.mo.front.onnx': ['*.json'], 'openvino.tools.mo.front.tf': ['*.json'], 'openvino.tools.mo.front.caffe': ['CustomLayersMapping.xml*'] @@ -163,7 +162,6 @@ def find_package_modules(self, package, package_dir): extras_require={ 'caffe': read_requirements('requirements_caffe.txt'), 'kaldi': read_requirements('requirements_kaldi.txt'), - 'mxnet': read_requirements('requirements_mxnet.txt'), 'onnx': read_requirements('requirements_onnx.txt'), 'tensorflow': read_requirements('requirements_tf.txt'), 'tensorflow2': read_requirements('requirements_tf2.txt'), diff --git a/tools/mo/unit_tests/mo/front/mxnet/MXFFTToDFT_test.py b/tools/mo/unit_tests/mo/front/mxnet/MXFFTToDFT_test.py deleted file mode 100644 index 923048475c5955..00000000000000 --- a/tools/mo/unit_tests/mo/front/mxnet/MXFFTToDFT_test.py +++ /dev/null @@ -1,187 +0,0 @@ -# Copyright (C) 2018-2024 Intel Corporation -# SPDX-License-Identifier: Apache-2.0 - - -import pytest - -from openvino.tools.mo.front.mxnet.MXFFTToDFT import MXFFTToDFT -from openvino.tools.mo.front.common.partial_infer.utils import int64_array -from openvino.tools.mo.utils.ir_engine.compare_graphs import compare_graphs -from unit_tests.utils.graph import build_graph - - -fft_graph_node_attrs = { - 'placeholder': {'shape': int64_array([3, 100, 100]), 'type': 'Parameter', 'kind': 'op', 'op': 'Parameter'}, - 'fft': {'kind': 'op', 'op': 'MXFFT', 'is_inverse': False}, - 'abs': {'type': 'Abs', 'kind': 'op', 'op': 'Abs'}, - 'output': {'type': None, 'value': None, 'kind': 'op', 'op': 'Result'}, -} - -fft_graph_edges = [ - ('placeholder', 'fft', {'in': 0}), - ('fft', 'abs'), - ('abs', 'output'), -] - - -ref_converted_fft_graph_node_attrs = { - 'placeholder': {'shape': int64_array([3, 100, 100]), 'type': 'Parameter', 'kind': 'op', 'op': 'Parameter'}, - 'rank': {'kind': 'op', 'op': 'Rank'}, - 'unsqueeze': {'type': 'Unsqueeze', 'kind': 'op', 'op': 'Unsqueeze'}, - 'unsqueeze_axis': { - 'type': 'Const', 'kind': 'op', 'op': 'Const', 'shape': int64_array([1]), 'value': int64_array([-1]) - }, - 'one': { - 'type': 'Const', 'kind': 'op', 'op': 'Const', 'shape': int64_array([]), 'value': int64_array(1) - }, - 'add': {'type': 'Add', 'kind': 'op', 'op': 'Add'}, - 'zero1': { - 'type': 'Const', 'kind': 'op', 
'op': 'Const', 'shape': int64_array([]), 'value': int64_array(0) - }, - 'broadcast1': {'type': 'Broadcast', 'kind': 'op', 'op': 'Broadcast'}, - 'one2': { - 'type': 'Const', 'kind': 'op', 'op': 'Const', 'shape': int64_array([]), 'value': int64_array(1) - }, - 'zero2': { - 'type': 'Const', 'kind': 'op', 'op': 'Const', 'shape': int64_array([]), 'value': int64_array(0) - }, - 'scatter': {'type': 'ScatterUpdate', 'kind': 'op', 'op': 'ScatterUpdate'}, - 'pad': {'type': 'Pad', 'kind': 'op', 'op': 'Pad', 'mode': 'constant'}, - 'fft_axes': { - 'type': 'Const', 'kind': 'op', 'op': 'Const', 'shape': int64_array([1]), 'value': int64_array([-1]) - }, - 'fft': {'kind': 'op', 'op': 'DFT', 'type': 'DFT'}, - 'one3': { - 'type': 'Const', 'kind': 'op', 'op': 'Const', 'shape': int64_array([]), 'value': int64_array(1) - }, - 'sub': {'type': 'Subtract', 'kind': 'op', 'op': 'Sub'}, - 'zero3': { - 'type': 'Const', 'kind': 'op', 'op': 'Const', 'shape': int64_array([]), 'value': int64_array(0) - }, - 'broadcast2': {'type': 'Broadcast', 'kind': 'op', 'op': 'Broadcast'}, - 'm1_2': { - 'type': 'Const', 'kind': 'op', 'op': 'Const', 'shape': int64_array([2]), 'value': int64_array([-1, 2]) - }, - 'concat': {'type': 'Concat', 'kind': 'op', 'op': 'Concat', 'axis': 0}, - 'reshape': {'kind': 'op', 'op': 'Reshape', 'type': 'Reshape'}, - 'abs': {'type': 'Abs', 'kind': 'op', 'op': 'Abs'}, - 'output': {'type': None, 'value': None, 'kind': 'op', 'op': 'Result'}, -} - -ref_converted_fft_graph_edges = [ - ('placeholder', 'rank', {'in': 0, 'out': 0}), - ('placeholder', 'unsqueeze', {'in': 0, 'out': 0}), - ('unsqueeze_axis', 'unsqueeze', {'in': 1, 'out': 0}), - ('rank', 'add', {'in': 0, 'out': 0}), - ('one', 'add', {'in': 1, 'out': 0}), - ('zero1', 'broadcast1', {'in': 0, 'out': 0}), - ('add', 'broadcast1', {'in': 1, 'out': 0}), - ('broadcast1', 'scatter', {'in': 0, 'out': 0}), - ('rank', 'scatter', {'in': 1, 'out': 0}), - ('one2', 'scatter', {'in': 2, 'out': 0}), - ('zero2', 'scatter', {'in': 3, 'out': 0}), - ('unsqueeze', 'pad', {'in': 0, 'out': 0}), - ('broadcast1', 'pad', {'in': 1, 'out': 0}), - ('scatter', 'pad', {'in': 2, 'out': 0}), - ('pad', 'fft', {'in': 0, 'out': 0}), - ('fft_axes', 'fft', {'in': 1, 'out': 0}), - ('rank', 'sub', {'in': 0, 'out': 0}), - ('one3', 'sub', {'in': 1, 'out': 0}), - ('zero3', 'broadcast2', {'in': 0, 'out': 0}), - ('sub', 'broadcast2', {'in': 1, 'out': 0}), - ('broadcast2', 'concat', {'in': 0, 'out': 0}), - ('m1_2', 'concat', {'in': 1, 'out': 0}), - ('fft', 'reshape', {'in': 0, 'out': 0}), - ('concat', 'reshape', {'in': 1, 'out': 0}), - ('reshape', 'abs'), - ('abs', 'output'), -] - - -ref_converted_ifft_graph_node_attrs = { - 'placeholder': {'shape': int64_array([3, 100, 100]), 'type': 'Parameter', 'kind': 'op', 'op': 'Parameter'}, - 'rank': {'kind': 'op', 'op': 'Rank'}, - 'subtracted_one': { - 'type': 'Const', 'kind': 'op', 'op': 'Const', 'shape': int64_array([]), 'value': int64_array(1) - }, - 'sub': {'type': 'Subtract', 'kind': 'op', 'op': 'Sub'}, - 'broadcast': {'type': 'Broadcast', 'kind': 'op', 'op': 'Broadcast'}, - 'broadcasted_value': { - 'type': 'Const', 'kind': 'op', 'op': 'Const', 'shape': int64_array([]), 'value': int64_array(0) - }, - 'new_shape': {'type': 'Concat', 'kind': 'op', 'op': 'Concat', 'axis': 0}, - 'new_shape_const': { - 'type': 'Const', 'kind': 'op', 'op': 'Const', 'shape': int64_array([2]), 'value': int64_array([-1, 2]) - }, - 'reshape': {'kind': 'op', 'op': 'Reshape', 'type': 'Reshape'}, - 'fft': {'kind': 'op', 'op': 'IDFT', 'type': 'IDFT'}, - 'fft_axes': { - 'type': 
'Const', 'kind': 'op', 'op': 'Const', 'shape': int64_array([1]), 'value': int64_array([-1]) - }, - 'split': {'kind': 'op', 'op': 'Split', 'type': 'Split', 'num_splits': 2}, - 'split_axes': { - 'type': 'Const', 'kind': 'op', 'op': 'Const', 'shape': int64_array([]), 'value': int64_array(-1) - }, - 'squeeze': {'kind': 'op', 'op': 'Squeeze', 'type': 'Squeeze'}, - 'squeeze_axes': { - 'type': 'Const', 'kind': 'op', 'op': 'Const', 'shape': int64_array([1]), 'value': int64_array([-1]) - }, - 'abs': {'type': 'Abs', 'kind': 'op', 'op': 'Abs'}, - 'output': {'type': None, 'value': None, 'kind': 'op', 'op': 'Result'}, -} - -ref_converted_ifft_graph_edges = [ - ('placeholder', 'rank', {'out': 0}), - ('placeholder', 'reshape', {'out': 0}), - ('rank', 'sub'), - ('subtracted_one', 'sub'), - ('broadcasted_value', 'broadcast'), - ('sub', 'broadcast'), - ('broadcast', 'new_shape'), - ('new_shape_const', 'new_shape'), - ('new_shape', 'reshape'), - ('reshape', 'fft'), - ('fft_axes', 'fft'), - ('fft', 'split'), - ('split_axes', 'split'), - ('split', 'squeeze', {'out': 0}), - ('squeeze_axes', 'squeeze'), - ('squeeze', 'abs'), - ('abs', 'output'), -] - -class TestMXFFTToDFTTest(): - @pytest.mark.parametrize("input_shape",[int64_array([3, 100, 100, 8]), int64_array([5, 60])]) - def test_fft_replacement(self, input_shape): - graph = build_graph(nodes_attrs=fft_graph_node_attrs, - edges=fft_graph_edges, - update_attributes={ - 'placeholder': {'shape': input_shape} - }) - graph.stage = 'front' - MXFFTToDFT().find_and_replace_pattern(graph) - ref_graph = build_graph(nodes_attrs=ref_converted_fft_graph_node_attrs, - edges=ref_converted_fft_graph_edges, - update_attributes={ - 'placeholder': {'shape': input_shape} - }) - (flag, resp) = compare_graphs(graph, ref_graph, 'output') - assert flag, resp - - @pytest.mark.parametrize("input_shape",[int64_array([3, 100, 100, 8]), int64_array([5, 60])]) - def test_ifft_replacement(self, input_shape): - graph = build_graph(nodes_attrs=fft_graph_node_attrs, - edges=fft_graph_edges, - update_attributes={ - 'placeholder': {'shape': input_shape}, - 'fft': {'is_inverse': True} - }) - graph.stage = 'front' - MXFFTToDFT().find_and_replace_pattern(graph) - ref_graph = build_graph(nodes_attrs=ref_converted_ifft_graph_node_attrs, - edges=ref_converted_ifft_graph_edges, - update_attributes={ - 'placeholder': {'shape': input_shape} - }) - (flag, resp) = compare_graphs(graph, ref_graph, 'output') - assert flag, resp diff --git a/tools/mo/unit_tests/mo/front/mxnet/RNN_ext_test.py b/tools/mo/unit_tests/mo/front/mxnet/RNN_ext_test.py deleted file mode 100644 index 363c7d74c24ab9..00000000000000 --- a/tools/mo/unit_tests/mo/front/mxnet/RNN_ext_test.py +++ /dev/null @@ -1,86 +0,0 @@ -# Copyright (C) 2018-2024 Intel Corporation -# SPDX-License-Identifier: Apache-2.0 - -import numpy as np - -from openvino.tools.mo.front.mxnet.RNN_ext import RNNFrontExtractor -from openvino.tools.mo.utils.error import Error -from unit_tests.mo.unit_test_with_mocked_telemetry import UnitTestWithMockedTelemetry -from unit_tests.utils.extractors import PB - - -class RNNFrontExtractorTest(UnitTestWithMockedTelemetry): - @staticmethod - def _create_node(**attrs): - params = {'attrs': { - **attrs - }} - node = PB({'symbol_dict': params}) - return node - - base_attrs = { - 'batch_dim': 1, - 'sequence_dim': 0, - 'blobs_wrb': False, - 'format': 'mxnet', - 'gate_order': [1, 0, 2, 3], - } - - def test_base_attrs(self): - attrs = { - 'state_size': 128, - 'mode': 'lstm', - } - - additional_attrs = { - 'multilayers': False, - 
'hidden_size': 128, - 'has_num_directions': False, - 'direction': 'forward', - 'num_layers': 1, - } - - node = self._create_node(**attrs) - RNNFrontExtractor.extract(node) - - expect_attrs = {**self.base_attrs, **additional_attrs} - - for key in expect_attrs.keys(): - equal = np.all(np.equal(node[key], expect_attrs[key], dtype=object)) - self.assertTrue(equal, 'Values for attr {} are not equal'.format(key)) - - self.assertTrue(node.op == 'LSTM') - - def test_unsupported_mode(self): - attrs = { - 'state_size': 128, - 'mode': 'abracadabra', - } - node = self._create_node(**attrs) - with self.assertRaises(Error): - RNNFrontExtractor.extract(node) - - def test_additional_attrs(self): - attrs = { - 'state_size': 128, - 'mode': 'lstm', - 'bidirectional': True, - 'num_layers': 2, - } - - additional_attrs = { - 'multilayers': True, - 'hidden_size': 128, - 'has_num_directions': True, - 'direction': 'bidirectional', - 'num_layers': 2, - } - - node = self._create_node(**attrs) - RNNFrontExtractor.extract(node) - - expect_attrs = {**self.base_attrs, **additional_attrs} - - for key in expect_attrs.keys(): - equal = np.all(np.equal(node[key], expect_attrs[key], dtype=object)) - self.assertTrue(equal, 'Values for attr {} are not equal'.format(key)) diff --git a/tools/mo/unit_tests/mo/front/mxnet/__init__.py b/tools/mo/unit_tests/mo/front/mxnet/__init__.py deleted file mode 100644 index e69de29bb2d1d6..00000000000000 diff --git a/tools/mo/unit_tests/mo/front/mxnet/activation_test.py b/tools/mo/unit_tests/mo/front/mxnet/activation_test.py deleted file mode 100644 index d4406affed6a0b..00000000000000 --- a/tools/mo/unit_tests/mo/front/mxnet/activation_test.py +++ /dev/null @@ -1,50 +0,0 @@ -# Copyright (C) 2018-2024 Intel Corporation -# SPDX-License-Identifier: Apache-2.0 - -import unittest - -from openvino.tools.mo.front.mxnet.activation import ActivationFrontExtractor -from openvino.tools.mo.graph.graph import Node -from unit_tests.utils.graph import build_graph - - -class TestActivationFrontExtractorOp(unittest.TestCase): - def test_extract_sigmoid_layer(self): - graph = build_graph( - {'node_1': {'type': 'Parameter', 'kind': 'op', 'op': 'Parameter'}, - 'act_node': {'type': 'Activation', 'kind': 'op', 'op': 'Activation', }, - 'node_2': {'type': 'Identity', 'kind': 'op'}, - }, - [ - ('node_1', 'act_node'), - ('act_node', 'node_2'), - ], - { - 'act_node': {'symbol_dict': {'attrs': {'act_type': 'sigmoid'}}}, - }) - - act_node = Node(graph, 'act_node') - act_extr_op = ActivationFrontExtractor() - supported = act_extr_op.extract(act_node) - self.assertTrue(supported) - self.assertEqual(act_node['op'], 'Sigmoid') - - def test_extract_relu_layer(self): - graph = build_graph( - {'node_1': {'type': 'Parameter', 'kind': 'op', 'op': 'Parameter'}, - 'act_node': {'type': 'relu', 'kind': 'op', 'op': 'Activation', }, - 'node_2': {'type': 'Identity', 'kind': 'op'}, - }, - [ - ('node_1', 'act_node'), - ('act_node', 'node_2'), - ], - { - 'act_node': {'symbol_dict': {'attrs': {'act_type': 'relu'}}}, - }) - - act_node = Node(graph, 'act_node') - act_extr_op = ActivationFrontExtractor() - supported = act_extr_op.extract(act_node) - self.assertTrue(supported) - self.assertEqual(act_node['op'], 'ReLU') diff --git a/tools/mo/unit_tests/mo/front/mxnet/add_input_data_to_prior_boxes_test.py b/tools/mo/unit_tests/mo/front/mxnet/add_input_data_to_prior_boxes_test.py deleted file mode 100644 index ead213b5163e8f..00000000000000 --- a/tools/mo/unit_tests/mo/front/mxnet/add_input_data_to_prior_boxes_test.py +++ /dev/null @@ -1,57 +0,0 
@@ -# Copyright (C) 2018-2024 Intel Corporation -# SPDX-License-Identifier: Apache-2.0 - -import unittest -from argparse import Namespace - -import numpy as np - -from openvino.tools.mo.front.mxnet.add_input_data_to_prior_boxes import AddInputDataToPriorBoxes -from openvino.tools.mo.graph.graph import Node -from unit_tests.utils.graph import build_graph - - -class TestMxnetPipeline(unittest.TestCase): - def test_mxnet_pipeline_1(self): - graph = build_graph( - {'data': {'type': 'Identity', 'value': None, 'kind': 'op', 'op': 'Parameter'}, - 'node_2': {'type': 'Identity', 'value': None, 'kind': 'op'}, - 'node_multi_box': {'type': '_contrib_MultiBoxPrior', 'kind': 'op', 'op': '_contrib_MultiBoxPrior'}, - }, - [('data', 'node_2'), - ('node_2', 'node_multi_box')], - { - 'data': {'shape': np.array([1, 3, 227, 227])}, - 'node_2': {'shape': np.array([1, 3, 10, 10])}, - }) - - graph.graph['cmd_params'] = Namespace(input=None) - AddInputDataToPriorBoxes().find_and_replace_pattern(graph) - node_multi_box = Node(graph, 'node_multi_box') - - node_input1 = node_multi_box.in_node(0) - node_input2 = node_multi_box.in_node(1) - self.assertEqual(node_input1.name, 'node_2') - self.assertEqual(node_input2.name, 'data') - - def test_mxnet_pipeline_2(self): - graph = build_graph( - {'node_1': {'type': 'Identity', 'value': None, 'kind': 'op', 'op': 'Parameter'}, - 'node_2': {'type': 'Identity', 'value': None, 'kind': 'op'}, - 'node_multi_box': {'type': '_contrib_MultiBoxPrior', 'kind': 'op', 'op': '_contrib_MultiBoxPrior'}, - }, - [('node_1', 'node_2'), - ('node_2', 'node_multi_box')], - { - 'node_1': {'shape': np.array([1, 3, 227, 227])}, - 'node_2': {'shape': np.array([1, 3, 10, 10])}, - }) - - graph.graph['cmd_params'] = Namespace(input='node_1') - AddInputDataToPriorBoxes().find_and_replace_pattern(graph) - node_multi_box = Node(graph, 'node_multi_box') - - node_input1 = node_multi_box.in_node(0) - node_input2 = node_multi_box.in_node(1) - self.assertEqual(node_input1.name, 'node_2') - self.assertEqual(node_input2.name, 'node_1') diff --git a/tools/mo/unit_tests/mo/front/mxnet/arange_like_test.py b/tools/mo/unit_tests/mo/front/mxnet/arange_like_test.py deleted file mode 100644 index c93d254824fe4b..00000000000000 --- a/tools/mo/unit_tests/mo/front/mxnet/arange_like_test.py +++ /dev/null @@ -1,238 +0,0 @@ -# Copyright (C) 2018-2021 Intel Corporation -# SPDX-License-Identifier: Apache-2.0 -import unittest -from openvino.tools.mo.front.common.partial_infer.utils import int64_array -from openvino.tools.mo.front.mxnet.arange_like_replacer import ArangeLikeReplacer -from openvino.tools.mo.utils.ir_engine.compare_graphs import compare_graphs -from unit_tests.utils.graph import build_graph, shaped_parameter, regular_op_with_empty_data, result, connect, \ - shaped_const_with_data, connect_data - - -class ArangeLikeReplacerTest(unittest.TestCase): - def test_axis_not_none_start_0(self): - graph = build_graph( - nodes_attrs={ - **shaped_parameter('input', int64_array([1, 3, 5, 5])), - **regular_op_with_empty_data('arange_like', {'op': 'arange_like', 'type': None, 'axis': 3, 'repeat': 1, - 'start': 0, 'step': 1}), - **result('result') - }, - edges=[ - *connect('input', 'arange_like'), - *connect('arange_like', 'result') - ] - ) - ref_graph = build_graph( - nodes_attrs={ - **shaped_parameter('input', int64_array([1, 3, 5, 5])), - **regular_op_with_empty_data('shape_of', {'op': 'ShapeOf', 'type': 'ShapeOf'}), - **shaped_const_with_data('gather_axis', None), - **shaped_const_with_data('gather_indices', None), - 
**regular_op_with_empty_data('gather', {'op': 'Gather', 'type': 'Gather'}), - **shaped_const_with_data('range_start', None), - **shaped_const_with_data('range_step', None), - **shaped_const_with_data('squeeze_const', None), - **regular_op_with_empty_data('squeeze', {'op': 'Squeeze', 'type': 'Squeeze'}), - **regular_op_with_empty_data('range', {'op': 'Range', 'type': 'Range'}), - **result('result') - }, - edges=[ - *connect('input', 'shape_of'), - *connect('shape_of', '0:gather'), - *connect('gather_axis', '1:gather'), - *connect('gather_indices', '2:gather'), - *connect('range_start', '0:range'), - *connect('gather', '0:squeeze'), - *connect('squeeze_const', '1:squeeze'), - *connect('squeeze', '1:range'), - *connect('range_step', '2:range'), - *connect('range', 'result') - ], - update_attributes={ - 'gather_axis': {'value': 3}, - 'gather_indices': {'value': 0}, - 'range_start': {'value': 0}, - 'range_step': {'value': 1} - } - ) - ArangeLikeReplacer().find_and_replace_pattern(graph) - flag, resp = compare_graphs(graph, ref_graph, 'result', 'result', check_op_attrs=True) - self.assertTrue(flag, resp) - - def test_axis_not_none_start_1_step_2(self): - graph = build_graph( - nodes_attrs={ - **shaped_parameter('input', int64_array([1, 3, 5, 5])), - **regular_op_with_empty_data('arange_like', {'op': 'arange_like', 'type': None, 'axis': 3, 'repeat': 1, - 'start': 1, 'step': 2}), - **result('result') - }, - edges=[ - *connect('input', 'arange_like'), - *connect('arange_like', 'result') - ] - ) - ref_graph = build_graph( - nodes_attrs={ - **shaped_parameter('input', int64_array([1, 3, 5, 5])), - **regular_op_with_empty_data('shape_of', {'op': 'ShapeOf', 'type': 'ShapeOf'}), - **shaped_const_with_data('gather_axis', None), - **shaped_const_with_data('gather_indices', None), - **regular_op_with_empty_data('gather', {'op': 'Gather', 'type': 'Gather'}), - **regular_op_with_empty_data('mul', {'op': 'Mul', 'type': 'Multiply'}), - **shaped_const_with_data('mul_const', None), - **shaped_const_with_data('range_start', None), - **shaped_const_with_data('range_step', None), - **shaped_const_with_data('add_const', None), - **regular_op_with_empty_data('add', {'op': 'Add', 'type': 'Add'}), - **shaped_const_with_data('squeeze_const', None), - **regular_op_with_empty_data('squeeze', {'op': 'Squeeze', 'type': 'Squeeze'}), - **regular_op_with_empty_data('range', {'op': 'Range', 'type': 'Range'}), - **regular_op_with_empty_data('slice', {'op': 'Slice', 'type': None}), - **shaped_const_with_data('slice_start', None), - **shaped_const_with_data('slice_axes', None), - **shaped_const_with_data('slice_step', None), - **result('result') - }, - edges=[ - *connect('input', 'shape_of'), - *connect('shape_of', '0:gather'), - *connect('gather_axis', '1:gather'), - *connect('gather_indices', '2:gather'), - *connect('range_start', '0:range'), - *connect('gather', '0:mul'), - *connect('mul_const', '1:mul'), - *connect('mul', '0:add'), - *connect('add_const', '1:add'), - *connect('squeeze_const', '1:squeeze'), - *connect('add', '0:squeeze'), - *connect('squeeze', '1:range'), - *connect('range_step', '2:range'), - *connect('range', '0:slice'), - *connect('slice_start', '1:slice'), - *connect_data('gather', '2:slice'), - *connect('slice_axes', '3:slice'), - *connect('slice_step', '4:slice'), - *connect('slice', 'result') - ], - update_attributes={ - 'gather_axis': {'value': 3}, - 'gather_indices': {'value': 0}, - 'range_start': {'value': 1}, - 'range_step': {'value': 2}, - 'add_const': {'value': 1}, - 'mul_const': {'value': 2}, - 
'slice_start': {'value': int64_array([0])}, - 'slice_axes': {'value': int64_array([0])}, - 'slice_step': {'value': int64_array([1])}, - } - ) - ArangeLikeReplacer().find_and_replace_pattern(graph) - flag, resp = compare_graphs(graph, ref_graph, 'result', 'result', check_op_attrs=True) - self.assertTrue(flag, resp) - - def test_axis_none_start_0(self): - graph = build_graph( - nodes_attrs={ - **shaped_parameter('input', int64_array([1, 3, 5, 5])), - **regular_op_with_empty_data('arange_like', {'op': 'arange_like', 'type': None, 'axis': None, - 'repeat': 1, 'start': 0, 'step': 1}), - **result('result') - }, - edges=[ - *connect('input', 'arange_like'), - *connect('arange_like', 'result') - ] - ) - ref_graph = build_graph( - nodes_attrs={ - **shaped_parameter('input', int64_array([1, 3, 5, 5])), - **regular_op_with_empty_data('shape_of', {'op': 'ShapeOf', 'type': 'ShapeOf'}), - **regular_op_with_empty_data('reduce_prod', {'op': 'ReduceProd', 'type': 'ReduceProd'}), - **shaped_const_with_data('reduce_prod_const', None), - **shaped_const_with_data('squeeze_const', None), - **regular_op_with_empty_data('squeeze', {'op': 'Squeeze', 'type': 'Squeeze'}), - **shaped_const_with_data('range_start', None), - **shaped_const_with_data('range_step', None), - **regular_op_with_empty_data('range', {'op': 'Range', 'type': 'Range'}), - **regular_op_with_empty_data('reshape_backward', {'op': 'Reshape', 'type': 'Reshape'}), - **result('result') - }, - edges=[ - *connect('input', 'shape_of'), - *connect('shape_of', '0:reduce_prod'), - *connect('reduce_prod_const', '1:reduce_prod'), - *connect('squeeze_const', '1:squeeze'), - *connect('reduce_prod', '0:squeeze'), - *connect('range_start', '0:range'), - *connect('range_step', '2:range'), - *connect('squeeze', '1:range'), - *connect('range', '0:reshape_backward'), - *connect_data('shape_of', '1:reshape_backward'), - *connect('reshape_backward', 'result') - ], - update_attributes={ - 'range_start': {'value': 0}, - 'range_step': {'value': 1}, - 'reduce_prod_const': {'value': int64_array([0])} - } - ) - - ArangeLikeReplacer().find_and_replace_pattern(graph) - flag, resp = compare_graphs(graph, ref_graph, 'result', 'result', check_op_attrs=True) - self.assertTrue(flag, resp) - - def test_axis_none_start_1(self): - graph = build_graph( - nodes_attrs={ - **shaped_parameter('input', int64_array([1, 3, 5, 5])), - **regular_op_with_empty_data('arange_like', {'op': 'arange_like', 'type': None, 'axis': None, - 'repeat': 1, 'start': 1, 'step': 1}), - **result('result') - }, - edges=[ - *connect('input', 'arange_like'), - *connect('arange_like', 'result') - ] - ) - ref_graph = build_graph( - nodes_attrs={ - **shaped_parameter('input', int64_array([1, 3, 5, 5])), - **regular_op_with_empty_data('shape_of', {'op': 'ShapeOf', 'type': 'ShapeOf'}), - **regular_op_with_empty_data('reduce_prod', {'op': 'ReduceProd', 'type': 'ReduceProd'}), - **shaped_const_with_data('reduce_prod_const', None), - **shaped_const_with_data('squeeze_const', None), - **regular_op_with_empty_data('squeeze', {'op': 'Squeeze', 'type': 'Squeeze'}), - **shaped_const_with_data('add_const', None), - **regular_op_with_empty_data('add', {'op': 'Add', 'type': 'Add'}), - **shaped_const_with_data('range_start', None), - **shaped_const_with_data('range_step', None), - **regular_op_with_empty_data('range', {'op': 'Range', 'type': 'Range'}), - **regular_op_with_empty_data('reshape_backward', {'op': 'Reshape', 'type': 'Reshape'}), - **result('result') - }, - edges=[ - *connect('input', 'shape_of'), - *connect('shape_of', 
'0:reduce_prod'), - *connect('reduce_prod_const', '1:reduce_prod'), - *connect('squeeze_const', '1:squeeze'), - *connect('add_const', '1:add'), - *connect('reduce_prod', '0:add'), - *connect('add', '0:squeeze'), - *connect('range_start', '0:range'), - *connect('range_step', '2:range'), - *connect('squeeze', '1:range'), - *connect('range', '0:reshape_backward'), - *connect_data('shape_of', '1:reshape_backward'), - *connect('reshape_backward', 'result') - ], - update_attributes={ - 'range_start': {'value': 1}, - 'range_step': {'value': 1}, - 'add_const': {'value': 1}, - 'reduce_prod_const': {'value': int64_array([0])} - } - ) - ArangeLikeReplacer().find_and_replace_pattern(graph) - flag, resp = compare_graphs(graph, ref_graph, 'result', 'result', check_op_attrs=True) - self.assertTrue(flag, resp) diff --git a/tools/mo/unit_tests/mo/front/mxnet/check_softmax_node_inputs_test.py b/tools/mo/unit_tests/mo/front/mxnet/check_softmax_node_inputs_test.py deleted file mode 100644 index e905fc5ee62a37..00000000000000 --- a/tools/mo/unit_tests/mo/front/mxnet/check_softmax_node_inputs_test.py +++ /dev/null @@ -1,47 +0,0 @@ -# Copyright (C) 2018-2024 Intel Corporation -# SPDX-License-Identifier: Apache-2.0 - -import unittest - -from openvino.tools.mo.front.mxnet.check_softmax_node_inputs import CheckSoftmaxNodeInputs -from openvino.tools.mo.graph.graph import Node -from unit_tests.utils.graph import build_graph - - -class TestCheckSoftmaxNodeInputs(unittest.TestCase): - def test_remove_softmax_output_input(self): - graph = build_graph( - {'node_1': {'type': 'Identity', 'value': None, 'kind': 'op', 'op': 'Parameter'}, - 'node_2': {'type': 'Identity', 'value': None, 'kind': 'op', 'op': 'Parameter'}, - 'softmax': {'type': 'SoftmaxOutput', 'value': None, 'kind': 'op', 'op': 'SoftmaxOutput'}, - }, - [('node_1', 'softmax'), - ('node_2', 'softmax') - ]) - - pattern = CheckSoftmaxNodeInputs() - pattern.find_and_replace_pattern(graph) - - node_softmax = Node(graph, 'softmax') - - self.assertEqual(len(node_softmax.in_nodes()), 1) - - node_input1 = node_softmax.in_node(0) - self.assertEqual(node_input1.name, 'node_1') - - def test_remove_softmax_activation_input(self): - graph = build_graph( - {'node_1': {'type': 'Identity', 'value': None, 'kind': 'op', 'op': 'Parameter'}, - 'softmax': {'type': 'SoftmaxActivation', 'value': None, 'kind': 'op', 'op': 'SoftmaxActivation'}, - }, - [('node_1', 'softmax')]) - - pattern = CheckSoftmaxNodeInputs() - pattern.find_and_replace_pattern(graph) - - node_softmax = Node(graph, 'softmax') - - self.assertEqual(len(node_softmax.in_nodes()), 1) - - node_input1 = node_softmax.in_node(0) - self.assertEqual(node_input1.name, 'node_1') diff --git a/tools/mo/unit_tests/mo/front/mxnet/conv_ext_test.py b/tools/mo/unit_tests/mo/front/mxnet/conv_ext_test.py deleted file mode 100644 index 7bd59dfca07a7b..00000000000000 --- a/tools/mo/unit_tests/mo/front/mxnet/conv_ext_test.py +++ /dev/null @@ -1,206 +0,0 @@ -# Copyright (C) 2018-2024 Intel Corporation -# SPDX-License-Identifier: Apache-2.0 - -import unittest - -import numpy as np - -from openvino.tools.mo.front.mxnet.conv_ext import DeconvFrontExtractor -from unit_tests.utils.extractors import PB - - -class TestDeconvShapesParsing(unittest.TestCase): - def test_conv_ext_ideal_numbers(self): - params = {'attrs': { - "kernel": "(4, 4)", - "no_bias": "True", - "num_filter": "21", - "num_group": "14", - "pad": "(4, 4)", - "stride": "(2, 2)", - "dilate": "(3, 3)", - "workspace": "1536" - }} - node = PB({'symbol_dict': params}) - 
DeconvFrontExtractor.extract(node) - exp_res = { - 'op': 'Deconvolution', - 'pad': np.array([[0, 0], [0, 0], [4, 4], [4, 4]]), - 'pad_spatial_shape': np.array([[4, 4], [4, 4]]), - 'stride': np.array([1, 1, 2, 2]), - 'kernel_spatial': np.array([4, 4]), - 'dilation': np.array([1, 1, 3, 3]), - 'group': 14, - 'output': 21, - 'bias_addable': True, - 'bias_term': False, - } - for key in exp_res.keys(): - if key in ('pad', 'pad_spatial_shape', 'stride', 'kernel_spatial', 'dilation'): - np.testing.assert_equal(node[key], exp_res[key]) - else: - self.assertEqual(node[key], exp_res[key]) - - - def test_conv_ext_no_bias(self): - params = { 'attrs':{ - "kernel": "(4, 4)", - "num_filter": "21", - "num_group": "14", - "pad": "(4, 4)", - "stride": "(2, 2)", - "dilate": "(3, 3)", - "workspace": "1536" - }} - node = PB({'symbol_dict': params}) - DeconvFrontExtractor.extract(node) - exp_res = { - 'op': 'Deconvolution', - 'pad': np.array([[0, 0], [0, 0], [4, 4], [4, 4]]), - 'pad_spatial_shape': np.array([[4, 4], [4, 4]]), - 'stride': np.array([1, 1, 2, 2]), - 'kernel_spatial': np.array([4, 4]), - 'dilation': np.array([1, 1, 3, 3]), - 'group': 14, - 'output': 21, - 'bias_addable': True, - 'bias_term': False, - } - for key in exp_res.keys(): - if key in ('pad', 'pad_spatial_shape', 'stride', 'kernel_spatial', 'dilation'): - np.testing.assert_equal(node[key], exp_res[key]) - else: - self.assertEqual(node[key], exp_res[key]) - - - def test_conv_ext_with_bias(self): - params = { 'attrs':{ - "kernel": "(4, 4)", - "no_bias": "False", - "num_filter": "21", - "num_group": "14", - "pad": "(4, 4)", - "stride": "(2, 2)", - "dilate": "(3, 3)", - "workspace": "1536" - }} - node = PB({'symbol_dict': params}) - DeconvFrontExtractor.extract(node) - exp_res = { - 'op': 'Deconvolution', - 'pad': np.array([[0, 0], [0, 0], [4, 4], [4, 4]]), - 'pad_spatial_shape': np.array([[4, 4], [4, 4]]), - 'stride': np.array([1, 1, 2, 2]), - 'kernel_spatial': np.array([4, 4]), - 'dilation': np.array([1, 1, 3, 3]), - 'group': 14, - 'output': 21, - 'bias_addable': True, - 'bias_term': True, - } - for key in exp_res.keys(): - if key in ('pad', 'pad_spatial_shape', 'stride', 'kernel_spatial', 'dilation'): - np.testing.assert_equal(node[key], exp_res[key]) - else: - self.assertEqual(node[key], exp_res[key]) - - - def test_deconv_ext_target_shape(self): - params = {'attrs': { - "kernel": "(4, 4)", - "no_bias": "True", - "num_filter": "21", - "num_group": "14", - "pad": "(4, 4)", - "stride": "(2, 2)", - "dilate": "(3, 3)", - "workspace": "1536", - "target_shape": "(120, 120)" - }} - node = PB({'symbol_dict': params}) - DeconvFrontExtractor.extract(node) - exp_res = { - 'op': 'Deconvolution', - 'pad': np.array([[0, 0], [0, 0], [4, 4], [4, 4]]), - 'pad_spatial_shape': np.array([[4, 4], [4, 4]]), - 'stride': np.array([1, 1, 2, 2]), - 'kernel_spatial': np.array([4, 4]), - 'dilation': np.array([1, 1, 3, 3]), - 'group': 14, - 'output': 21, - 'bias_addable': True, - 'bias_term': False, - 'output_spatial_shape': np.array([120, 120]), - } - for key in exp_res.keys(): - if key in ('pad', 'pad_spatial_shape', 'stride', 'kernel_spatial', 'dilation', 'output_spatial_shape'): - np.testing.assert_equal(node[key], exp_res[key]) - else: - self.assertEqual(node[key], exp_res[key]) - - def test_deconv_ext_output_pad(self): - params = {'attrs': { - "kernel": "(4, 4)", - "no_bias": "True", - "num_filter": "21", - "num_group": "14", - "pad": "(4, 4)", - "stride": "(2, 2)", - "dilate": "(3, 3)", - "workspace": "1536", - "adj": "(1, 1)" - }} - node = PB({'symbol_dict': 
params}) - DeconvFrontExtractor.extract(node) - exp_res = { - 'op': 'Deconvolution', - 'pad': np.array([[0, 0], [0, 0], [4, 4], [4, 4]]), - 'pad_spatial_shape': np.array([[4, 4], [4, 4]]), - 'stride': np.array([1, 1, 2, 2]), - 'kernel_spatial': np.array([4, 4]), - 'dilation': np.array([1, 1, 3, 3]), - 'group': 14, - 'output': 21, - 'bias_addable': True, - 'bias_term': False, - 'output_padding': np.array([0, 0, 1, 1]), - } - for key in exp_res.keys(): - if key in ('pad', 'pad_spatial_shape', 'stride', 'kernel_spatial', 'dilation', 'output_spatial_shape', 'output_padding'): - np.testing.assert_equal(node[key], exp_res[key]) - else: - self.assertEqual(node[key], exp_res[key]) - - def test_deconv_ext_target_shape_with_output_pad(self): - params = {'attrs': { - "kernel": "(4, 4)", - "no_bias": "True", - "num_filter": "21", - "num_group": "14", - "pad": "(4, 4)", - "stride": "(2, 2)", - "dilate": "(3, 3)", - "workspace": "1536", - "target_shape": "(120, 120)", - "adj": "(1, 1)" - }} - node = PB({'symbol_dict': params}) - DeconvFrontExtractor.extract(node) - exp_res = { - 'op': 'Deconvolution', - 'pad': np.array([[0, 0], [0, 0], [4, 4], [4, 4]]), - 'pad_spatial_shape': np.array([[4, 4], [4, 4]]), - 'stride': np.array([1, 1, 2, 2]), - 'kernel_spatial': np.array([4, 4]), - 'dilation': np.array([1, 1, 3, 3]), - 'group': 14, - 'output': 21, - 'bias_addable': True, - 'bias_term': False, - 'output_spatial_shape': np.array([120, 120]), - } - for key in exp_res.keys(): - if key in ('pad', 'pad_spatial_shape', 'stride', 'kernel_spatial', 'dilation', 'output_spatial_shape'): - np.testing.assert_equal(node[key], exp_res[key]) - else: - self.assertEqual(node[key], exp_res[key]) diff --git a/tools/mo/unit_tests/mo/front/mxnet/custom_test.py b/tools/mo/unit_tests/mo/front/mxnet/custom_test.py deleted file mode 100644 index 86063846156d04..00000000000000 --- a/tools/mo/unit_tests/mo/front/mxnet/custom_test.py +++ /dev/null @@ -1,44 +0,0 @@ -# Copyright (C) 2018-2024 Intel Corporation -# SPDX-License-Identifier: Apache-2.0 - -import unittest - -from openvino.tools.mo.front.mxnet.custom import CustomFrontExtractorOp -from openvino.tools.mo.front.extractor import FrontExtractorOp, MXNetCustomFrontExtractorOp -from openvino.tools.mo.graph.graph import Node -from unit_tests.utils.graph import build_graph - -attrs = {'test_attr': 1} - - -class FakeExtractor(MXNetCustomFrontExtractorOp): - @classmethod - def extract(cls, node: Node): - return True, attrs - - -class TestCustomFrontExtractorOp(unittest.TestCase): - @classmethod - def setUpClass(cls): - FrontExtractorOp.registered_ops['Custom'] = CustomFrontExtractorOp - - def test_extract_custom_layer(self): - graph = build_graph( - {'node_1': {'type': 'Identity', 'value': None, 'kind': 'op', 'op': 'Parameter'}, - 'node_2': {'type': 'Identity', 'value': None, 'kind': 'op'}, - 'node_custom': {'type': 'Custom', 'value': None, 'kind': 'op', 'op': 'Custom', }, - 'node_3': {'type': 'Identity', 'value': None, 'kind': 'op'}, - }, - [('node_1', 'node_2'), - ('node_2', 'node_custom'), - ('node_custom', 'node_3'), - ], - { - 'node_custom': {'symbol_dict': {'attrs': {'op_type': 'test_type'}}}, - }) - - custom_node = Node(graph, 'node_custom') - custom_op = FakeExtractor() - supported, op_attrs = custom_op.extract(custom_node) - self.assertTrue(supported) - self.assertEqual(op_attrs, attrs) diff --git a/tools/mo/unit_tests/mo/front/mxnet/div_sqrt_dim_test.py b/tools/mo/unit_tests/mo/front/mxnet/div_sqrt_dim_test.py deleted file mode 100644 index 92bfb114e0ad57..00000000000000 --- 
a/tools/mo/unit_tests/mo/front/mxnet/div_sqrt_dim_test.py +++ /dev/null @@ -1,56 +0,0 @@ -# Copyright (C) 2018-2024 Intel Corporation -# SPDX-License-Identifier: Apache-2.0 - -import unittest - -import numpy as np - -from openvino.tools.mo.front.common.partial_infer.utils import int64_array -from openvino.tools.mo.front.mxnet.div_sqrt_dim import DivSqrtDim -from openvino.tools.mo.utils.ir_engine.compare_graphs import compare_graphs -from unit_tests.utils.graph import build_graph, shaped_parameter, regular_op_with_empty_data, result, connect, \ - shaped_const_with_data, connect_data, connect_front - - -class DivSqrtDimTest(unittest.TestCase): - - def test_1(self): - graph = build_graph( - nodes_attrs={ - **shaped_parameter('input', int64_array([1, 3, 15, 15])), - **regular_op_with_empty_data('div_sqrt_dim', {'op': '_contrib_div_sqrt_dim'}), - **result('result') - }, - edges=[ - *connect('input', 'div_sqrt_dim'), - *connect('div_sqrt_dim', 'result') - ] - ) - - ref_graph = build_graph( - nodes_attrs={ - **shaped_parameter('input', int64_array([1, 3, 15, 15])), - **regular_op_with_empty_data('div_sqrt_shape_of', {'op': 'ShapeOf', 'type': 'ShapeOf'}), - **shaped_const_with_data('gather_axis', None), - **shaped_const_with_data('gather_indices', None), - **regular_op_with_empty_data('gather', {'op': 'Gather', 'type': 'Gather'}), - **regular_op_with_empty_data('power', {'op': 'AttributedPower', 'power': 0.5, 'type': 'Power'}), - **regular_op_with_empty_data('cast', {'op': 'Cast', 'type': 'Convert', 'dst_type': np.float32}), - **regular_op_with_empty_data('div', {'op': 'Div', 'type': 'Divide'}), - **result('result') - }, - edges=[ - *connect('input', '0:div'), - *connect_data('input', 'div_sqrt_shape_of'), - *connect('div_sqrt_shape_of', '0:gather'), - *connect('gather_axis', '1:gather'), - *connect('gather_indices', '2:gather'), - *connect('gather', 'cast'), - *connect('cast', 'power'), - *connect('power', '1:div'), - *connect('div', 'result') - ], - ) - DivSqrtDim().find_and_replace_pattern(graph) - flag, resp = compare_graphs(graph, ref_graph, 'result', 'result', check_op_attrs=True) - self.assertTrue(flag, resp) diff --git a/tools/mo/unit_tests/mo/front/mxnet/extractors/__init__.py b/tools/mo/unit_tests/mo/front/mxnet/extractors/__init__.py deleted file mode 100644 index e69de29bb2d1d6..00000000000000 diff --git a/tools/mo/unit_tests/mo/front/mxnet/extractors/multibox_prior_test.py b/tools/mo/unit_tests/mo/front/mxnet/extractors/multibox_prior_test.py deleted file mode 100644 index 892abc261f1d8d..00000000000000 --- a/tools/mo/unit_tests/mo/front/mxnet/extractors/multibox_prior_test.py +++ /dev/null @@ -1,38 +0,0 @@ -# Copyright (C) 2018-2024 Intel Corporation -# SPDX-License-Identifier: Apache-2.0 - -import unittest - -import numpy as np - -from openvino.tools.mo.front.mxnet.extractors.multibox_prior import multi_box_prior_ext -from openvino.tools.mo.front.mxnet.extractors.utils import AttrDictionary - - -class TestMultiBoxPrior_Parsing(unittest.TestCase): - def test_multi_box_prior_check_attrs(self): - attrs = { - 'ratios': '(1,2,0.5)', - 'steps': '(0.02666666666666667, 0.02666666666666667)', - 'clip': 'False', - 'sizes': '(0.1,0.141)' - } - - res = multi_box_prior_ext(AttrDictionary(attrs)) - exp_attrs = { - 'type': 'PriorBox', - 'step': 0.02666666666666667, - 'offset': 0.5, - 'variance': '0.100000,0.100000,0.200000,0.200000', - 'flip': 0, - 'clip': 0, - 'min_size': [0.1, 0.141], - 'max_size': '', - 'aspect_ratio': [1, 2, 0.5], - } - - for key in exp_attrs.keys(): - if key in 
['aspect_ratio', 'variance']: - np.testing.assert_equal(res[key], exp_attrs[key]) - else: - self.assertEqual(res[key], exp_attrs[key]) diff --git a/tools/mo/unit_tests/mo/front/mxnet/extractors/relu_test.py b/tools/mo/unit_tests/mo/front/mxnet/extractors/relu_test.py deleted file mode 100644 index 97bbfefe87f9f0..00000000000000 --- a/tools/mo/unit_tests/mo/front/mxnet/extractors/relu_test.py +++ /dev/null @@ -1,30 +0,0 @@ -# Copyright (C) 2018-2024 Intel Corporation -# SPDX-License-Identifier: Apache-2.0 - -import unittest - -from openvino.tools.mo.front.mxnet.extractors.relu import ReLUFrontExtractor -from openvino.tools.mo.graph.graph import Node -from unit_tests.utils.graph import build_graph - - -class TestReluFrontExtractorOp(unittest.TestCase): - def test_extract_relu_layer(self): - graph = build_graph( - {'node_1': {'type': 'Parameter', 'kind': 'op', 'op': 'Parameter'}, - 'relu_node': {'type': 'relu', 'kind': 'op', 'op': 'relu', }, - 'node_2': {'type': 'Parameter', 'kind': 'op'}, - }, - [ - ('node_1', 'relu_node'), - ('relu_node', 'node_2'), - ], - { - 'relu_node': {'symbol_dict': {'attrs': {}}}, - }) - - relu_node = Node(graph, 'relu_node') - relu_extr_op = ReLUFrontExtractor() - supported = relu_extr_op.extract(relu_node) - self.assertTrue(supported) - self.assertEqual(relu_node['op'], 'ReLU') diff --git a/tools/mo/unit_tests/mo/front/mxnet/extractors/slice_axis_test.py b/tools/mo/unit_tests/mo/front/mxnet/extractors/slice_axis_test.py deleted file mode 100644 index 6dc2d14e9f5c40..00000000000000 --- a/tools/mo/unit_tests/mo/front/mxnet/extractors/slice_axis_test.py +++ /dev/null @@ -1,65 +0,0 @@ -# Copyright (C) 2018-2024 Intel Corporation -# SPDX-License-Identifier: Apache-2.0 - -import unittest - -import numpy as np - -from openvino.tools.mo.front.mxnet.extractors.slice_axis import mxnet_slice_axis_infer -from openvino.tools.mo.front.mxnet.extractors.slice_axis import slice_axis_ext -from openvino.tools.mo.front.mxnet.extractors.utils import AttrDictionary -from openvino.tools.mo.graph.graph import Node -from unit_tests.utils.graph import build_graph - - -class TestMXNetSliceAxisExtractorOp(unittest.TestCase): - def test_extract_slice_axis_layer(self): - graph = build_graph( - {'node_1': {'type': 'Identity', 'value': None, 'kind': 'op', 'op': 'Parameter'}, - 'slice_axis_node': {'type': 'sigmoid', 'kind': 'op', 'op': 'slice_axis', }, - 'node_3': {'type': 'Identity', 'value': None, 'kind': 'op'}, - }, - [ - ('node_1', 'slice_axis_node'), - ('slice_axis_node', 'node_3'), - ], - { - 'slice_axis_node': {'symbol_dict': {'attrs': {'axis': 0, 'begin': 10, 'end': 25}}}, - }) - - exp_attrs = { - 'op': 'Crop', - 'axis': 0, - 'offset': 10, - 'dim': 25 - } - - slice_axis_node = Node(graph, 'slice_axis_node') - res = slice_axis_ext(AttrDictionary(slice_axis_node['symbol_dict']['attrs'])) - - for key in exp_attrs.keys(): - self.assertEqual(res[key], exp_attrs[key]) - - -class TestMXNetSliceAxisInfer(unittest.TestCase): - def test_slice_axis_infer_layer(self): - graph = build_graph( - {'node_1': {'name': 'data', 'type': 'Identity', 'value': None, 'kind': 'op', 'op': 'Parameter'}, - 'slice_axis_node': {'name': 'slice_axis_node', 'type': 'sigmoid', 'value': None, - 'kind': 'op', 'op': 'slice_axis', }, - 'node_3': {'name': 'node_3', 'type': 'Identity', 'value': None, 'kind': 'op'}, - }, - [ - ('node_1', 'slice_axis_node'), - ('slice_axis_node', 'node_3'), - ], - { - 'node_1': {'shape': np.array([1, 1024, 19, 19])}, - 'slice_axis_node': {'axis': 1, 'offset': 10, 'dim': 25}, - }) - - 
slice_axis_node = Node(graph, 'slice_axis_node') - mxnet_slice_axis_infer(slice_axis_node) - res_shape = [1, 15, 19, 19] - for i in range(0, len(graph.node['node_3']['shape'])): - self.assertEqual(graph.node['node_3']['shape'][i], res_shape[i]) diff --git a/tools/mo/unit_tests/mo/front/mxnet/extractors/utils_test.py b/tools/mo/unit_tests/mo/front/mxnet/extractors/utils_test.py deleted file mode 100644 index b2d19f8d273430..00000000000000 --- a/tools/mo/unit_tests/mo/front/mxnet/extractors/utils_test.py +++ /dev/null @@ -1,198 +0,0 @@ -# Copyright (C) 2018-2024 Intel Corporation -# SPDX-License-Identifier: Apache-2.0 - -import unittest -from unittest.mock import patch - -import mxnet as mx - -from openvino.tools.mo.front.mxnet.extractors.utils import AttrDictionary -from openvino.tools.mo.front.mxnet.extractors.utils import load_params - - -class TestAttrDictionary(unittest.TestCase): - def testBool(self): - attrs = { - "global_pool": "True" - } - - attr_dict = AttrDictionary(attrs) - global_pool = attr_dict.bool("global_pool", False) - self.assertEqual(True, global_pool) - - def testBoolAsDigits(self): - attrs = { - "global_pool": "1" - } - - attr_dict = AttrDictionary(attrs) - global_pool = attr_dict.bool("global_pool", False) - self.assertEqual(True, global_pool) - - def testBoolWithoutAttr(self): - attrs = { - "something": "1" - } - - attr_dict = AttrDictionary(attrs) - global_pool = attr_dict.bool("global_pool", False) - self.assertEqual(False, global_pool) - - def testStrAttr(self): - attrs = { - "something": "Val" - } - - attr_dict = AttrDictionary(attrs) - attr = attr_dict.str("something", "Text") - self.assertEqual("Val", attr) - - def testStrAttrWithoutAttr(self): - attrs = { - "something2": "Val" - } - - attr_dict = AttrDictionary(attrs) - attr = attr_dict.str("something", "Text") - self.assertEqual("Text", attr) - - def testFloatAttr(self): - attrs = { - "something": "0.5" - } - - attr_dict = AttrDictionary(attrs) - attr = attr_dict.float("something", 0.1) - self.assertEqual(0.5, attr) - - def testFloatWithoutAttr(self): - attrs = { - "something2": "0.5" - } - - attr_dict = AttrDictionary(attrs) - attr = attr_dict.float("something", 0.1) - self.assertEqual(0.1, attr) - - def testIntAttr(self): - attrs = { - "something": "5" - } - - attr_dict = AttrDictionary(attrs) - attr = attr_dict.float("something", 1) - self.assertEqual(5, attr) - - def testIntWithoutAttr(self): - attrs = { - "something2": "5" - } - - attr_dict = AttrDictionary(attrs) - attr = attr_dict.float("something", 1) - self.assertEqual(1, attr) - - def testTupleAttr(self): - attrs = { - "something": "(5,6,7)" - } - - attr_dict = AttrDictionary(attrs) - a, b, c = attr_dict.tuple("something", int, (1, 2, 3)) - self.assertEqual(5, a) - self.assertEqual(6, b) - self.assertEqual(7, c) - - def testTupleWithoutAttr(self): - attrs = { - "something2": "(5,6,7)" - } - - attr_dict = AttrDictionary(attrs) - a, b, c = attr_dict.tuple("something", int, (1, 2, 3)) - self.assertEqual(1, a) - self.assertEqual(2, b) - self.assertEqual(3, c) - - def testTupleWithEmptyTupleAttr(self): - attrs = { - "something2": "()" - } - - attr_dict = AttrDictionary(attrs) - a, b = attr_dict.tuple("something", int, (2, 3)) - self.assertEqual(2, a) - self.assertEqual(3, b) - - def testTupleWithEmptyListAttr(self): - attrs = { - "something2": "[]" - } - - attr_dict = AttrDictionary(attrs) - a, b = attr_dict.tuple("something", int, (2, 3)) - self.assertEqual(2, a) - self.assertEqual(3, b) - - def testListAttr(self): - attrs = { - "something": "5,6,7" - } 
- - attr_dict = AttrDictionary(attrs) - l = attr_dict.list("something", int, [1, 2, 3]) - self.assertEqual(5, l[0]) - self.assertEqual(6, l[1]) - self.assertEqual(7, l[2]) - - def testListWithoutAttr(self): - attrs = { - "something2": "5,6,7" - } - - attr_dict = AttrDictionary(attrs) - l = attr_dict.list("something", int, [1, 2, 3]) - self.assertEqual(1, l[0]) - self.assertEqual(2, l[1]) - self.assertEqual(3, l[2]) - - def testIntWithAttrNone(self): - attrs = { - "something": "None" - } - - attr_dict = AttrDictionary(attrs) - attr = attr_dict.int("something", None) - self.assertEqual(None, attr) - - -class TestUtils(unittest.TestCase): - @patch('mxnet.nd.load') - def test_load_symbol_nodes_from_params(self, mock_nd_load): - mock_nd_load.return_value = {'arg:conv0_weight': mx.nd.array([1, 2], dtype='float32'), - 'arg:conv1_weight': mx.nd.array([2, 3], dtype='float32'), - 'aux:bn_data_mean': mx.nd.array([5, 6], dtype='float32')} - model_params = load_params("model.params") - self.assertTrue('conv0_weight' in model_params._param_names) - self.assertTrue('conv1_weight' in model_params._param_names) - self.assertTrue('bn_data_mean' in model_params._aux_names) - self.assertEqual([1., 2.], model_params._arg_params['conv0_weight'].asnumpy().tolist()) - self.assertEqual([2., 3.], model_params._arg_params['conv1_weight'].asnumpy().tolist()) - self.assertEqual([5., 6.], model_params._aux_params['bn_data_mean'].asnumpy().tolist()) - - @patch('mxnet.nd.load') - def test_load_symbol_nodes_from_args_nd(self, mock_nd_load): - mock_nd_load.return_value = {'conv0_weight': mx.nd.array([1, 2], dtype='float32'), - 'conv1_weight': mx.nd.array([2, 3], dtype='float32')} - model_params = load_params("args_model.nd", data_names=('data1', 'data2')) - self.assertTrue('conv0_weight' in model_params._param_names) - self.assertTrue('conv1_weight' in model_params._param_names) - self.assertEqual([1., 2.], model_params._arg_params['conv0_weight'].asnumpy().tolist()) - self.assertEqual([2., 3.], model_params._arg_params['conv1_weight'].asnumpy().tolist()) - - @patch('mxnet.nd.load') - def test_load_symbol_nodes_from_auxs_nd(self, mock_nd_load): - mock_nd_load.return_value = {'bn_data_mean': mx.nd.array([5, 6], dtype='float32')} - model_params = load_params("auxs_model.nd") - self.assertTrue('bn_data_mean' in model_params._aux_names) - self.assertEqual([5., 6.], model_params._aux_params['bn_data_mean'].asnumpy().tolist()) diff --git a/tools/mo/unit_tests/mo/front/mxnet/gather_test.py b/tools/mo/unit_tests/mo/front/mxnet/gather_test.py deleted file mode 100644 index 6a0ce4a8971906..00000000000000 --- a/tools/mo/unit_tests/mo/front/mxnet/gather_test.py +++ /dev/null @@ -1,58 +0,0 @@ -# Copyright (C) 2018-2024 Intel Corporation -# SPDX-License-Identifier: Apache-2.0 - -import unittest - -import numpy as np - -from openvino.tools.mo.front.mxnet.gather import GatherFrontReplacer -from openvino.tools.mo.utils.ir_engine.compare_graphs import compare_graphs -from unit_tests.utils.graph import build_graph - - -class GatherTest(unittest.TestCase): - def test_embedding_replace1(self): - graph = build_graph( - {'placeholder_1': {'shape': None, 'type': 'Parameter', 'kind': 'op', 'op': 'Parameter'}, - 'embedding_const': {'value': None, 'shape': None, 'kind': 'op', 'op': 'Const'}, - 'embedding': {'type': None, 'kind': 'op', 'op': 'Embedding'}, - 'last': {'type': None, 'kind': 'op', 'op': None}, - }, - [ - ('placeholder_1', 'embedding', {'out': 0, 'in': 0}), - ('embedding_const', 'embedding', {'out': 0, 'in': 1}), - ('embedding', 'last') - 
], - { - 'placeholder_1': {'shape': np.array([32, 35])}, - 'embedding_const': {'shape': np.array([2000, 650]), - 'bias': np.array(np.random.randint(0, 225, (2000, 650)))}, - }, nodes_with_edges_only=True) - - graph_ref = build_graph( - {'placeholder_1': {'shape': None, 'type': 'Parameter', 'kind': 'op', 'op': 'Parameter'}, - 'embedding_const': {'value': None, 'kind': 'op', 'op': 'Const'}, - 'axis_const': {'value': 0, 'kind': 'op', 'data_type': None, - 'type': 'Const', 'op': 'Const'}, - 'embedding': {'kind': 'op', 'op': 'Gather'}, - 'last': {'type': None, 'kind': 'op', 'op': None}, - }, - [ - ('embedding_const', 'embedding', {'in': 1}), - ('axis_const', 'embedding', {'in': 2}), - ('placeholder_1', 'embedding', {'in': 0}), - ('embedding', 'last') - ], - {'placeholder_1': {'shape': np.array([32, 35])}, - 'embedding_const': {'shape': np.array([2000, 650]), - 'bias': np.array(np.random.randint(0, 225, (2000, 650)))}, - }, nodes_with_edges_only=True) - - graph.graph['layout'] = 'NCHW' - graph.stage = 'front' - - replacer = GatherFrontReplacer() - replacer.find_and_replace_pattern(graph) - - (flag, resp) = compare_graphs(graph, graph_ref, 'last') - self.assertTrue(flag, resp) diff --git a/tools/mo/unit_tests/mo/front/mxnet/gluoncv_ssd_anchors_test.py b/tools/mo/unit_tests/mo/front/mxnet/gluoncv_ssd_anchors_test.py deleted file mode 100644 index 04862581703883..00000000000000 --- a/tools/mo/unit_tests/mo/front/mxnet/gluoncv_ssd_anchors_test.py +++ /dev/null @@ -1,118 +0,0 @@ -# Copyright (C) 2018-2024 Intel Corporation -# SPDX-License-Identifier: Apache-2.0 - -import numpy as np -import unittest - -from openvino.tools.mo.front.mxnet.gluoncv_ssd_anchors import SsdAnchorsReplacer -from openvino.tools.mo.front.common.partial_infer.utils import int64_array -from openvino.tools.mo.utils.ir_engine.compare_graphs import compare_graphs -from unit_tests.utils.graph import build_graph - -nodes_attributes = { - 'slice_like': {'kind': 'op', 'op': 'slice_like'}, - 'model_reshape0': {'kind': 'op', 'op': 'Reshape'}, - 'model_reshape0_const': {'kind': 'op', 'op': 'Const', 'value': int64_array([1, -1, 4])}, - 'model_reshape1': {'kind': 'op', 'op': 'Reshape'}, - 'model_reshape1_const': {'kind': 'op', 'op': 'Const', 'value': int64_array([1, -1, 4])}, - 'model_reshape2': {'kind': 'op', 'op': 'Reshape'}, - 'model_reshape2_const': {'kind': 'op', 'op': 'Const', 'value': int64_array([1, -1])}, - 'reshape0': {'kind': 'op', 'op': 'Reshape'}, - 'reshape0_const': {'kind': 'op', 'op': 'Const', 'value': int64_array([1, -1])}, - 'concat': {'kind': 'op', 'op': 'Concat'}, - 'reshape1': {'kind': 'op', 'op': 'Reshape'}, - 'reshape1_const': {'kind': 'op', 'op': 'Const', 'value': int64_array([1, 2, -1])}, - 'split': {'kind': 'op', 'op': 'Split', 'num_splits': 2}, - 'split_const': {'kind': 'op', 'op': 'Const', 'value': int64_array(1)}, - 'reshape2': {'kind': 'op', 'op': 'Reshape'}, - 'reshape2_const': {'kind': 'op', 'op': 'Const', 'value': int64_array([-1, 4])}, - 'value': {'kind': 'op', 'op': 'Split', 'num_splits': 4}, - 'value_const': {'kind': 'op', 'op': 'Const', 'value': int64_array(1)}, - 'div_1': {'kind': 'op', 'op': 'Div'}, - 'div_1_const': {'kind': 'op', 'op': 'Const', 'value': np.array([2], dtype=np.float32)}, - 'div_2': {'kind': 'op', 'op': 'Div'}, - 'div_2_const': {'kind': 'op', 'op': 'Const', 'value': np.array([2], dtype=np.float32)}, - 'xmin': {'kind': 'op', 'op': 'Sub'}, - 'ymin': {'kind': 'op', 'op': 'Sub'}, - 'xmax': {'kind': 'op', 'op': 'Add'}, - 'ymax': {'kind': 'op', 'op': 'Add'}, - 'concat_value': {'kind': 'op', 
'op': 'Concat', 'axis': 1}, - 'reshape3': {'kind': 'op', 'op': 'Reshape'}, - 'reshape3_const': {'kind': 'op', 'op': 'Const', 'value': int64_array([1, 1, -1])}, - 'end_concat': {'kind': 'op', 'op': 'Concat'}, - 'detection_output': {'kind': 'op', 'op': 'DetectionOutput'} -} - - -class SsdAnchorsReplacerTest(unittest.TestCase): - - def test_replacer(self): - graph = build_graph( - nodes_attrs=nodes_attributes, - edges=[ - ('slice_like', 'model_reshape0', {'in': 0}), - ('model_reshape0_const', 'model_reshape0', {'in': 1}), - ('model_reshape0', 'model_reshape1', {'in': 0}), - ('model_reshape1_const', 'model_reshape1', {'in': 1}), - ('model_reshape1', 'model_reshape2', {'in': 0}), - ('model_reshape2_const', 'model_reshape2', {'in': 1}), - ('model_reshape2', 'reshape0', {'in': 0}), - ('reshape0_const', 'reshape0', {'in': 1}), - ('reshape0', 'concat'), - ('concat', 'detection_output', {'in': 2}) - ], - nodes_with_edges_only=True - ) - - ref_graph = build_graph( - nodes_attrs=nodes_attributes, - edges=[ - ('slice_like', 'model_reshape0', {'in': 0}), - ('model_reshape0_const', 'model_reshape0', {'in': 1}), - ('model_reshape0', 'model_reshape1', {'in': 0}), - ('model_reshape1_const', 'model_reshape1', {'in': 1}), - ('model_reshape1', 'model_reshape2', {'in': 0}), - ('model_reshape2_const', 'model_reshape2', {'in': 1}), - ('model_reshape2', 'reshape0', {'in': 0}), - ('reshape0_const', 'reshape0', {'in': 1}), - ('reshape0', 'concat'), - ('concat', 'reshape1', {'in': 0}), - ('reshape1_const', 'reshape1', {'in': 1}), - ('reshape1', 'split', {'in': 0}), - ('split_const', 'split', {'in': 1}), - ('split', 'reshape2', {'out': 0, 'in': 0}), - ('reshape2_const', 'reshape2', {'in': 1}), - ('reshape2', 'value', {'in': 0}), - ('value_const', 'value', {'in': 1}), - ('value', 'xmin', {'out': 0, 'in': 0}), - ('value', 'ymin', {'out': 1, 'in': 0}), - ('value', 'xmax', {'out': 0, 'in': 1}), - ('value', 'ymax', {'out': 1, 'in': 1}), - ('value', 'div_1', {'out': 2, 'in': 0}), - ('value', 'div_2', {'out': 3, 'in': 0}), - ('div_1_const', 'div_1', {'in': 1}), - ('div_2_const', 'div_2', {'in': 1}), - ('div_1', 'xmin', {'in': 1, 'out': 0}), - ('div_1', 'xmax', {'in': 0, 'out': 0}), - ('div_2', 'ymin', {'in': 1, 'out': 0}), - ('div_2', 'ymax', {'in': 0, 'out': 0}), - ('xmin', 'concat_value', {'in': 0}), - ('ymin', 'concat_value', {'in': 1}), - ('xmax', 'concat_value', {'in': 2}), - ('ymax', 'concat_value', {'in': 3}), - ('concat_value', 'reshape3', {'in': 0}), - ('reshape3_const', 'reshape3', {'in': 1}), - ('reshape3', 'end_concat', {'in': 0}), - ('split', 'end_concat', {'in': 1}), - ('end_concat', 'detection_output', {'in': 2}) - ], - update_attributes={ - 'concat': {'axis': 1} - }, - nodes_with_edges_only=True - ) - graph.stage = 'front' - graph.graph['cmd_params'].data_type = 'FP32' - SsdAnchorsReplacer().find_and_replace_pattern(graph) - flag, resp = compare_graphs(graph, ref_graph, 'detection_output', check_op_attrs=True) - self.assertTrue(flag, resp) diff --git a/tools/mo/unit_tests/mo/front/mxnet/leaky_relu_test.py b/tools/mo/unit_tests/mo/front/mxnet/leaky_relu_test.py deleted file mode 100644 index cd313c884e7d72..00000000000000 --- a/tools/mo/unit_tests/mo/front/mxnet/leaky_relu_test.py +++ /dev/null @@ -1,75 +0,0 @@ -# Copyright (C) 2018-2024 Intel Corporation -# SPDX-License-Identifier: Apache-2.0 - -import unittest - -import numpy as np - -from openvino.tools.mo.front.mxnet.leaky_relu import LeakyReLUFrontExtractor -from openvino.tools.mo.graph.graph import Node -from unit_tests.utils.graph import build_graph - - 
-class TestLeakyReLUFrontExtractorOp(unittest.TestCase): - def test_extract_leaky_relu_layer(self): - graph = build_graph( - {'node_1': {'type': 'Parameter', 'kind': 'op', 'op': 'Parameter'}, - 'act_node': {'type': 'LeakyReLU', 'kind': 'op', 'op': 'LeakyReLU', }, - 'node_2': {'type': 'Identity', 'kind': 'op'}, - }, - [ - ('node_1', 'act_node'), - ('act_node', 'node_2'), - ], - { - 'act_node': {'symbol_dict': {'attrs': {'slope': '0.6'}}}, - }) - - act_node = Node(graph, 'act_node') - act_extr_op = LeakyReLUFrontExtractor() - supported = act_extr_op.extract(act_node) - self.assertTrue(supported) - self.assertEqual(act_node['op'], 'LeakyReLU') - self.assertEqual(act_node['negative_slope'], 0.6) - - def test_extract_prelu_layer(self): - graph = build_graph( - {'node_1': {'type': 'Parameter', 'kind': 'op', 'op': 'Parameter'}, - 'node_3': {'type': 'Parameter', 'kind': 'op', 'op': 'Parameter'}, - 'act_node': {'type': 'LeakyReLU', 'kind': 'op', 'op': 'LeakyReLU', }, - 'node_2': {'type': 'Identity', 'kind': 'op'}, - }, - [ - ('node_1', 'act_node'), - ('node_3', 'act_node'), - ('act_node', 'node_2'), - ], - { - 'act_node': {'symbol_dict': {'attrs': {'act_type': 'prelu'}}}, - 'node_3': {'value': np.array([1], dtype=np.float32)}, - }) - act_node = Node(graph, 'act_node') - act_extr_op = LeakyReLUFrontExtractor() - supported = act_extr_op.extract(act_node) - self.assertTrue(supported) - self.assertEqual(act_node['op'], 'PReLU') - - def test_extract_elu_layer(self): - graph = build_graph( - {'node_1': {'type': 'Parameter', 'kind': 'op', 'op': 'Parameter'}, - 'act_node': {'type': 'LeakyReLU', 'kind': 'op', 'op': 'LeakyReLU', }, - 'node_2': {'type': 'Parameter', 'kind': 'op'}, - }, - [ - ('node_1', 'act_node'), - ('act_node', 'node_2'), - ], - { - 'act_node': {'symbol_dict': {'attrs': {'act_type': 'elu'}}}, - }) - - act_node = Node(graph, 'act_node') - act_extr_op = LeakyReLUFrontExtractor() - supported = act_extr_op.extract(act_node) - self.assertTrue(supported) - self.assertEqual(act_node['op'], 'Elu') diff --git a/tools/mo/unit_tests/mo/front/mxnet/loader_test.py b/tools/mo/unit_tests/mo/front/mxnet/loader_test.py deleted file mode 100644 index c384e360a3b5f6..00000000000000 --- a/tools/mo/unit_tests/mo/front/mxnet/loader_test.py +++ /dev/null @@ -1,78 +0,0 @@ -# Copyright (C) 2018-2024 Intel Corporation -# SPDX-License-Identifier: Apache-2.0 - -from unittest.mock import patch - -from openvino.tools.mo.front.mxnet.loader import load_symbol_nodes, parse_input_model -from unit_tests.mo.unit_test_with_mocked_telemetry import UnitTestWithMockedTelemetry - - -class MockSymbolLoadObj(): - def tojson(self): - pass - - -class TestLoader(UnitTestWithMockedTelemetry): - @patch('json.load') - @patch('json.loads') - @patch('os.path.isfile') - @patch('mxnet.symbol.load') - def test_load_symbol_nodes(self, mock_symbol_load, mock_isfile, mock_json_loads, mock_json_load): - mock_isfile.return_value = True - mock_json_load.return_value = {'nodes': ''} - mock_json_loads.return_value = {'nodes': {'node1': 1}} - mock_symbol_load_obj = MockSymbolLoadObj() - mock_symbol_load.return_value = mock_symbol_load_obj - with patch('openvino.tools.mo.front.mxnet.loader.open') as mock_open: - self.assertEqual({'node1': 1}, load_symbol_nodes("model_name", legacy_mxnet_model=True)) - - @patch('json.load') - @patch('json.loads') - @patch('os.path.isfile') - @patch('mxnet.symbol.load') - def test_load_symbol_with_custom_nodes(self, mock_symbol_load, mock_isfile, mock_json_loads, mock_json_load): - mock_isfile.return_value = True - 
mock_json_load.return_value = {'nodes': [{'op': 'custom_op'}, {'op': 'custom_op'}]} - mock_json_loads.return_value = {'nodes': {'node1': 1}} - mock_symbol_load_obj = MockSymbolLoadObj() - mock_symbol_load.return_value = mock_symbol_load_obj - with patch('openvino.tools.mo.front.mxnet.loader.open') as mock_open: - list_nodes = load_symbol_nodes("model_name", legacy_mxnet_model=False) - self.assertEqual(2, len(list_nodes)) - for node in list_nodes: - self.assertEqual({'op': 'custom_op'}, node) - - def test_parse_input_model(self): - input_model = '/model-optimizer-mxnet/data/nd/vgg19-0015.params' - model_name, iteration_number = parse_input_model(input_model) - self.assertEqual(model_name, '/model-optimizer-mxnet/data/nd/vgg19') - self.assertEqual(iteration_number, 15) - - - @patch('json.load') - @patch('json.loads') - @patch('os.path.isfile') - @patch('mxnet.symbol.load') - def test_load_symbol_nodes_with_json_and_lagacy_mode(self, mock_symbol_load, mock_isfile, mock_json_loads, mock_json_load): - mock_isfile.return_value = True - mock_json_load.return_value = {'nodes': ''} - mock_json_loads.return_value = {'nodes': {'node1': 1}} - mock_symbol_load_obj = MockSymbolLoadObj() - mock_symbol_load.return_value = mock_symbol_load_obj - with patch('openvino.tools.mo.front.mxnet.loader.open') as mock_open: - self.assertEqual({'node1': 1}, load_symbol_nodes("model_name", input_symbol="some-symbol.json", legacy_mxnet_model=True)) - - - @patch('json.load') - @patch('json.loads') - @patch('os.path.isfile') - @patch('mxnet.symbol.load') - def test_load_symbol_nodes_with_json(self, mock_symbol_load, mock_isfile, mock_json_loads, mock_json_load): - mock_isfile.return_value = True - #json.load - mock_json_load.return_value = {'nodes': {'node1': 1}} - mock_json_loads.return_value = {'nodes': ''} - mock_symbol_load_obj = MockSymbolLoadObj() - mock_symbol_load.return_value = mock_symbol_load_obj - with patch('openvino.tools.mo.front.mxnet.loader.open') as mock_open: - self.assertEqual({'node1': 1}, load_symbol_nodes("model_name", input_symbol="some-symbol.json", legacy_mxnet_model=False)) diff --git a/tools/mo/unit_tests/mo/front/mxnet/multibox_detection_test.py b/tools/mo/unit_tests/mo/front/mxnet/multibox_detection_test.py deleted file mode 100644 index 33dbf6f9a2f8ce..00000000000000 --- a/tools/mo/unit_tests/mo/front/mxnet/multibox_detection_test.py +++ /dev/null @@ -1,67 +0,0 @@ -# Copyright (C) 2018-2024 Intel Corporation -# SPDX-License-Identifier: Apache-2.0 - -import unittest - -from openvino.tools.mo.front.mxnet.multibox_detection_ext import MultiBoxDetectionOutputExtractor -from unit_tests.utils.extractors import PB - - -class TestMultiBoxDetection_Parsing(unittest.TestCase): - def test_multi_box_detection_check_attrs(self): - params = {'attrs': { - "force_suppress": "True", - "nms_threshold": "0.4", - "nms_topk": "400", - "variances": "(0.1, 0.1, 0.2, 0.2)" - }} - - node = PB({'symbol_dict': params}) - MultiBoxDetectionOutputExtractor.extract(node) - - exp_attrs = { - 'type': 'DetectionOutput', - 'keep_top_k': 400, - 'variance_encoded_in_target': 0, - 'code_type': "caffe.PriorBoxParameter.CENTER_SIZE", - 'share_location': 1, - 'confidence_threshold': 0.01, - 'background_label_id': 0, - 'nms_threshold': 0.4, - 'top_k': 400, - 'decrease_label_id': 1, - 'clip_before_nms': 1, - 'normalized': 1, - } - - for key in exp_attrs.keys(): - self.assertEqual(node[key], exp_attrs[key]) - - def test_multi_box_detection_check_attrs_without_top_k(self): - params = {'attrs': { - "force_suppress": "True", - 
"nms_threshold": "0.2", - "threshold": "0.02", - "variances": "(0.1, 0.1, 0.2, 0.2)" - }} - - node = PB({'symbol_dict': params}) - MultiBoxDetectionOutputExtractor.extract(node) - - exp_attrs = { - 'type': 'DetectionOutput', - 'keep_top_k': -1, - 'variance_encoded_in_target': 0, - 'code_type': "caffe.PriorBoxParameter.CENTER_SIZE", - 'share_location': 1, - 'confidence_threshold': 0.02, - 'background_label_id': 0, - 'nms_threshold': 0.2, - 'top_k': -1, - 'decrease_label_id': 1, - 'clip_before_nms': 1, - 'normalized': 1, - } - - for key in exp_attrs.keys(): - self.assertEqual(node[key], exp_attrs[key]) diff --git a/tools/mo/unit_tests/mo/front/mxnet/mx_reshape_reverse_test.py b/tools/mo/unit_tests/mo/front/mxnet/mx_reshape_reverse_test.py deleted file mode 100644 index 6c10829bd51e61..00000000000000 --- a/tools/mo/unit_tests/mo/front/mxnet/mx_reshape_reverse_test.py +++ /dev/null @@ -1,109 +0,0 @@ -# Copyright (C) 2018-2024 Intel Corporation -# SPDX-License-Identifier: Apache-2.0 - -import unittest - -from openvino.tools.mo.front.mxnet.mx_reshape_reverse import MXReshapeReverse -from openvino.tools.mo.front.common.partial_infer.utils import int64_array -from unit_tests.utils.graph import build_graph - - -class TestMXReshapeReverseTest(unittest.TestCase): - nodes_attributes = { - 'node_1': {'shape': int64_array([1, 2, 3, 4]), 'type': 'Parameter', 'kind': 'op', 'op': 'Parameter'}, - - 'shape_node': {'kind': 'op', 'op': 'ShapeOf', 'type': 'ShapeOf'}, - 'forward_reverse_unsqueeze_dims_node': {'kind': 'op', 'op': 'Const', 'type': 'Const', 'value': int64_array([0]), 'shape': int64_array([1])}, - 'forward_reverse_unsqueeze_node': {'kind': 'op', 'op': 'Unsqueeze', 'type': 'Unsqueeze'}, - 'forward_reverse_node': {'kind': 'op', 'op': 'Reverse', 'type': 'Reverse'}, - 'forward_reverse_squeeze_dims_node': {'kind': 'op', 'op': 'Const', 'type': 'Const', 'value': int64_array([0]), 'shape': int64_array([1])}, - 'forward_reverse_squeeze_node': {'kind': 'op', 'op': 'Squeeze', 'type': 'Squeeze'}, - 'reshape_node': {'kind': 'op', 'op': 'Reshape', 'type': 'Reshape'}, - 'reshape_shape_dim_node': {'kind': 'op', 'op': 'Const', 'type': 'Const', 'value': int64_array([3,2,1]), 'shape': int64_array([3])}, - 'reshape_shape_node': {'kind': 'op', 'op': 'Reshape', 'type': 'Reshape'}, - 'backward_shape_node': {'kind': 'op', 'op': 'ShapeOf', 'type': 'ShapeOf'}, - 'backward_reverse_unsqueeze_dims_node': {'kind': 'op', 'op': 'Const', 'type': 'Const', 'value': int64_array([0]), 'shape': int64_array([1])}, - 'backward_reverse_unsqueeze_node': {'kind': 'op', 'op': 'Unsqueeze', 'type': 'Unsqueeze'}, - 'backward_reverse_node': {'kind': 'op', 'op': 'Reverse', 'type': 'Reverse'}, - 'backward_reverse_squeeze_dims_node': {'kind': 'op', 'op': 'Const', 'type': 'Const', 'value': int64_array([0]), 'shape': int64_array([1])}, - 'backward_reverse_squeeze_node': {'kind': 'op', 'op': 'Squeeze', 'type': 'Squeeze'}, - 'last_reshape_node': {'kind': 'op', 'op': 'Reshape', 'type': 'Reshape'}, - 'last': {'type': None, 'value': None, 'kind': 'op', 'op': 'Result'}, - } - - def test_mx_reshape_reverse(self): - graph = build_graph({'node_1': {'shape': int64_array([1, 2, 3, 4]), 'type': 'Parameter', 'kind': 'op', 'op': 'Parameter'}, - 'reshape': {'kind': 'op', 'op': 'MXReshape', 'dim': int64_array([1,2,3]), 'reverse': True}, - 'last': {'type': None, 'value': None, 'kind': 'op', 'op': 'Result'}, - }, - [('node_1', 'reshape', {'in': 0}), - ('reshape', 'last', {'in': 0}), ], - nodes_with_edges_only=True - ) - - graph.stage = 'front' - pattern = 
MXReshapeReverse() - pattern.find_and_replace_pattern(graph) - graph.clean_up() - - graph_ref = build_graph(self.nodes_attributes, - [('node_1', 'shape_node', {'in': 0, 'out': 0}), - ('node_1', 'reshape_node', {'in': 0, 'out': 0}), - ('shape_node', 'forward_reverse_unsqueeze_node', {'in': 0, 'out': 0}), - ('forward_reverse_unsqueeze_dims_node', 'forward_reverse_unsqueeze_node', {'in': 1, 'out': 0}), - ('forward_reverse_unsqueeze_node', 'forward_reverse_node', {'in': 0, 'out': 0}), - ('forward_reverse_node', 'forward_reverse_squeeze_node', {'in': 0, 'out': 0}), - ('forward_reverse_squeeze_dims_node', 'forward_reverse_squeeze_node', {'in': 1, 'out': 0}), - ('forward_reverse_squeeze_node', 'reshape_node', {'in': 1, 'out': 0}), - ('reshape_node', 'reshape_shape_node', {'in': 0, 'out': 0}), - ('reshape_shape_dim_node', 'reshape_shape_node', {'in': 1, 'out': 0}), - - ('reshape_shape_node', 'backward_shape_node', {'in': 0, 'out': 0}), - ('backward_shape_node', 'backward_reverse_unsqueeze_node', {'in': 0, 'out': 0}), - ('backward_reverse_unsqueeze_dims_node', 'backward_reverse_unsqueeze_node', {'in': 1, 'out': 0}), - ('backward_reverse_unsqueeze_node', 'backward_reverse_node', {'in': 0, 'out': 0}), - ('backward_reverse_node', 'backward_reverse_squeeze_node', {'in': 0, 'out': 0}), - ('backward_reverse_squeeze_dims_node', 'backward_reverse_squeeze_node', {'in': 1, 'out': 0}), - - ('backward_reverse_squeeze_node', 'last_reshape_node', {'in': 1, 'out': 0}), - ('reshape_shape_node', 'last_reshape_node', {'in': 0, 'out': 0}), - ('last_reshape_node', 'last', {'in': 0, 'out': 0}), - ]) - graph_ref.clean_up() - - #Cannot use compare_graphs func. The outputs for some nodes not sorted. - - ref_nodes = graph_ref.get_op_nodes() - nodes = graph.get_op_nodes() - self.assertTrue(len(nodes) == len(ref_nodes)) - shapeof_count = 0 - ref_shapeof_count = 0 - reshape_count = 0 - ref_reshape_count = 0 - reverse_count = 0 - ref_reverse_count = 0 - - for rnode in ref_nodes: - if rnode['name'] == 'last': - last_ref_node = rnode - if rnode['op'] == 'ShapeOf': - ref_shapeof_count = ref_shapeof_count + 1 - if rnode['op'] == 'Reshape': - ref_reshape_count = ref_reshape_count + 1 - if rnode['op'] == 'Reverse': - ref_reverse_count = ref_reverse_count + 1 - - for node in nodes: - if node['name'] == 'last': - last_node = node - if node['op'] == 'ShapeOf': - shapeof_count = shapeof_count + 1 - if node['op'] == 'Reshape': - reshape_count = reshape_count + 1 - if node['op'] == 'Reverse': - reverse_count = reverse_count + 1 - - self.assertTrue(shapeof_count == ref_shapeof_count) - self.assertTrue(reshape_count == ref_reshape_count) - self.assertTrue(reverse_count == ref_reverse_count) - self.assertTrue(last_ref_node.op == last_node.op) diff --git a/tools/mo/unit_tests/mo/front/mxnet/mx_reshape_to_reshape_test.py b/tools/mo/unit_tests/mo/front/mxnet/mx_reshape_to_reshape_test.py deleted file mode 100644 index e2322b51b85891..00000000000000 --- a/tools/mo/unit_tests/mo/front/mxnet/mx_reshape_to_reshape_test.py +++ /dev/null @@ -1,105 +0,0 @@ -# Copyright (C) 2018-2024 Intel Corporation -# SPDX-License-Identifier: Apache-2.0 - -import unittest - -from openvino.tools.mo.front.mxnet.mx_reshape_to_reshape import MXReshapeToReshape -from openvino.tools.mo.front.common.partial_infer.utils import int64_array -from unit_tests.utils.graph import build_graph - - -class TestMXReshapeToReshape(unittest.TestCase): - def test_minus2(self): - graph = build_graph({'node_1': {'shape': int64_array([1, 2, 3, 4]), 'type': 'Parameter', 'kind': 'op', 'op': 
'Parameter'}, - 'reshape': {'kind': 'op', 'op': 'MXReshape', 'dim': int64_array([1, 2, -2]), 'reverse': False}, - 'last': {'type': None, 'value': None, 'kind': 'op', 'op': 'Result'}, - }, - [('node_1', 'reshape', {'in': 0}), - ('reshape', 'last', {'in': 0}), ], - nodes_with_edges_only=True - ) - - graph.stage = 'front' - pattern = MXReshapeToReshape() - pattern.find_and_replace_pattern(graph) - graph.clean_up() - reshape_count = 0 - concat_count = 0 - mxreshape_count = 0 - - nodes = graph.get_op_nodes() - for node in nodes: - if node['op'] == 'Reshape': - reshape_count = reshape_count + 1 - elif node['op'] == 'MXReshape': - mxreshape_count = mxreshape_count + 1 - elif node['op'] == 'Concat': - concat_count = concat_count + 1 - - self.assertTrue(reshape_count == 1) - self.assertTrue(concat_count == 1) - self.assertTrue(mxreshape_count == 0) - - - def test_minus3(self): - graph = build_graph({'node_1': {'shape': int64_array([1, 2, 3, 4]), 'type': 'Parameter', 'kind': 'op', 'op': 'Parameter'}, - 'reshape': {'kind': 'op', 'op': 'MXReshape', 'dim': int64_array([1, -3, 4]), 'reverse': False}, - 'last': {'type': None, 'value': None, 'kind': 'op', 'op': 'Result'}, - }, - [('node_1', 'reshape', {'in': 0}), - ('reshape', 'last', {'in': 0}), ], - nodes_with_edges_only=True - ) - - graph.stage = 'front' - pattern = MXReshapeToReshape() - pattern.find_and_replace_pattern(graph) - graph.clean_up() - reshape_count = 0 - concat_count = 0 - mxreshape_count = 0 - - nodes = graph.get_op_nodes() - for node in nodes: - if node['op'] == 'Reshape': - reshape_count = reshape_count + 1 - elif node['op'] == 'MXReshape': - mxreshape_count = mxreshape_count + 1 - elif node['op'] == 'Concat': - concat_count = concat_count + 1 - - self.assertTrue(reshape_count == 1) - self.assertTrue(concat_count == 1) - self.assertTrue(mxreshape_count == 0) - - - def test_minus4(self): - graph = build_graph({'node_1': {'shape': int64_array([1, 6]), 'type': 'Parameter', 'kind': 'op', 'op': 'Parameter'}, - 'reshape': {'kind': 'op', 'op': 'MXReshape', 'dim': int64_array([1, -4, 2, 3, 1]), 'reverse': False}, - 'last': {'type': None, 'value': None, 'kind': 'op', 'op': 'Result'}, - }, - [('node_1', 'reshape', {'in': 0}), - ('reshape', 'last', {'in': 0}), ], - nodes_with_edges_only=True - ) - - graph.stage = 'front' - pattern = MXReshapeToReshape() - pattern.find_and_replace_pattern(graph) - graph.clean_up() - reshape_count = 0 - concat_count = 0 - mxreshape_count = 0 - - nodes = graph.get_op_nodes() - for node in nodes: - if node['op'] == 'Reshape': - reshape_count = reshape_count + 1 - elif node['op'] == 'MXReshape': - mxreshape_count = mxreshape_count + 1 - elif node['op'] == 'Concat': - concat_count = concat_count + 1 - - self.assertTrue(reshape_count == 1) - self.assertTrue(concat_count == 1) - self.assertTrue(mxreshape_count == 0) diff --git a/tools/mo/unit_tests/mo/front/mxnet/pooling_ext_test.py b/tools/mo/unit_tests/mo/front/mxnet/pooling_ext_test.py deleted file mode 100644 index 77ee7a73fc6d40..00000000000000 --- a/tools/mo/unit_tests/mo/front/mxnet/pooling_ext_test.py +++ /dev/null @@ -1,37 +0,0 @@ -# Copyright (C) 2018-2024 Intel Corporation -# SPDX-License-Identifier: Apache-2.0 - -import unittest - -import numpy as np - -from openvino.tools.mo.front.mxnet.pooling_ext import PoolingFrontExtractor -from unit_tests.utils.extractors import PB - - -class TestPoolingShapesParsing(unittest.TestCase): - def test_conv_ext_ideal_numbers(self): - params = {'attrs': { - "kernel": "(3, 4)", - "stride": "(3, 2)", - "pad": "(7, 8)", - 
"pool_type": "max" - }} - - node = PB({'symbol_dict': params}) - PoolingFrontExtractor.extract(node) - exp_res = { - 'op': 'Pooling', - 'pad': np.array([[0, 0], [0, 0], [7, 7], [8, 8]]), - 'pad_spatial_shape': np.array([[7, 7], [8, 8]]), - 'stride': np.array([1, 1, 3, 2]), - 'window': np.array([1, 1, 3, 4]), - 'pool_method': 'max', - 'exclude_pad': False, - } - - for key in exp_res.keys(): - if key in ('pad', 'stride', 'window', 'pad_spatial_shape'): - np.testing.assert_equal(node[key], exp_res[key]) - else: - self.assertEqual(node[key], exp_res[key]) diff --git a/tools/mo/unit_tests/mo/front/mxnet/sigmoid_test.py b/tools/mo/unit_tests/mo/front/mxnet/sigmoid_test.py deleted file mode 100644 index d45eb185d154ac..00000000000000 --- a/tools/mo/unit_tests/mo/front/mxnet/sigmoid_test.py +++ /dev/null @@ -1,30 +0,0 @@ -# Copyright (C) 2018-2024 Intel Corporation -# SPDX-License-Identifier: Apache-2.0 - -import unittest - -from openvino.tools.mo.front.mxnet.sigmoid import SigmoidFrontExtractor -from openvino.tools.mo.graph.graph import Node -from unit_tests.utils.graph import build_graph - - -class TestSigmoidFrontExtractorOp(unittest.TestCase): - def test_extract_sigmoid_layer(self): - graph = build_graph( - {'node_1': {'type': 'Parameter', 'kind': 'op', 'op': 'Parameter'}, - 'sigmoid_node': {'type': 'sigmoid', 'kind': 'op', 'op': 'sigmoid', }, - 'node_3': {'type': 'Identity', 'value': None, 'kind': 'op'}, - }, - [ - ('node_1', 'sigmoid_node'), - ('sigmoid_node', 'node_3'), - ], - { - 'sigmoid_node': {'symbol_dict': {'attrs': {}}}, - }) - - sigmoid_node = Node(graph, 'sigmoid_node') - sigmoid_extr_op = SigmoidFrontExtractor - supported = sigmoid_extr_op.extract(sigmoid_node) - self.assertTrue(supported) - self.assertEqual(sigmoid_node['op'], 'Sigmoid') diff --git a/tools/mo/unit_tests/mo/front/mxnet/ssd_pattern_flatten_softmax_activation_test.py b/tools/mo/unit_tests/mo/front/mxnet/ssd_pattern_flatten_softmax_activation_test.py deleted file mode 100644 index 25f1e5063920ac..00000000000000 --- a/tools/mo/unit_tests/mo/front/mxnet/ssd_pattern_flatten_softmax_activation_test.py +++ /dev/null @@ -1,32 +0,0 @@ -# Copyright (C) 2018-2024 Intel Corporation -# SPDX-License-Identifier: Apache-2.0 - -import unittest - -from openvino.tools.mo.front.mxnet.ssd_pattern_flatten_softmax_activation import SsdPatternFlattenSoftmaxActivation -from openvino.tools.mo.graph.graph import Node -from unit_tests.utils.graph import build_graph - - -class TestSsdPatternFlattenSoftmaxActivation(unittest.TestCase): - def test_pattern_remove_transpose(self): - graph = build_graph({'node_1': {'type': 'Identity', 'kind': 'op', 'op': 'Parameter'}, - 'node_2': {'type': 'Identity', 'kind': 'op'}, - 'node_3': {'type': 'Identity', 'kind': 'op'}, - 'node_softmax_activation': {'type': 'SoftMax', 'kind': 'op', 'op': 'SoftMax'}, - 'node_multi_box_detection': {'type': '_contrib_MultiBoxDetection', 'kind': 'op', - 'op': '_contrib_MultiBoxDetection'}, - 'node_4': {'type': 'Identity', 'kind': 'op'}, - }, - [('node_1', 'node_softmax_activation'), - ('node_2', 'node_multi_box_detection'), - ('node_softmax_activation', 'node_multi_box_detection'), - ('node_3', 'node_multi_box_detection'), - ('node_multi_box_detection', 'node_4'), ], - ) - - pattern = SsdPatternFlattenSoftmaxActivation() - pattern.find_and_replace_pattern(graph) - flatten_name = list(graph.nodes())[-1] - self.assertTrue(graph.has_node(flatten_name)) - self.assertFalse(graph.has_edge(Node(graph, 'node_softmax_activation').id, Node(graph, 'node_multi_box_detection').id)) diff 
--git a/tools/mo/unit_tests/mo/front/mxnet/ssd_pattern_remove_flatten_test.py b/tools/mo/unit_tests/mo/front/mxnet/ssd_pattern_remove_flatten_test.py deleted file mode 100644 index 1512f71eabffb2..00000000000000 --- a/tools/mo/unit_tests/mo/front/mxnet/ssd_pattern_remove_flatten_test.py +++ /dev/null @@ -1,29 +0,0 @@ -# Copyright (C) 2018-2024 Intel Corporation -# SPDX-License-Identifier: Apache-2.0 - -import unittest - -from openvino.tools.mo.front.mxnet.ssd_pattern_remove_flatten import SsdPatternRemoveFlatten -from openvino.tools.mo.graph.graph import Node -from unit_tests.utils.graph import build_graph - - -class TestSsdPatternRemoveFlatten(unittest.TestCase): - def test_pattern_remove_transpose(self): - graph = build_graph({'node_1': {'type': 'Identity', 'kind': 'op', 'op': 'Parameter'}, - 'node_2': {'type': 'Identity', 'kind': 'op'}, - 'node_multi_box_prior': {'type': '_contrib_MultiBoxPrior', 'kind': 'op', - 'op': '_contrib_MultiBoxPrior'}, - 'node_flatten': {'type': 'Flatten', 'kind': 'op', 'op': 'Flatten'}, - 'node_3': {'type': 'Identity', 'kind': 'op'}, - }, - [('node_1', 'node_2'), - ('node_2', 'node_multi_box_prior'), - ('node_multi_box_prior', 'node_flatten'), - ('node_flatten', 'node_3'), ], - ) - - pattern = SsdPatternRemoveFlatten() - pattern.find_and_replace_pattern(graph) - self.assertFalse(graph.has_node('node_flatten')) - self.assertTrue(graph.has_edge(Node(graph, 'node_multi_box_prior').id, Node(graph, 'node_3').id)) diff --git a/tools/mo/unit_tests/mo/front/mxnet/ssd_pattern_remove_reshape_test.py b/tools/mo/unit_tests/mo/front/mxnet/ssd_pattern_remove_reshape_test.py deleted file mode 100644 index fbc946e34c5afd..00000000000000 --- a/tools/mo/unit_tests/mo/front/mxnet/ssd_pattern_remove_reshape_test.py +++ /dev/null @@ -1,42 +0,0 @@ -# Copyright (C) 2018-2024 Intel Corporation -# SPDX-License-Identifier: Apache-2.0 - -import unittest - -from openvino.tools.mo.front.mxnet.ssd_pattern_remove_reshape import SsdPatternRemoveReshape -from openvino.tools.mo.graph.graph import Node -from unit_tests.utils.graph import build_graph - - -class TestSsdPatternRemoveReshape(unittest.TestCase): - def test_pattern_remove_reshape(self): - graph = build_graph({'node_1': {'type': 'Identity', 'kind': 'op', 'op': 'Parameter'}, - 'node_2': {'type': 'Identity', 'kind': 'op'}, - 'node_multi_box_prior1': {'type': '_contrib_MultiBoxPrior', 'kind': 'op', - 'op': '_contrib_MultiBoxPrior'}, - 'node_multi_box_prior2': {'type': '_contrib_MultiBoxPrior', 'kind': 'op', - 'op': '_contrib_MultiBoxPrior'}, - 'node_multi_box_prior3': {'type': '_contrib_MultiBoxPrior', 'kind': 'op', - 'op': '_contrib_MultiBoxPrior'}, - 'node_concat': {'type': 'Concat', 'kind': 'op', 'op': 'Concat'}, - 'node_reshape': {'type': 'Reshape', 'kind': 'op', 'op': 'Reshape'}, - 'node_3': {'type': 'Identity', 'kind': 'op'}, - }, - [('node_1', 'node_2'), - ('node_2', 'node_multi_box_prior1'), - ('node_2', 'node_multi_box_prior2'), - ('node_2', 'node_multi_box_prior3'), - ('node_multi_box_prior1', 'node_concat'), - ('node_multi_box_prior2', 'node_concat'), - ('node_multi_box_prior3', 'node_concat'), - ('node_concat', 'node_reshape'), - ('node_reshape', 'node_3'), ], - { - 'node_concat': {'symbol_dict': {'attrs': {'dim': 3}}}, - }) - graph.stage = 'front' - SsdPatternRemoveReshape().find_and_replace_pattern(graph) - node_concat = Node(graph, 'node_concat') - self.assertEqual(node_concat['symbol_dict']['attrs']['dim'], 2) - self.assertFalse(graph.has_node('node_reshape')) - self.assertTrue(graph.has_edge(Node(graph, 
'node_concat').id, Node(graph, 'node_3').id)) diff --git a/tools/mo/unit_tests/mo/front/mxnet/ssd_pattern_remove_transpose_test.py b/tools/mo/unit_tests/mo/front/mxnet/ssd_pattern_remove_transpose_test.py deleted file mode 100644 index 1010d089a7360f..00000000000000 --- a/tools/mo/unit_tests/mo/front/mxnet/ssd_pattern_remove_transpose_test.py +++ /dev/null @@ -1,34 +0,0 @@ -# Copyright (C) 2018-2024 Intel Corporation -# SPDX-License-Identifier: Apache-2.0 - -import unittest - -from openvino.tools.mo.front.mxnet.ssd_pattern_remove_transpose import SsdPatternRemoveTranspose -from openvino.tools.mo.graph.graph import Node -from unit_tests.utils.graph import build_graph - - -class TestSsdPatternRemoveTranspose(unittest.TestCase): - def test_pattern_remove_transpose(self): - graph = build_graph({'node_1': {'type': 'Identity', 'value': None, 'kind': 'op', 'op': 'Parameter'}, - 'node_3': {'type': 'Identity', 'value': None, 'kind': 'op'}, - 'node_4': {'type': 'Identity', 'value': None, 'kind': 'op'}, - 'node_transpose': {'type': 'transpose', 'value': None, 'kind': 'op', 'op': 'Transpose'}, - 'node_softmax_activation': {'type': 'SoftMax', 'value': None, 'kind': 'op', - 'op': 'SoftMax'}, - 'node_multi_box_detection': {'type': '_contrib_MultiBoxDetection', 'value': None, - 'kind': 'op', 'op': '_contrib_MultiBoxDetection'}, - 'node_5': {'type': 'Identity', 'value': None, 'kind': 'op'}, - }, - [('node_1', 'node_transpose'), - ('node_transpose', 'node_softmax_activation'), - ('node_3', 'node_multi_box_detection'), - ('node_softmax_activation', 'node_multi_box_detection'), - ('node_4', 'node_multi_box_detection'), - ('node_multi_box_detection', 'node_5'), ], - ) - - pattern = SsdPatternRemoveTranspose() - pattern.find_and_replace_pattern(graph) - self.assertFalse(graph.has_node('node_transpose')) - self.assertTrue(graph.has_edge(Node(graph, 'node_1').id, Node(graph, 'node_softmax_activation').id)) diff --git a/tools/mo/unit_tests/mo/front/mxnet/ssd_reorder_detection_out_inputs_test.py b/tools/mo/unit_tests/mo/front/mxnet/ssd_reorder_detection_out_inputs_test.py deleted file mode 100644 index dabf570981daef..00000000000000 --- a/tools/mo/unit_tests/mo/front/mxnet/ssd_reorder_detection_out_inputs_test.py +++ /dev/null @@ -1,41 +0,0 @@ -# Copyright (C) 2018-2024 Intel Corporation -# SPDX-License-Identifier: Apache-2.0 - -import unittest - -import numpy as np - -from openvino.tools.mo.front.mxnet.ssd_reorder_detection_out_inputs import SsdReorderDetectionOutInputs -from openvino.tools.mo.graph.graph import Node -from unit_tests.utils.graph import build_graph - - -class TestSsdReorderDetectionOutInputs(unittest.TestCase): - def test_reorder_detection_out_inputs(self): - graph = build_graph( - {'node_1': {'type': 'Identity', 'kind': 'op', 'op': 'Parameter'}, - 'node_2': {'type': 'Identity', 'kind': 'op', 'op': 'Parameter'}, - 'node_3': {'type': 'Identity', 'kind': 'op', 'op': 'Parameter'}, - 'multi_box_detection': {'type': '_contrib_MultiBoxDetection', 'kind': 'op', - 'op': '_contrib_MultiBoxDetection'}, - }, - [('node_1', 'multi_box_detection'), - ('node_2', 'multi_box_detection'), - ('node_3', 'multi_box_detection')], - { - 'node_1': {'shape': np.array([1, 34928])}, - 'node_2': {'shape': np.array([1, 183372])}, - 'node_3': {'shape': np.array([1, 2, 34928])}, - }) - - pattern = SsdReorderDetectionOutInputs() - pattern.find_and_replace_pattern(graph) - - node_multi_box = Node(graph, 'multi_box_detection') - - node_input1 = node_multi_box.in_node(0) - node_input2 = node_multi_box.in_node(1) - node_input3 = 
node_multi_box.in_node(2) - self.assertEqual(node_input1.name, 'node_2') - self.assertEqual(node_input2.name, 'node_1') - self.assertEqual(node_input3.name, 'node_3') diff --git a/tools/mo/unit_tests/mo/utils/cli_parser_test.py b/tools/mo/unit_tests/mo/utils/cli_parser_test.py index ff7a3c19c35bd5..7413845bd7ece7 100644 --- a/tools/mo/unit_tests/mo/utils/cli_parser_test.py +++ b/tools/mo/unit_tests/mo/utils/cli_parser_test.py @@ -2051,8 +2051,6 @@ def test_mo_convert_params_parsing(self): 'tensorflow_custom_operations_config_update', 'tensorflow_object_detection_api_pipeline_config', 'tensorboard_logdir', 'tensorflow_custom_layer_libraries'}, - 'MXNet-specific parameters:': {'input_symbol', 'nd_prefix_name', 'pretrained_model_name', 'save_params_from_nd', - 'legacy_mxnet_model', 'enable_ssd_gluoncv'}, 'Kaldi-specific parameters:': {'counts', 'remove_output_softmax', 'remove_memory'}, 'PaddlePaddle-specific parameters:': {'example_output'}, } diff --git a/tools/openvino_dev/CMakeLists.txt b/tools/openvino_dev/CMakeLists.txt index 0ffcf046979b81..d8488e97d1082e 100644 --- a/tools/openvino_dev/CMakeLists.txt +++ b/tools/openvino_dev/CMakeLists.txt @@ -37,9 +37,6 @@ ov_cpack_add_component(${OV_CPACK_COMP_OPENVINO_DEV_REQ_FILES} HIDDEN) set(REQUIREMENTS_IN "${CMAKE_CURRENT_SOURCE_DIR}/requirements_dev.txt.in") set(EXTRAS_LIST _ caffe kaldi onnx pytorch tensorflow tensorflow2) -if(NOT (APPLE AND AARCH64)) - list(APPEND EXTRAS_LIST mxnet) -endif() foreach(EXTRAS IN LISTS EXTRAS_LIST) if(EXTRAS STREQUAL "_")