From e6ab01c6819d552160df7d7b0f7fbaf370f3ac06 Mon Sep 17 00:00:00 2001
From: Vitaliy Urusovskij
Date: Wed, 20 Dec 2023 01:03:33 +0400
Subject: [PATCH] Shared `onnx` tests to API2.0 (#21726)

---
 .../include/onnx/quantized_models_tests.hpp |  8 +++---
 .../src/onnx/quantized_models_tests.cpp     | 26 +++++++------------
 2 files changed, 14 insertions(+), 20 deletions(-)

diff --git a/src/tests/functional/plugin/shared/include/onnx/quantized_models_tests.hpp b/src/tests/functional/plugin/shared/include/onnx/quantized_models_tests.hpp
index 52be11b2cc7f79..32ca7447af893b 100644
--- a/src/tests/functional/plugin/shared/include/onnx/quantized_models_tests.hpp
+++ b/src/tests/functional/plugin/shared/include/onnx/quantized_models_tests.hpp
@@ -5,19 +5,19 @@
 #pragma once
 
 #include <string>
-#include "shared_test_classes/base/layer_test_utils.hpp"
+#include "shared_test_classes/base/ov_subgraph.hpp"
 
 namespace ONNXTestsDefinitions {
 
 class QuantizedModelsTests : public testing::WithParamInterface<std::string>,
-                             virtual public LayerTestsUtils::LayerTestsCommon {
+                             virtual public ov::test::SubgraphBaseStaticTest {
 public:
     static std::string getTestCaseName(const testing::TestParamInfo<std::string>& obj);
 
 protected:
     void SetUp() override;
-    using LayerInputTypes = std::unordered_map<std::string, std::vector<ngraph::element::Type_t>>;
-    void runModel(const char* model, const LayerInputTypes& expected_layer_input_types, float thr);
+    using LayerInputTypes = std::unordered_map<std::string, std::vector<ov::element::Type_t>>;
+    void run_model(const char* model, const LayerInputTypes& expected_layer_input_types, float thr);
 };
 
 } // namespace ONNXTestsDefinitions
diff --git a/src/tests/functional/plugin/shared/src/onnx/quantized_models_tests.cpp b/src/tests/functional/plugin/shared/src/onnx/quantized_models_tests.cpp
index 761cb04d921fc5..bc4c02679070df 100644
--- a/src/tests/functional/plugin/shared/src/onnx/quantized_models_tests.cpp
+++ b/src/tests/functional/plugin/shared/src/onnx/quantized_models_tests.cpp
@@ -24,15 +24,13 @@ static std::string getModelFullPath(const char* path) {
         FileUtils::makePath<char>(ov::test::utils::getExecutableDirectory(), TEST_MODELS), path);
 }
 
-void QuantizedModelsTests::runModel(const char* model, const LayerInputTypes& expected_layer_input_types, float thr) {
-    threshold = thr;
-    auto ie = getCore();
-    auto network = ie->ReadNetwork(getModelFullPath(model));
-    function = network.getFunction();
-    Run();
-    auto runtime_function = executableNetwork.GetExecGraphInfo().getFunction();
+void QuantizedModelsTests::run_model(const char* model, const LayerInputTypes& expected_layer_input_types, float thr) {
+    abs_threshold = thr;
+    function = core->read_model(getModelFullPath(model));
+    ov::test::SubgraphBaseStaticTest::run();
+    auto runtime_model = compiledModel.get_runtime_model();
     int ops_found = 0;
-    for (const auto& node : runtime_function->get_ordered_ops()) {
+    for (const auto& node : runtime_model->get_ordered_ops()) {
         const auto& name = node->get_friendly_name();
         if (expected_layer_input_types.count(name)) {
             ops_found++;
@@ -47,25 +45,21 @@ void QuantizedModelsTests::runModel(const char* model, const LayerInputTypes& ex
 }
 
 TEST_P(QuantizedModelsTests, MaxPoolQDQ) {
-    SKIP_IF_CURRENT_TEST_IS_DISABLED();
-    runModel("max_pool_qdq.onnx", {{"890_original", {ngraph::element::u8}}}, 1e-5);
+    run_model("max_pool_qdq.onnx", {{"890_original", {ov::element::u8}}}, 1e-5);
 }
 
 TEST_P(QuantizedModelsTests, MaxPoolFQ) {
-    SKIP_IF_CURRENT_TEST_IS_DISABLED();
-    runModel("max_pool_fq.onnx", {{"887_original", {ngraph::element::u8}}}, 1e-5);
+    run_model("max_pool_fq.onnx", {{"887_original", {ov::element::u8}}}, 1e-5);
 }
 
 TEST_P(QuantizedModelsTests, ConvolutionQDQ) {
-    SKIP_IF_CURRENT_TEST_IS_DISABLED();
     // activations have type uint8 and weights int8
-    runModel("convolution_qdq.onnx", {{"908_original", {ngraph::element::u8, ngraph::element::i8}}}, 1.5e-2);
+    run_model("convolution_qdq.onnx", {{"908_original", {ov::element::u8, ov::element::i8}}}, 1.5e-2);
 }
 
 TEST_P(QuantizedModelsTests, ConvolutionFQ) {
-    SKIP_IF_CURRENT_TEST_IS_DISABLED();
     // activations have type uint8 and weights int8
-    runModel("convolution_fq.onnx", {{"902_original", {ngraph::element::u8, ngraph::element::i8}}}, 1.5e-2);
+    run_model("convolution_fq.onnx", {{"902_original", {ov::element::u8, ov::element::i8}}}, 1.5e-2);
 }
 
 } // namespace ONNXTestsDefinitions
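
Note (supplementary, not part of the patch): the migrated suite is still parameterized only by a device string, so a plugin-side test target would register it with GoogleTest roughly as sketched below. The suite prefix "smoke_ONNX_Quantized" and the "CPU" device value are illustrative assumptions, not taken from this change.

    // Hypothetical plugin-side instantiation of the shared suite (sketch only).
    #include "onnx/quantized_models_tests.hpp"

    using namespace ONNXTestsDefinitions;

    // "smoke_ONNX_Quantized" and "CPU" are assumed placeholder values.
    INSTANTIATE_TEST_SUITE_P(smoke_ONNX_Quantized,
                             QuantizedModelsTests,
                             ::testing::Values("CPU"),
                             QuantizedModelsTests::getTestCaseName);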