Add OVMS calculators tests (#52)
* Add inference calculator GetContract tests

To run tests:
bazelisk test --test_output=all //mediapipe/calculators/ovms:openvinomodelserversessioncalculator_test
bazelisk test --test_output=all //mediapipe/calculators/ovms:openvinoinferencecalculator_test
* Update OVMS in WORKSPACE

* Exclude sporadically failing test from CI

This test has a failure rate of ~40% locally.
atobiszei authored Nov 29, 2023
1 parent 786d590 commit bb27905
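
For context, a GetContract test of the kind added in this commit can be sketched as follows. This is a minimal illustration, not an excerpt from the added test files; the tag strings, node configuration, and test name are assumptions.

#include "mediapipe/framework/calculator_framework.h"
#include "mediapipe/framework/port/gtest.h"
#include "mediapipe/framework/port/parse_text_proto.h"

#include "mediapipe/calculators/ovms/openvinoinferencecalculator.h"

namespace mediapipe {

// Builds a CalculatorContract from a text-proto node config and checks that
// GetContract accepts it; the tag strings below are assumed for illustration.
TEST(OpenVINOInferenceCalculatorTest, GetContractAcceptsBasicNodeConfig) {
  CalculatorGraphConfig::Node node =
      ParseTextProtoOrDie<CalculatorGraphConfig::Node>(R"pb(
        calculator: "OpenVINOInferenceCalculator"
        input_side_packet: "SESSION:session"
        input_stream: "OVTENSORS:in"
        output_stream: "OVTENSORS:out"
      )pb");
  CalculatorContract contract;
  ASSERT_TRUE(contract.Initialize(node).ok());
  EXPECT_TRUE(OpenVINOInferenceCalculator::GetContract(&contract).ok());
}

}  // namespace mediapipe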
Showing 14 changed files with 718 additions and 126 deletions.
3 changes: 2 additions & 1 deletion Makefile
@@ -37,8 +37,9 @@ tests: run_unit_tests run_hello_world run_hello_ovms
run_hello_ovms:
docker run $(OVMS_MEDIA_DOCKER_IMAGE):$(OVMS_MEDIA_IMAGE_TAG) bazel-bin/mediapipe/examples/desktop/hello_ovms/hello_ovms | grep -q "Output tensor data: 9 - 11"

MEDIAPIPE_UNSTABLE_TESTS_REGEX="MuxInputStreamHandlerTest.RemovesUnusedDataStreamPackets"
run_unit_tests:
docker run -e http_proxy=$(HTTP_PROXY) -e https_proxy=$(HTTPS_PROXY) $(OVMS_MEDIA_DOCKER_IMAGE):$(OVMS_MEDIA_IMAGE_TAG) bazel test --define=MEDIAPIPE_DISABLE_GPU=1 //mediapipe/framework/...
docker run -e http_proxy=$(HTTP_PROXY) -e https_proxy=$(HTTPS_PROXY) $(OVMS_MEDIA_DOCKER_IMAGE):$(OVMS_MEDIA_IMAGE_TAG) bazel test --define=MEDIAPIPE_DISABLE_GPU=1 --test_output=streamed --test_filter="-${MEDIAPIPE_UNSTABLE_TESTS_REGEX}" //mediapipe/framework/...

run_hello_world:
docker run $(OVMS_MEDIA_DOCKER_IMAGE):$(OVMS_MEDIA_IMAGE_TAG) bazel-bin/mediapipe/examples/desktop/hello_world/hello_world
101 changes: 88 additions & 13 deletions mediapipe/calculators/ovms/BUILD
@@ -17,38 +17,85 @@
licenses(["notice"])

package(default_visibility = ["//visibility:public"])

cc_library(
name = "ovms_calculator",
srcs = ["modelapiovmsadapter.cc",
"modelapiovmsadapter.hpp",
"openvinomodelserversessioncalculator.cc",
"openvinoinferencecalculator.cc"],
srcs = [
],
deps = [
"//mediapipe/calculators/ovms:openvinoinferencecalculator_cc_proto",
"//mediapipe/calculators/ovms:openvinomodelserversessioncalculator_cc_proto",
"//mediapipe/calculators/openvino:openvino_tensors_to_classification_calculator_cc_proto",
"//mediapipe/calculators/openvino:openvino_tensors_to_detections_calculator_cc_proto",
"//mediapipe/calculators/openvino:openvino_converter_calculator_cc_proto",
"//mediapipe/calculators/openvino:openvino_converter_calculator",
"//mediapipe/calculators/openvino:openvino_tensors_to_classification_calculator",
"//mediapipe/calculators/openvino:openvino_tensors_to_detections_calculator",
"//mediapipe/framework:calculator_framework",
"@org_tensorflow//tensorflow/core:framework",
"//mediapipe/framework/port:status",
"//mediapipe/framework/formats:tensor", # Tensor GetContract
":modelapiovmsadapter",
":openvinoinferencecalculator",
":openvinomodelserversessioncalculator",
],
copts = ["-Iexternal/ovms/src","-Isrc"],
linkopts = ["-Lmediapipe/"],
alwayslink = 1,
)

cc_library(
name = "modelapiovmsadapter",
srcs = [
"modelapiovmsadapter.cc"
],
hdrs = [
"modelapiovmsadapter.hpp"
],
deps = [
"//mediapipe/framework/port:logging", # TODO remove logs but need better error handling/reporting in Model API
"@ovms//src:ovms_header",
"@model_api//:model_api",
"@org_tensorflow//tensorflow/lite:framework_stable", # to use tflite
"@linux_openvino//:openvino",
],
copts = ["-Iexternal/ovms/src","-Isrc"],
)

cc_library(
name = "openvinoinferencecalculator",
srcs = [
"openvinoinferencecalculator.cc"
],
hdrs = [
"openvinoinferencecalculator.h"
],
deps = [
":modelapiovmsadapter",
":openvinoinferencecalculator_cc_proto",
"//mediapipe/framework:calculator_framework",
"//mediapipe/framework/formats:tensor", # Tensor GetContract
"@linux_openvino//:openvino",
"@org_tensorflow//tensorflow/core:framework",
"@org_tensorflow//tensorflow/lite/c:c_api",
],
copts = ["-Iexternal/ovms/src","-Isrc"],
linkopts = ["-Lmediapipe/"],
alwayslink = 1,
)

cc_library(
name = "openvinomodelserversessioncalculator",
srcs = [
"openvinomodelserversessioncalculator.cc"
],
hdrs = [
"openvinomodelserversessioncalculator.h"
],
deps = [
":modelapiovmsadapter",
":openvinomodelserversessioncalculator_cc_proto",
"//mediapipe/framework:calculator_framework",
"@linux_openvino//:openvino",
],
copts = ["-Iexternal/ovms/src","-Isrc"],
alwayslink = 1,
)

load("@mediapipe//mediapipe/framework/port:build_config.bzl", "mediapipe_cc_proto_library", "mediapipe_proto_library")

# ovms
mediapipe_proto_library(
name = "openvinoinferencecalculator_proto",
srcs = ["openvinoinferencecalculator.proto"],
@@ -67,3 +114,31 @@ mediapipe_proto_library(
"@mediapipe//mediapipe/framework:calculator_proto",
],
)

cc_test(
name = "openvinoinferencecalculator_test",
srcs = ["openvinoinferencecalculator_test.cc",
],
deps = [
":ovms_calculator",
"@ovms//src:ovms_header",
"@ovms//src:ovms_lib",
"//mediapipe/framework/port:gtest_main",
"//mediapipe/framework/port:parse_text_proto",
],
copts = ["-Iexternal/ovms/src","-Isrc"],
)

cc_test(
name = "openvinomodelserversessioncalculator_test",
srcs = ["openvinomodelserversessioncalculator_test.cc",
],
deps = [
":ovms_calculator",
"@ovms//src:ovms_header",
"@ovms//src:ovms_lib",
"//mediapipe/framework/port:gtest_main",
"//mediapipe/framework/port:parse_text_proto",
],
copts = ["-Iexternal/ovms/src","-Isrc"],
)
4 changes: 2 additions & 2 deletions mediapipe/calculators/ovms/modelapiovmsadapter.cc
@@ -25,10 +25,10 @@

#include <openvino/openvino.hpp>

#include "ovms.h" // NOLINT
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wdeprecated-declarations"
#include "mediapipe/framework/calculator_framework.h"
#include "mediapipe/framework/port/canonical_errors.h"
#include "mediapipe/framework/port/logging.h"
#pragma GCC diagnostic pop
// here we need to decide if we have several calculators (1 for OVMS repository, 1-N inside mediapipe)
// for the one inside OVMS repo it makes sense to reuse code from ovms lib
8 changes: 2 additions & 6 deletions mediapipe/calculators/ovms/modelapiovmsadapter.hpp
@@ -26,15 +26,11 @@
#include <adapters/inference_adapter.h> // TODO fix path model_api/model_api/cpp/adapters/include/adapters/inference_adapter.h
#include <openvino/openvino.hpp>

#include "ovms.h" // NOLINT
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wdeprecated-declarations"
#include "mediapipe/framework/calculator_framework.h"
#include "mediapipe/framework/port/canonical_errors.h"
#pragma GCC diagnostic pop
// here we need to decide if we have several calculators (1 for OVMS repository, 1-N inside mediapipe)
// for the one inside OVMS repo it makes sense to reuse code from ovms lib

class OVMS_Server_;
typedef struct OVMS_Server_ OVMS_Server;
namespace mediapipe {
namespace ovms {

6 changes: 3 additions & 3 deletions mediapipe/calculators/ovms/openvinoinferencecalculator.cc
@@ -338,6 +338,7 @@ class OpenVINOInferenceCalculator : public CalculatorBase {
LOG(INFO) << "OpenVINOInferenceCalculator GetContract start";
RET_CHECK(!cc->Inputs().GetTags().empty());
RET_CHECK(!cc->Outputs().GetTags().empty());
RET_CHECK(cc->InputSidePackets().HasTag(SESSION_TAG));
for (const std::string& tag : cc->Inputs().GetTags()) {
// could be replaced with absl::StartsWith when migrated to MP
if (startsWith(tag, OVTENSORS_TAG)) {
@@ -515,10 +516,10 @@ class OpenVINOInferenceCalculator : public CalculatorBase {
try {
output = session->infer(input);
} catch (const std::exception& e) {
LOG(INFO) << "Catched exception from session infer():" << e.what();
LOG(INFO) << "Caught exception from session infer():" << e.what();
RET_CHECK(false);
} catch (...) {
LOG(INFO) << "Catched unknown exception from session infer()";
LOG(INFO) << "Caught unknown exception from session infer()";
RET_CHECK(false);
}
auto outputsCount = output.size();
@@ -659,7 +660,6 @@ class OpenVINOInferenceCalculator : public CalculatorBase {
LOG(INFO) << "Failed to deserialize tensor error:" << e.what();
RET_CHECK(false);
}
LOG(INFO) << "OVMS calculator will process TfLite tensors";
}
LOG(INFO) << "OpenVINOInferenceCalculator process end";
return absl::OkStatus();
44 changes: 44 additions & 0 deletions mediapipe/calculators/ovms/openvinoinferencecalculator.h
@@ -0,0 +1,44 @@
#pragma once
//*****************************************************************************
// Copyright 2023 Intel Corporation
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//*****************************************************************************
#include <memory>
#include <unordered_map>

#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wdeprecated-declarations"
#include "mediapipe/framework/calculator_framework.h"
#include "mediapipe/framework/port/canonical_errors.h"
#pragma GCC diagnostic pop
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wall"
#include "tensorflow/lite/interpreter.h"
#pragma GCC diagnostic pop
class InferenceAdapter;
namespace mediapipe {
class OpenVINOInferenceCalculator : public CalculatorBase {
std::shared_ptr<::InferenceAdapter> session{nullptr};
std::unordered_map<std::string, std::string> outputNameToTag;
std::vector<std::string> input_order_list;
std::vector<std::string> output_order_list;
std::unique_ptr<tflite::Interpreter> interpreter_ = absl::make_unique<tflite::Interpreter>();
bool initialized = false;
public:
static absl::Status GetContract(CalculatorContract* cc);
absl::Status Close(CalculatorContext* cc) override final;
absl::Status Open(CalculatorContext* cc) override final;
absl::Status Process(CalculatorContext* cc) override final;
};
} // namespace mediapipe
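
For reference, a calculator declared like this is normally made available to CalculatorGraph through MediaPipe's registration macro in its .cc file. The following is an assumed minimal sketch, not an excerpt from openvinoinferencecalculator.cc in this commit:

#include "mediapipe/framework/calculator_framework.h"

#include "mediapipe/calculators/ovms/openvinoinferencecalculator.h"

namespace mediapipe {
// Registers the calculator under its class name so graph configs can refer to
// "OpenVINOInferenceCalculator" in a node's calculator field.
REGISTER_CALCULATOR(OpenVINOInferenceCalculator);
}  // namespace mediapipe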