Working log_level and validate (#69)
rasapala authored Mar 20, 2024
1 parent 60588c6 commit 0d5cbdb
Showing 9 changed files with 260 additions and 4 deletions.
22 changes: 22 additions & 0 deletions docs/development.md
@@ -88,3 +88,25 @@ and adding dump function in your specific calculator
dumpOvTensorInput(input,"input");
#endif
```
# Setting log level

The log level of a MediaPipe application can be set with the GLOG_minloglevel environment variable: 0 is the most verbose, for debugging purposes, and 3 is the least verbose, reporting only major errors.
The setting applies both to MediaPipe framework logs and to the OpenVINO inference calculator.
The OpenVINO inference calculator log level is mapped from the GLOG_minloglevel value as shown in the table below.

The recommended GLOG_minloglevel setting for development is `0`, as it prints the most detailed information and performs additional validation of the graph and of the OpenVINOInferenceCalculator.
To set the flag in the desktop example applications, set the GLOG_minloglevel environment variable before running them.

| GLOG_minloglevel | MediaPipe log level | Model Server log_level |
| :--------------- | :------------------ | :---------------------- |
| 0                | INFO                | TRACE                    |
| 1                | WARNING             | DEBUG                    |
| 2                | ERROR               | INFO                     |
| 3                | FATAL               | ERROR                    |


Example usage:
```bash
GLOG_minloglevel=0 make run_object_detection
```
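
For illustration, the mapping in the table corresponds to a small helper along these lines. This is a minimal sketch, not the exact code from this commit (the commit's own version is the `StringToLogLevel` utility shown further down); it assumes the `OVMS_LogLevel` enum from `ovms.h`, and the function name `MapGlogToOvmsLogLevel` is hypothetical:

```cpp
#include <string>

#include "ovms.h"  // provides OVMS_LogLevel and its values

// Hypothetical helper mirroring the table above: translate the
// GLOG_minloglevel string into the corresponding OVMS log level.
OVMS_LogLevel MapGlogToOvmsLogLevel(const std::string& glogLevel) {
    if (glogLevel == "0") return OVMS_LOG_TRACE;  // MediaPipe INFO
    if (glogLevel == "1") return OVMS_LOG_DEBUG;  // MediaPipe WARNING
    if (glogLevel == "2") return OVMS_LOG_INFO;   // MediaPipe ERROR
    if (glogLevel == "3") return OVMS_LOG_ERROR;  // MediaPipe FATAL
    return OVMS_LOG_INFO;  // default for unset or unrecognized values
}
```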
9 changes: 9 additions & 0 deletions mediapipe/calculators/ovms/BUILD
@@ -64,6 +64,10 @@ cc_library(
hdrs = [
"openvinoinferenceutils.h"
],
deps = [
"@ovms//src:ovms_header",
],
copts = ["-Iexternal/ovms/src","-Isrc"],
alwayslink = 1,
)

@@ -94,6 +98,7 @@ cc_library(
":openvinoinferenceutils",
"//mediapipe/framework:calculator_framework",
],
copts = ["-Iexternal/ovms/src","-Isrc"],
alwayslink = 1,
)

@@ -114,6 +119,7 @@ cc_library(
"@linux_openvino//:openvino",
"@org_tensorflow//tensorflow/core:framework",
"@org_tensorflow//tensorflow/lite/c:c_api",
"@ovms//src:ovms_header",
],
copts = ["-Iexternal/ovms/src","-Isrc"],
alwayslink = 1,
@@ -129,9 +135,11 @@ cc_library(
],
deps = [
":modelapiovmsadapter",
":openvinoinferenceutils",
":openvinomodelserversessioncalculator_cc_proto",
"//mediapipe/framework:calculator_framework",
"@linux_openvino//:openvino",
"@ovms//src:ovms_header",
],
copts = ["-Iexternal/ovms/src","-Isrc"],
alwayslink = 1,
@@ -180,6 +188,7 @@ cc_test(
srcs = ["openvinomodelserversessioncalculator_test.cc",
],
deps = [
"openvinomodelserversessioncalculator",
":ovms_calculator",
"@ovms//src:ovms_header",
"@ovms//src:ovms_lib",
104 changes: 101 additions & 3 deletions mediapipe/calculators/ovms/openvinoinferencecalculator_test.cc
@@ -14,12 +14,14 @@
// limitations under the License.
//*****************************************************************************
#include "openvinoinferencecalculator.h"
#include "openvinomodelserversessioncalculator.h"
#include <algorithm>
#include <iostream>
#include <memory>
#include <chrono>
#include <thread>
#include <sstream>
#include <stdlib.h>
#include <unordered_map>

#include <adapters/inference_adapter.h>
@@ -56,11 +58,16 @@ PacketType OVTENSORS_TYPE;
PacketType MPTENSOR_TYPE;
PacketType MPTENSORS_TYPE;
public:
const std::string ovmsLogLevelEnv = "GLOG_minloglevel";
void SetUp() override {
OVTENSOR_TYPE.Set<ov::Tensor>();
OVTENSORS_TYPE.Set<std::vector<ov::Tensor>>();
MPTENSOR_TYPE.Set<mediapipe::Tensor>();
MPTENSORS_TYPE.Set<std::vector<mediapipe::Tensor>>();
setenv(ovmsLogLevelEnv.c_str(), "0", true);
}
void TearDown() override {
unsetenv(ovmsLogLevelEnv.c_str());
}
};

@@ -123,15 +130,23 @@ TEST_F(OpenVINOInferenceCalculatorTest, VerifyNotAllowedSideOutputPacket) {
EXPECT_EQ(abslStatus.code(), absl::StatusCode::kInternal) << abslStatus.message();
}

void runDummyInference(std::string& graph_proto) {
void runDummyInference(std::string& graph_proto, bool getContractExpectedFailure = false, bool inferenceExpectedFailure = false) {
CalculatorGraphConfig graph_config =
ParseTextProtoOrDie<CalculatorGraphConfig>(graph_proto);
const std::string inputStreamName = "input";
const std::string outputStreamName = "output";
// avoid creating pollers, retrieving packets etc.
std::vector<Packet> output_packets;

mediapipe::tool::AddVectorSink(outputStreamName, &graph_config, &output_packets);
CalculatorGraph graph(graph_config);

::mediapipe::CalculatorGraph graph;
if (getContractExpectedFailure) {
EXPECT_EQ(graph.Initialize(graph_config).code(), absl::StatusCode::kInternal);
return;
}
else
EXPECT_EQ(graph.Initialize(graph_config).code(), absl::StatusCode::kOk);
MP_ASSERT_OK(graph.StartRun({}));
auto datatype = ov::element::Type_t::f32;
ov::Shape shape{1,10};
@@ -140,7 +155,13 @@ void runDummyInference(std::string& graph_proto) {
MP_ASSERT_OK(graph.AddPacketToInputStream(
inputStreamName, Adopt(inputTensor.release()).At(Timestamp(0))));
MP_ASSERT_OK(graph.CloseInputStream(inputStreamName));
MP_ASSERT_OK(graph.WaitUntilIdle());
if (inferenceExpectedFailure) {
EXPECT_EQ(graph.WaitUntilIdle().code(), absl::StatusCode::kInternal);
return;
}
else
MP_ASSERT_OK(graph.WaitUntilIdle());

ASSERT_EQ(1, output_packets.size());
const ov::Tensor& outputTensor =
output_packets[0].Get<ov::Tensor>();
@@ -188,6 +209,83 @@ TEST_F(OpenVINOInferenceCalculatorTest, BasicDummyInference) {
)";
runDummyInference(graph_proto);
}
std::string failed_graph_proto = R"(
input_stream: "input"
output_stream: "output"
node {
calculator: "OpenVINOModelServerSessionCalculator"
output_side_packet: "SESSION:session"
node_options: {
[type.googleapis.com / mediapipe.OpenVINOModelServerSessionCalculatorOptions]: {
servable_name: "dummy"
server_config: "/mediapipe/mediapipe/calculators/ovms/test_data/config.json"
}
}
}
node {
calculator: "OpenVINOInferenceCalculator"
input_side_packet: "SESSION:session"
input_stream: "OVTENSOR:input"
output_stream: "OVTENSOR:output"
node_options: {
[type.googleapis.com / mediapipe.OpenVINOInferenceCalculatorOptions]: {
tag_to_input_tensor_names {
key: "OVTENSORS"
value: "b"
}
tag_to_output_tensor_names {
key: "OVTENSOR"
value: "a"
}
}
}
}
)";

TEST_F(OpenVINOInferenceCalculatorTest, ValidationFailedInDebug) {
setenv(ovmsLogLevelEnv.c_str(), "0", true);
runDummyInference(failed_graph_proto, true, true);
}
TEST_F(OpenVINOInferenceCalculatorTest, ValidationPassInInfo) {
setenv(ovmsLogLevelEnv.c_str(), "", true);
runDummyInference(failed_graph_proto, false, true);
}
TEST_F(OpenVINOInferenceCalculatorTest, ValidationFailedInDebugReorderCalculators) {
std::string graph_proto = R"(
input_stream: "input"
output_stream: "output"
node {
calculator: "OpenVINOInferenceCalculator"
input_side_packet: "SESSION:session"
input_stream: "OVTENSORS:input"
output_stream: "OVTENSOR:output"
node_options: {
[type.googleapis.com / mediapipe.OpenVINOInferenceCalculatorOptions]: {
tag_to_input_tensor_names {
key: "OVTENSOR"
value: "b"
}
tag_to_output_tensor_names {
key: "OVTENSOR"
value: "a"
}
}
}
}
node {
calculator: "OpenVINOModelServerSessionCalculator"
output_side_packet: "SESSION:session"
node_options: {
[type.googleapis.com / mediapipe.OpenVINOModelServerSessionCalculatorOptions]: {
servable_name: "dummy"
server_config: "/mediapipe/mediapipe/calculators/ovms/test_data/config.json"
}
}
}
)";
setenv(ovmsLogLevelEnv.c_str(), "0", true);
runDummyInference(graph_proto, true);
}
TEST_F(OpenVINOInferenceCalculatorTest, BasicDummyInferenceEmptyKey) {
std::string graph_proto = R"(
input_stream: "input"
mediapipe/calculators/ovms/openvinoinferencecalculator.cc
@@ -15,6 +15,7 @@
//*****************************************************************************
#include <unordered_map>

#include "ovms.h" // NOLINT
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wdeprecated-declarations"
#include "mediapipe/framework/calculator_contract.h"
@@ -25,6 +26,8 @@

namespace mediapipe {

const char* OvmsLogLevelEnv = "GLOG_minloglevel";

static bool ValidateOrderLists(std::set<std::string> calculatorTags, const google::protobuf::RepeatedPtrField<std::string>& order_list) {
// Get output_stream types defined in the graph
std::vector<std::string> inputTypes;
@@ -176,6 +179,11 @@ bool ValidateCalculatorSettings(CalculatorContract* cc)
return false;
}

// For better performance, run deep validation only when the log level is not INFO
if (StringToLogLevel(std::string(std::getenv(OvmsLogLevelEnv)
== nullptr ? "" : std::getenv(OvmsLogLevelEnv))) == OVMS_LogLevel::OVMS_LOG_INFO)
return true;

const auto& options = cc->Options<OpenVINOInferenceCalculatorOptions>();

if (!ValidateOrderListsForNonVector(cc->Inputs().GetTags(), options.input_order_list())) {
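As a side note on the gate above: it calls `std::getenv` twice to guard against a null result. A hypothetical wrapper (not part of this commit) that reads the variable once could look like this:

```cpp
#include <cstdlib>
#include <string>

// Hypothetical helper, not part of this commit: read an environment
// variable once and fall back to a default when it is unset.
static std::string GetEnvOr(const char* name, const std::string& fallback = "") {
    const char* value = std::getenv(name);  // nullptr when unset
    return value != nullptr ? std::string(value) : fallback;
}

// With it, the validation gate would read:
//   if (StringToLogLevel(GetEnvOr(OvmsLogLevelEnv)) == OVMS_LOG_INFO)
//       return true;
```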
32 changes: 32 additions & 0 deletions mediapipe/calculators/ovms/openvinoinferenceutils.cc
@@ -19,6 +19,7 @@
#include <string>
#include <vector>

#include "ovms.h" // NOLINT
namespace mediapipe {

// Function from ovms/src/string_utils.h
@@ -57,4 +58,35 @@ bool endsWith(const std::string& str, const std::string& match) {
});
}

OVMS_LogLevel StringToLogLevel(const std::string& logLevel){
if (logLevel == "3")
return OVMS_LOG_ERROR;
if (logLevel == "1")
return OVMS_LOG_DEBUG;
if (logLevel == "0")
return OVMS_LOG_TRACE;
if (logLevel == "2")
return OVMS_LOG_INFO;

return OVMS_LOG_INFO;
}

std::string LogLevelToString(OVMS_LogLevel log_level) {
switch (log_level) {
case OVMS_LOG_INFO:
return "INFO";
case OVMS_LOG_ERROR:
return "ERROR";
case OVMS_LOG_DEBUG:
return "DEBUG";
case OVMS_LOG_TRACE:
return "TRACE";
case OVMS_LOG_WARNING:
return "WARNING";

}

return "unsupported";
}

} // namespace mediapipe
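
For reference, a small usage sketch of how the two helpers above compose. It assumes the `OVMS_LogLevel` values from `ovms.h` and the declarations from `openvinoinferenceutils.h` (the include path below is an assumption based on this repository's layout):

```cpp
#include <iostream>

#include "mediapipe/calculators/ovms/openvinoinferenceutils.h"

int main() {
    // GLOG_minloglevel "0" is the most verbose level and maps to TRACE.
    OVMS_LogLevel level = mediapipe::StringToLogLevel("0");
    std::cout << mediapipe::LogLevelToString(level) << std::endl;  // TRACE

    // Unset or unrecognized values fall back to INFO.
    std::cout << mediapipe::LogLevelToString(mediapipe::StringToLogLevel(""))
              << std::endl;  // INFO
    return 0;
}
```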
7 changes: 7 additions & 0 deletions mediapipe/calculators/ovms/openvinoinferenceutils.h
Original file line number Diff line number Diff line change
@@ -15,6 +15,9 @@
//*****************************************************************************
#include <string>
#include <vector>

#include "ovms.h" // NOLINT

namespace mediapipe {

// Function from ovms/src/string_utils.h
@@ -26,4 +29,8 @@ std::vector<std::string> tokenize(const std::string& str, const char delimiter);
// Function from ovms/src/string_utils.h
bool endsWith(const std::string& str, const std::string& match);

OVMS_LogLevel StringToLogLevel(const std::string& logLevel);

std::string LogLevelToString(OVMS_LogLevel log_level);

} // namespace mediapipe
mediapipe/calculators/ovms/openvinomodelserversessioncalculator.cc
@@ -28,6 +28,7 @@
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wdeprecated-declarations"
#include "mediapipe/calculators/ovms/openvinomodelserversessioncalculator.pb.h"
#include "mediapipe/calculators/ovms/openvinoinferenceutils.h"
#include "mediapipe/framework/calculator_framework.h"
#include "mediapipe/framework/port/canonical_errors.h"
#include "modelapiovmsadapter.hpp"
@@ -121,6 +122,9 @@ absl::Status OpenVINOModelServerSessionCalculator::GetContract(CalculatorContrac
cc->OutputSidePackets().Tag(SESSION_TAG.c_str()).Set<std::shared_ptr<::InferenceAdapter>>();
const auto& options = cc->Options<OpenVINOModelServerSessionCalculatorOptions>();
RET_CHECK(!options.servable_name().empty());

OvmsLogLevel = StringToLogLevel(std::string(std::getenv(OvmsLogLevelEnv) == nullptr ? "" : std::getenv(OvmsLogLevelEnv)));
LOG(INFO) << "OpenVINOModelServerSessionCalculator ovms log level setting: " << LogLevelToString(OvmsLogLevel);
LOG(INFO) << "OpenVINOModelServerSessionCalculator GetContract end";
return absl::OkStatus();
}
@@ -167,7 +171,7 @@ absl::Status OpenVINOModelServerSessionCalculator::Open(CalculatorContext* cc) {
OVMS_ModelsSettingsNew(&guard.modelsSettings);
OVMS_ModelsSettingsSetConfigPath(guard.modelsSettings, options.server_config().c_str());
LOG(INFO) << "state config file:" << options.server_config();
OVMS_ServerSettingsSetLogLevel(guard.serverSettings, OVMS_LOG_DEBUG);
OVMS_ServerSettingsSetLogLevel(guard.serverSettings, OvmsLogLevel);

ASSERT_CAPI_STATUS_NULL(OVMS_ServerStartFromConfigurationFile(cserver, guard.serverSettings, guard.modelsSettings));

@@ -205,6 +209,8 @@ absl::Status OpenVINOModelServerSessionCalculator::Process(CalculatorContext* cc

bool OpenVINOModelServerSessionCalculator::triedToStartOVMS = false;
std::mutex OpenVINOModelServerSessionCalculator::loadingMtx;
const char* OpenVINOModelServerSessionCalculator::OvmsLogLevelEnv = "GLOG_minloglevel";
OVMS_LogLevel OpenVINOModelServerSessionCalculator::OvmsLogLevel = OVMS_LOG_INFO;

REGISTER_CALCULATOR(OpenVINOModelServerSessionCalculator);
} // namespace mediapipe
mediapipe/calculators/ovms/openvinomodelserversessioncalculator.h
@@ -47,5 +47,8 @@ class OpenVINOModelServerSessionCalculator : public CalculatorBase {
absl::Status Open(CalculatorContext* cc) override final;

absl::Status Process(CalculatorContext* cc) override final;
static OVMS_LogLevel OvmsLogLevel;
static const char* OvmsLogLevelEnv;
};

} // namespace mediapipe
