CVS-33484-cpu_extension_loader
* CVS-33484-cpu_extension_loader
intel-rrozestw committed Dec 1, 2020
1 parent 00df14f commit 4cb3af9
Showing 6 changed files with 202 additions and 0 deletions.
4 changes: 4 additions & 0 deletions Dockerfile.centos
@@ -47,6 +47,7 @@ RUN yum install -d6 -y epel-release centos-release-scl && yum update -d6 -y && y
boost-system \
boost-date-time \
cmake3 \
gcc-c++ \
devtoolset-8-gcc* \
automake \
autoconf \
@@ -209,6 +210,9 @@ ENV LD_LIBRARY_PATH=${LD_LIBRARY_PATH}:/opt/intel/openvino/deployment_tools/infe

RUN bazel build ${debug_bazel_flags} //src:ovms
RUN bazel build ${debug_bazel_flags} //src:libsampleloader.so

RUN cd /ovms/src/test/cpu_extension/ && make

RUN bazel test ${debug_bazel_flags} --test_summary=detailed --test_output=all //src:ovms_test

COPY ${ovms_metadata_file} metadata.json
10 changes: 10 additions & 0 deletions src/config.cpp
@@ -16,6 +16,7 @@
#include "config.hpp"

#include <algorithm>
#include <filesystem>
#include <limits>
#include <regex>
#include <thread>
@@ -115,6 +116,10 @@ Config& Config::parse(int argc, char** argv) {
"Target device to run the inference",
cxxopts::value<std::string>()->default_value("CPU"),
"TARGET_DEVICE")
("cpu_extension",
"a path to shared library containing custom CPU layer implementation. Default: empty.",
cxxopts::value<std::string>()->default_value(""),
"CPU_EXTENSION")
("plugin_config",
"a dictionary of plugin configuration keys and their values, eg \"{\\\"CPU_THROUGHPUT_STREAMS\\\": \\\"1\\\"}\". Default throughput streams for CPU and GPU are calculated by OpenVINO",
cxxopts::value<std::string>(),
@@ -232,6 +237,11 @@ void Config::validate() {
exit(EX_USAGE);
}

// check cpu_extension path:
if (result->count("cpu_extension") && !std::filesystem::exists(this->cpuExtensionLibraryPath())) {
std::cerr << "File path provided as an --cpu_extension parameter does not exists in the filesystem: " << this->cpuExtensionLibraryPath() << std::endl;
exit(EX_USAGE);
}
return;
}

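For illustration, the same flag handling can be reproduced standalone: register --cpu_extension with cxxopts and check the path with std::filesystem, as Config::parse() and Config::validate() do above. The sketch below is not OVMS code; it assumes cxxopts 2.x, C++17, and a made-up program name ("ovms-mini").

// Sketch only: mirrors the --cpu_extension registration and validation above.
// Assumes cxxopts 2.x and C++17 <filesystem>; "ovms-mini" is hypothetical.
#include <cxxopts.hpp>
#include <filesystem>
#include <iostream>
#include <string>
#include <sysexits.h>

int main(int argc, char** argv) {
    cxxopts::Options options("ovms-mini", "cpu_extension flag sketch");
    options.add_options()
        ("cpu_extension",
         "a path to a shared library containing custom CPU layer implementation",
         cxxopts::value<std::string>()->default_value(""),
         "CPU_EXTENSION");
    auto result = options.parse(argc, argv);
    const std::string path = result["cpu_extension"].as<std::string>();
    if (result.count("cpu_extension") && !std::filesystem::exists(path)) {
        std::cerr << "File path provided as a --cpu_extension parameter does not exist: " << path << std::endl;
        return EX_USAGE;  // exit code used by Config::validate() above
    }
    return 0;
}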
12 changes: 12 additions & 0 deletions src/config.hpp
@@ -105,6 +105,18 @@ class Config {
return result->operator[]("port").as<uint64_t>();
}

/**
* @brief Get the custom CPU extension library path
*
* @return const std::string
*/
const std::string cpuExtensionLibraryPath() {
if (result != nullptr && result->count("cpu_extension")) {
return result->operator[]("cpu_extension").as<std::string>();
}
return "";
}

/**
* @brief Get the gRPC network interface address to bind to
*
15 changes: 15 additions & 0 deletions src/modelinstance.cpp
@@ -236,6 +236,21 @@ uint ModelInstance::getNumOfParallelInferRequests(const ModelConfig& modelConfig

void ModelInstance::loadOVEngine() {
engine = std::make_unique<InferenceEngine::Core>();
if (ovms::Config::instance().cpuExtensionLibraryPath() != "") {
SPDLOG_INFO("Loading custom CPU extension from {}", ovms::Config::instance().cpuExtensionLibraryPath());
try {
auto extension_ptr = InferenceEngine::make_so_pointer<InferenceEngine::IExtension>(ovms::Config::instance().cpuExtensionLibraryPath().c_str());
SPDLOG_INFO("Custom CPU extention loaded. Adding it.");
engine->AddExtension(extension_ptr, "CPU");
SPDLOG_INFO("Extention added.");
} catch (std::exception& ex) {
SPDLOG_CRITICAL("Custom CPU extention loading has failed! Reason: {}", ex.what());
throw;
} catch (...) {
SPDLOG_CRITICAL("Custom CPU extention loading has failed with an unknown error!");
throw;
}
}
}

std::unique_ptr<InferenceEngine::CNNNetwork> ModelInstance::loadOVCNNNetworkPtr(const std::string& modelFile) {
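For reference, the loading sequence added to loadOVEngine() can be exercised on its own. Below is a minimal sketch (not part of this commit) that loads a custom CPU extension into a bare InferenceEngine::Core using the same make_so_pointer/AddExtension calls; it assumes the 2020.x Inference Engine headers.

// Sketch only: load a CPU extension into a standalone Core, mirroring
// ModelInstance::loadOVEngine() above. Assumes IE 2020.x (<inference_engine.hpp>).
#include <inference_engine.hpp>
#include <iostream>

int main(int argc, char** argv) {
    if (argc < 2) {
        std::cerr << "usage: " << argv[0] << " <path/to/libextension.so>" << std::endl;
        return 1;
    }
    try {
        InferenceEngine::Core core;
        auto extension = InferenceEngine::make_so_pointer<InferenceEngine::IExtension>(argv[1]);
        core.AddExtension(extension, "CPU");  // register for the CPU plugin only
        std::cout << "Extension loaded and registered." << std::endl;
    } catch (const std::exception& ex) {
        std::cerr << "Extension loading failed: " << ex.what() << std::endl;
        return 1;
    }
    return 0;
}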
27 changes: 27 additions & 0 deletions src/test/cpu_extension/Makefile
@@ -0,0 +1,27 @@
#
# Copyright (c) 2020 Intel Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

all:
. /opt/intel/openvino/bin/setupvars.sh && /usr/bin/g++ -std=gnu++11 -fPIC -shared minimal_cpu_extension.cpp \
-I /opt/intel/openvino/inference_engine/include/ \
-I /opt/intel/openvino/deployment_tools/ngraph/include/ \
-o libminimal_cpu_extension.so

test:
curl --create-dirs https://download.01.org/opencv/2020/openvinotoolkit/2020.4/open_model_zoo/models_bin/3/face-detection-retail-0004/FP32/face-detection-retail-0004.xml https://download.01.org/opencv/2020/openvinotoolkit/2020.4/open_model_zoo/models_bin/3/face-detection-retail-0004/FP32/face-detection-retail-0004.bin -o model/1/face-detection-retail-0004.xml -o model/1/face-detection-retail-0004.bin
curl --create-dirs https://raw.githubusercontent.com/openvinotoolkit/model_server/master/example_client/images/people/people1.jpeg -o images/people1.jpeg
chmod -vR 666 ./images/ ./model/
/ovms/bazel-bin/src/ovms --cpu_extension /ovms/src/test/cpu_extension/libminimal_cpu_extension.so --model_path ./model/ --model_name face-detection
134 changes: 134 additions & 0 deletions src/test/cpu_extension/minimal_cpu_extension.cpp
@@ -0,0 +1,134 @@
//*****************************************************************************
// Copyright 2020 Intel Corporation
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//*****************************************************************************

#include <cstdio>

#include <ie_iextension.h>
#include <ngraph/opsets/opset.hpp>

namespace InferenceEngine {
namespace Extensions {
namespace Cpu {

class OvmsOperation : public ILayerExecImpl {
public:
explicit OvmsOperation(const std::shared_ptr<ngraph::Node>& node) {
::printf("OvmsOperation(node)\n");
}
StatusCode getSupportedConfigurations(std::vector<LayerConfig>& conf, ResponseDesc* resp) noexcept override;
StatusCode init(LayerConfig& config, ResponseDesc* resp) noexcept override;
StatusCode execute(std::vector<Blob::Ptr>& inputs, std::vector<Blob::Ptr>& outputs, ResponseDesc* resp) noexcept override;

private:
int64_t add;
ngraph::Shape inShape;
ngraph::Shape outShape;
std::string error;
};

StatusCode OvmsOperation::getSupportedConfigurations(std::vector<LayerConfig>& conf, ResponseDesc* resp) noexcept {
::printf("CPU_EXTENSIONS: getSupportedConfigurations()\n");
return OK;
}

StatusCode OvmsOperation::init(LayerConfig& config, ResponseDesc* resp) noexcept {
::printf("CPU_EXTENSIONS: init()\n");
return OK;
}

StatusCode OvmsOperation::execute(std::vector<Blob::Ptr>& inputs, std::vector<Blob::Ptr>& outputs, ResponseDesc* resp) noexcept {
::printf("CPU_EXTENSIONS: execute()\n");
return OK;
}
class INFERENCE_ENGINE_API_CLASS(OvmsMinimalCpuExtension) :
public IExtension {
public:
OvmsMinimalCpuExtension() = default;
InferenceEngine::ILayerImpl::Ptr getImplementation(const std::shared_ptr<ngraph::Node>& node, const std::string& implType);

void GetVersion(const InferenceEngine::Version*& versionInfo) const noexcept override {
::printf("OvmsMinimalCpuExtension::GetVersion()\n");
static InferenceEngine::Version ExtensionDescription = {
{1, 0}, // extension API version
"1.0",
"template_ext" // extension description message
};
versionInfo = &ExtensionDescription;
}

void Unload() noexcept override { ::printf("OvmsMinimalCpuExtension::Unload()\n"); }
void Release() noexcept override {
::printf("OvmsMinimalCpuExtension::Release()\n");
delete this;
}

std::map<std::string, ngraph::OpSet> getOpSets() override;
std::vector<std::string> getImplTypes(const std::shared_ptr<ngraph::Node>& node) override;
};

std::map<std::string, ngraph::OpSet> OvmsMinimalCpuExtension::getOpSets() {
::printf("OvmsMinimalCpuExtension::getOpSets()\n");
std::map<std::string, ngraph::OpSet> opsets;
ngraph::OpSet opset;
// opset.insert<OvmsOperation>();
opsets["custom_opset"] = opset;
return opsets;
}

std::vector<std::string> OvmsMinimalCpuExtension::getImplTypes(const std::shared_ptr<ngraph::Node>& node) {
::printf("OvmsMinimalCpuExtension::getImplTypes()\n");
if (std::dynamic_pointer_cast<OvmsOperation>(node)) {
::printf("OvmsMinimalCpuExtension::getImplTypes() -> dyncast ok, returning 'CPU'\n");
return {"CPU"};
}
::printf("OvmsMinimalCpuExtension::getImplTypes() -> dyncast failed, returning empty vector.\n");
return {};
}

InferenceEngine::ILayerImpl::Ptr OvmsMinimalCpuExtension::getImplementation(const std::shared_ptr<ngraph::Node>& node, const std::string& implType) {
::printf("OvmsMinimalCpuExtension::getImplementation()\n");
if (std::dynamic_pointer_cast<OvmsOperation>(node) && implType == "CPU") {
::printf("OvmsMinimalCpuExtension::getgetImplementation() -> dyncast ok, returning new OvmsOperation.\n");
return std::make_shared<OvmsOperation>(node);
}
::printf("OvmsMinimalCpuExtension::getgetImplementation() -> dyncast failed, returning nullptr.\n");
return nullptr;
}

// Exported function
INFERENCE_EXTENSION_API(StatusCode)
CreateExtension(IExtension*& ext, ResponseDesc* resp) noexcept {
try {
::printf("CreateExtension()\n");
ext = new OvmsMinimalCpuExtension;
::printf("CreateExtension - ok!\n");
return OK;
} catch (std::exception& ex) {
::printf("CreateExtension exception: %s\n", ex.what());
if (resp) {
std::string err = ((std::string) "Couldn't create extension: ") + ex.what();
// std::string::copy() does not null-terminate the destination buffer
size_t copied = err.copy(resp->msg, sizeof(resp->msg) - 1);
resp->msg[copied] = '\0';
}
return GENERAL_ERROR;
} catch (...) {
::printf("CreateExtension exception!\n");
return GENERAL_ERROR;
}
}

} // namespace Cpu
} // namespace Extensions
} // namespace InferenceEngine
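
Beyond the Makefile's test target above, the exported entry point can be smoke-tested directly. The following is a hypothetical check (not part of this commit) that dlopen()s the built library and calls CreateExtension(); it assumes <ie_iextension.h> is on the include path and that INFERENCE_EXTENSION_API gives the symbol C linkage, as the Inference Engine loader expects.

// Hypothetical smoke test: verify the CreateExtension export and version info.
#include <cstdio>
#include <dlfcn.h>
#include <ie_iextension.h>

using InferenceEngine::IExtension;
using InferenceEngine::ResponseDesc;
using InferenceEngine::StatusCode;

int main() {
    void* handle = dlopen("./libminimal_cpu_extension.so", RTLD_NOW);
    if (!handle) {
        std::fprintf(stderr, "dlopen failed: %s\n", dlerror());
        return 1;
    }
    using CreateExtensionFunc = StatusCode(IExtension*&, ResponseDesc*);
    auto* create = reinterpret_cast<CreateExtensionFunc*>(dlsym(handle, "CreateExtension"));
    if (!create) {
        std::fprintf(stderr, "dlsym(CreateExtension) failed: %s\n", dlerror());
        return 1;
    }
    IExtension* ext = nullptr;
    ResponseDesc resp;
    if (create(ext, &resp) != StatusCode::OK || ext == nullptr) {
        std::fprintf(stderr, "CreateExtension failed: %s\n", resp.msg);
        return 1;
    }
    const InferenceEngine::Version* version = nullptr;
    ext->GetVersion(version);
    std::printf("Extension created, description: %s\n",
                version && version->description ? version->description : "(none)");
    ext->Release();  // the extension deletes itself in Release()
    dlclose(handle);
    return 0;
}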
