Added ONNX reader for the OpenVINO (#532)
* Added ONNX reader for the OpenVINO * Fixed comments * Fixed comments * Fixed message * Fixed memory consumption * Revert IReaderPtr * Fixed Myriad tests * Fixed comment * Renamed inference_engine_ir_readers to inference_engine_ir_reader
This commit is contained in:
parent
d5434a036e
commit
3c718809d3
@ -20,7 +20,7 @@ ie_coverage_genhtml(INFO_FILE "inference_engine"
|
||||
PREFIX "${DLDT_COVERAGE_BASE_DIRECTORY}")
|
||||
|
||||
ie_coverage_extract(INPUT "dldt" OUTPUT "inference_engine_ir_reader"
|
||||
PATTERNS "${DLDT_COVERAGE_BASE_DIRECTORY}/ir_readers/*")
|
||||
PATTERNS "${DLDT_COVERAGE_BASE_DIRECTORY}/readers/*")
|
||||
ie_coverage_genhtml(INFO_FILE "inference_engine_ir_reader"
|
||||
PREFIX "${DLDT_COVERAGE_BASE_DIRECTORY}")
|
||||
|
||||
|
@ -82,8 +82,8 @@ function(ie_add_plugin)
|
||||
if(TARGET inference_engine_preproc)
|
||||
add_dependencies(${IE_PLUGIN_NAME} inference_engine_preproc)
|
||||
endif()
|
||||
if(TARGET inference_engine_ir_readers)
|
||||
add_dependencies(${IE_PLUGIN_NAME} inference_engine_ir_readers)
|
||||
if(TARGET inference_engine_ir_reader)
|
||||
add_dependencies(${IE_PLUGIN_NAME} inference_engine_ir_reader)
|
||||
endif()
|
||||
|
||||
# install rules
|
||||
|
@ -11,6 +11,7 @@ from libc.stdint cimport int64_t, uint8_t, int8_t, int32_t, uint16_t, int16_t
|
||||
from libc.string cimport memcpy
|
||||
|
||||
import os
|
||||
from fnmatch import fnmatch
|
||||
from pathlib import Path
|
||||
import threading
|
||||
import warnings
|
||||
@ -268,20 +269,23 @@ cdef class IECore:
|
||||
net.impl = self.impl.readNetwork(xml_buffer, bin_buffer, len(weights))
|
||||
free(xml_buffer)
|
||||
else:
|
||||
weights_ = "".encode()
|
||||
if isinstance(model, Path) and isinstance(weights, Path):
|
||||
if not model.is_file():
|
||||
raise Exception("Path to the model {} doesn't exist or it's a directory".format(model))
|
||||
if not weights.is_file():
|
||||
raise Exception("Path to the weights {} doesn't exist or it's a directory".format(weights))
|
||||
if model.suffix != ".onnx":
|
||||
if not weights.is_file():
|
||||
raise Exception("Path to the weights {} doesn't exist or it's a directory".format(weights))
|
||||
weights_ = bytes(weights)
|
||||
model_ = bytes(model)
|
||||
weights_ = bytes(weights)
|
||||
else:
|
||||
if not os.path.isfile(model):
|
||||
raise Exception("Path to the model {} doesn't exist or it's a directory".format(model))
|
||||
if not os.path.isfile(weights):
|
||||
raise Exception("Path to the weights {} doesn't exist or it's a directory".format(weights))
|
||||
if not fnmatch(model, "*.onnx"):
|
||||
if not os.path.isfile(weights):
|
||||
raise Exception("Path to the weights {} doesn't exist or it's a directory".format(weights))
|
||||
weights_ = weights.encode()
|
||||
model_ = model.encode()
|
||||
weights_ = weights.encode()
|
||||
net.impl = self.impl.readNetwork(model_, weights_)
|
||||
return net
|
||||
|
||||
|
@ -3,10 +3,7 @@ import pytest
|
||||
|
||||
|
||||
def model_path(is_myriad=False):
|
||||
if os.environ.get("MODELS_PATH"):
|
||||
path_to_repo = os.environ.get("MODELS_PATH")
|
||||
else:
|
||||
raise EnvironmentError("MODELS_PATH variable isn't set")
|
||||
path_to_repo = os.environ["MODELS_PATH"]
|
||||
if not is_myriad:
|
||||
test_xml = os.path.join(path_to_repo, "models", "test_model", 'test_model_fp32.xml')
|
||||
test_bin = os.path.join(path_to_repo, "models", "test_model", 'test_model_fp32.bin')
|
||||
@ -15,24 +12,27 @@ def model_path(is_myriad=False):
|
||||
test_bin = os.path.join(path_to_repo, "models", "test_model", 'test_model_fp16.bin')
|
||||
return (test_xml, test_bin)
|
||||
|
||||
|
||||
def model_onnx_path():
|
||||
path_to_repo = os.environ["MODELS_PATH"]
|
||||
test_onnx = os.path.join(path_to_repo, "models", "test_model", 'test_model.onnx')
|
||||
return test_onnx
|
||||
|
||||
|
||||
def image_path():
|
||||
if os.environ.get("DATA_PATH"):
|
||||
path_to_repo = os.environ.get("DATA_PATH")
|
||||
else:
|
||||
raise EnvironmentError("DATA_PATH variable isn't set")
|
||||
path_to_repo = os.environ["DATA_PATH"]
|
||||
path_to_img = os.path.join(path_to_repo, 'validation_set', '224x224', 'dog.bmp')
|
||||
return path_to_img
|
||||
|
||||
|
||||
def plugins_path():
|
||||
if os.environ.get("DATA_PATH"):
|
||||
path_to_repo = os.environ.get("DATA_PATH")
|
||||
else:
|
||||
raise EnvironmentError("DATA_PATH variable isn't set")
|
||||
path_to_repo = os.environ["DATA_PATH"]
|
||||
plugins_xml = os.path.join(path_to_repo, 'ie_class', 'plugins.xml')
|
||||
plugins_win_xml = os.path.join(path_to_repo, 'ie_class', 'plugins_mingw.xml')
|
||||
plugins_osx_xml = os.path.join(path_to_repo, 'ie_class', 'plugins_apple.xml')
|
||||
return (plugins_xml, plugins_win_xml, plugins_osx_xml)
|
||||
|
||||
|
||||
@pytest.fixture(scope='session')
|
||||
def device():
|
||||
return os.environ.get("TEST_DEVICE") if os.environ.get("TEST_DEVICE") else "CPU"
|
||||
|
@ -5,7 +5,7 @@ import numpy as np
|
||||
from pathlib import Path
|
||||
|
||||
from openvino.inference_engine import IENetwork, IECore, ExecutableNetwork
|
||||
from conftest import model_path, plugins_path
|
||||
from conftest import model_path, plugins_path, model_onnx_path
|
||||
|
||||
|
||||
test_net_xml, test_net_bin = model_path()
|
||||
@ -143,28 +143,39 @@ def test_get_metric_str():
|
||||
assert isinstance(param, str), "Parameter value for 'FULL_DEVICE_NAME' " \
|
||||
"metric must be string but {} is returned".format(type(param))
|
||||
|
||||
|
||||
def test_read_network_from_xml():
|
||||
ie = IECore()
|
||||
net = ie.read_network(model=test_net_xml, weights=test_net_bin)
|
||||
assert isinstance(net, IENetwork)
|
||||
|
||||
|
||||
def test_read_network_as_path():
|
||||
ie = IECore()
|
||||
net = ie.read_network(model=Path(model_path()[0]), weights=Path(test_net_bin))
|
||||
assert isinstance(net, IENetwork)
|
||||
|
||||
|
||||
def test_read_network_from_onnx():
|
||||
ie = IECore()
|
||||
net = ie.read_network(model=model_onnx_path())
|
||||
assert isinstance(net, IENetwork)
|
||||
|
||||
|
||||
def test_incorrect_xml():
|
||||
ie = IECore()
|
||||
with pytest.raises(Exception) as e:
|
||||
ie.read_network(model="./model.xml", weights=Path(test_net_bin))
|
||||
assert "Path to the model ./model.xml doesn't exist or it's a directory" in str(e.value)
|
||||
|
||||
|
||||
def test_incorrect_bin():
|
||||
ie = IECore()
|
||||
with pytest.raises(Exception) as e:
|
||||
ie.read_network(model=test_net_xml, weights="./model.bin")
|
||||
assert "Path to the weights ./model.bin doesn't exist or it's a directory" in str(e.value)
|
||||
|
||||
|
||||
def test_read_net_from_buffer():
|
||||
ie = IECore()
|
||||
with open(test_net_bin, 'rb') as f:
|
||||
@ -174,6 +185,7 @@ def test_read_net_from_buffer():
|
||||
net = ie.read_network(model=xml, weights=bin, init_from_buffer=True)
|
||||
assert isinstance(net, IENetwork)
|
||||
|
||||
|
||||
def test_net_from_buffer_valid():
|
||||
ie = IECore()
|
||||
with open(test_net_bin, 'rb') as f:
|
||||
|
@ -12,39 +12,38 @@
|
||||
#include "details/ie_no_copy.hpp"
|
||||
|
||||
#if defined(USE_STATIC_IE) || (defined(__GNUC__) && (__GNUC__ < 4))
|
||||
#define INFERENCE_ENGINE_API(...) extern "C" __VA_ARGS__
|
||||
#define INFERENCE_ENGINE_API_CPP(...) __VA_ARGS__
|
||||
#define INFERENCE_ENGINE_API_CLASS(...) __VA_ARGS__
|
||||
#define INFERENCE_ENGINE_CDECL __attribute__((cdecl))
|
||||
# define INFERENCE_ENGINE_API(...) extern "C" __VA_ARGS__
|
||||
# define INFERENCE_ENGINE_API_CPP(...) __VA_ARGS__
|
||||
# define INFERENCE_ENGINE_API_CLASS(...) __VA_ARGS__
|
||||
# define INFERENCE_ENGINE_CDECL __attribute__((cdecl))
|
||||
#else
|
||||
#if defined(_WIN32)
|
||||
#define INFERENCE_ENGINE_CDECL
|
||||
|
||||
#ifdef IMPLEMENT_INFERENCE_ENGINE_API
|
||||
#define INFERENCE_ENGINE_API(...) extern "C" __declspec(dllexport) __VA_ARGS__ __cdecl
|
||||
#define INFERENCE_ENGINE_API_CPP(...) __declspec(dllexport) __VA_ARGS__ __cdecl
|
||||
#define INFERENCE_ENGINE_API_CLASS(...) __declspec(dllexport) __VA_ARGS__
|
||||
#else
|
||||
#define INFERENCE_ENGINE_API(...) extern "C" __declspec(dllimport) __VA_ARGS__ __cdecl
|
||||
#define INFERENCE_ENGINE_API_CPP(...) __declspec(dllimport) __VA_ARGS__ __cdecl
|
||||
#define INFERENCE_ENGINE_API_CLASS(...) __declspec(dllimport) __VA_ARGS__
|
||||
#endif
|
||||
#else
|
||||
#define INFERENCE_ENGINE_CDECL __attribute__((cdecl))
|
||||
#define INFERENCE_ENGINE_API(...) extern "C" __attribute__((visibility("default"))) __VA_ARGS__
|
||||
#define INFERENCE_ENGINE_API_CPP(...) __attribute__((visibility("default"))) __VA_ARGS__
|
||||
#define INFERENCE_ENGINE_API_CLASS(...) __attribute__((visibility("default"))) __VA_ARGS__
|
||||
#endif
|
||||
# if defined(_WIN32)
|
||||
# define INFERENCE_ENGINE_CDECL
|
||||
# ifdef IMPLEMENT_INFERENCE_ENGINE_API
|
||||
# define INFERENCE_ENGINE_API(...) extern "C" __declspec(dllexport) __VA_ARGS__ __cdecl
|
||||
# define INFERENCE_ENGINE_API_CPP(...) __declspec(dllexport) __VA_ARGS__ __cdecl
|
||||
# define INFERENCE_ENGINE_API_CLASS(...) __declspec(dllexport) __VA_ARGS__
|
||||
# else
|
||||
# define INFERENCE_ENGINE_API(...) extern "C" __declspec(dllimport) __VA_ARGS__ __cdecl
|
||||
# define INFERENCE_ENGINE_API_CPP(...) __declspec(dllimport) __VA_ARGS__ __cdecl
|
||||
# define INFERENCE_ENGINE_API_CLASS(...) __declspec(dllimport) __VA_ARGS__
|
||||
# endif
|
||||
# else
|
||||
# define INFERENCE_ENGINE_CDECL __attribute__((cdecl))
|
||||
# define INFERENCE_ENGINE_API(...) extern "C" __attribute__((visibility("default"))) __VA_ARGS__
|
||||
# define INFERENCE_ENGINE_API_CPP(...) __attribute__((visibility("default"))) __VA_ARGS__
|
||||
# define INFERENCE_ENGINE_API_CLASS(...) __attribute__((visibility("default"))) __VA_ARGS__
|
||||
# endif
|
||||
#endif
|
||||
|
||||
#if defined(_WIN32)
|
||||
#define INFERENCE_ENGINE_DEPRECATED(msg) __declspec(deprecated(msg))
|
||||
# define INFERENCE_ENGINE_DEPRECATED(msg) __declspec(deprecated(msg))
|
||||
#elif defined __INTEL_COMPILER
|
||||
#define INFERENCE_ENGINE_DEPRECATED(msg) __attribute__((deprecated(msg)))
|
||||
# define INFERENCE_ENGINE_DEPRECATED(msg) __attribute__((deprecated(msg)))
|
||||
#elif defined(__GNUC__)
|
||||
#define INFERENCE_ENGINE_DEPRECATED(msg) __attribute__((deprecated((msg))))
|
||||
# define INFERENCE_ENGINE_DEPRECATED(msg) __attribute__((deprecated((msg))))
|
||||
#else
|
||||
#define INFERENCE_ENGINE_DEPRECATED(msg)
|
||||
# define INFERENCE_ENGINE_DEPRECATED(msg)
|
||||
#endif
|
||||
|
||||
#if defined IMPLEMENT_INFERENCE_ENGINE_API || defined IMPLEMENT_INFERENCE_ENGINE_PLUGIN
|
||||
@ -63,32 +62,32 @@
|
||||
|
||||
// Suppress warning "-Wdeprecated-declarations" / C4996
|
||||
#if defined(_MSC_VER)
|
||||
#define IE_DO_PRAGMA(x) __pragma(x)
|
||||
# define IE_DO_PRAGMA(x) __pragma(x)
|
||||
#elif defined(__GNUC__)
|
||||
#define IE_DO_PRAGMA(x) _Pragma(#x)
|
||||
# define IE_DO_PRAGMA(x) _Pragma(#x)
|
||||
#else
|
||||
#define IE_DO_PRAGMA(x)
|
||||
# define IE_DO_PRAGMA(x)
|
||||
#endif
|
||||
|
||||
#if defined(_MSC_VER) && !defined(__clang__)
|
||||
#define IE_SUPPRESS_DEPRECATED_START \
|
||||
IE_DO_PRAGMA(warning(push)) \
|
||||
IE_DO_PRAGMA(warning(disable : 4996))
|
||||
#define IE_SUPPRESS_DEPRECATED_END IE_DO_PRAGMA(warning(pop))
|
||||
# define IE_SUPPRESS_DEPRECATED_START \
|
||||
IE_DO_PRAGMA(warning(push)) \
|
||||
IE_DO_PRAGMA(warning(disable : 4996))
|
||||
# define IE_SUPPRESS_DEPRECATED_END IE_DO_PRAGMA(warning(pop))
|
||||
#elif defined(__INTEL_COMPILER)
|
||||
#define IE_SUPPRESS_DEPRECATED_START \
|
||||
IE_DO_PRAGMA(warning(push)) \
|
||||
IE_DO_PRAGMA(warning(disable : 1478))
|
||||
IE_DO_PRAGMA(warning(disable : 1786))
|
||||
#define IE_SUPPRESS_DEPRECATED_END IE_DO_PRAGMA(warning(pop))
|
||||
# define IE_SUPPRESS_DEPRECATED_START \
|
||||
IE_DO_PRAGMA(warning(push)) \
|
||||
IE_DO_PRAGMA(warning(disable : 1478))
|
||||
IE_DO_PRAGMA(warning(disable : 1786))
|
||||
# define IE_SUPPRESS_DEPRECATED_END IE_DO_PRAGMA(warning(pop))
|
||||
#elif defined(__clang__) || ((__GNUC__) && (__GNUC__ * 100 + __GNUC_MINOR__ > 405))
|
||||
#define IE_SUPPRESS_DEPRECATED_START \
|
||||
IE_DO_PRAGMA(GCC diagnostic push) \
|
||||
IE_DO_PRAGMA(GCC diagnostic ignored "-Wdeprecated-declarations")
|
||||
#define IE_SUPPRESS_DEPRECATED_END IE_DO_PRAGMA(GCC diagnostic pop)
|
||||
# define IE_SUPPRESS_DEPRECATED_START \
|
||||
IE_DO_PRAGMA(GCC diagnostic push) \
|
||||
IE_DO_PRAGMA(GCC diagnostic ignored "-Wdeprecated-declarations")
|
||||
# define IE_SUPPRESS_DEPRECATED_END IE_DO_PRAGMA(GCC diagnostic pop)
|
||||
#else
|
||||
#define IE_SUPPRESS_DEPRECATED_START
|
||||
#define IE_SUPPRESS_DEPRECATED_END
|
||||
# define IE_SUPPRESS_DEPRECATED_START
|
||||
# define IE_SUPPRESS_DEPRECATED_END
|
||||
#endif
|
||||
|
||||
#ifdef _WIN32
|
||||
@ -111,3 +110,25 @@
|
||||
# define ENABLE_UNICODE_PATH_SUPPORT
|
||||
# endif
|
||||
#endif
|
||||
|
||||
/**
|
||||
* @def INFERENCE_PLUGIN_API(type)
|
||||
* @brief Defines Inference Engine Plugin API method
|
||||
* @param type A plugin type
|
||||
*/
|
||||
|
||||
#if defined(_WIN32)
|
||||
# ifdef IMPLEMENT_INFERENCE_ENGINE_PLUGIN
|
||||
# define INFERENCE_PLUGIN_API(type) extern "C" __declspec(dllexport) type
|
||||
# else
|
||||
# define INFERENCE_PLUGIN_API(type) extern "C" type
|
||||
# endif
|
||||
#elif (__GNUC__ >= 4) // NOLINT
|
||||
# ifdef IMPLEMENT_INFERENCE_ENGINE_PLUGIN
|
||||
# define INFERENCE_PLUGIN_API(type) extern "C" __attribute__((visibility("default"))) type
|
||||
# else
|
||||
# define INFERENCE_PLUGIN_API(type) extern "C" type
|
||||
# endif
|
||||
#else
|
||||
# define INFERENCE_PLUGIN_API(TYPE) extern "C" TYPE
|
||||
#endif
|
||||
|
@ -25,28 +25,6 @@
|
||||
#include "ie_iexecutable_network.hpp"
|
||||
#include "ie_version.hpp"
|
||||
|
||||
/**
|
||||
* @def INFERENCE_PLUGIN_API(type)
|
||||
* @brief Defines Inference Engine Plugin API method
|
||||
* @param type A plugin type
|
||||
*/
|
||||
|
||||
#if defined(_WIN32)
|
||||
#ifdef IMPLEMENT_INFERENCE_ENGINE_PLUGIN
|
||||
#define INFERENCE_PLUGIN_API(type) extern "C" __declspec(dllexport) type
|
||||
#else
|
||||
#define INFERENCE_PLUGIN_API(type) extern "C" type
|
||||
#endif
|
||||
#elif (__GNUC__ >= 4) // NOLINT
|
||||
#ifdef IMPLEMENT_INFERENCE_ENGINE_PLUGIN
|
||||
#define INFERENCE_PLUGIN_API(type) extern "C" __attribute__((visibility("default"))) type
|
||||
#else
|
||||
#define INFERENCE_PLUGIN_API(type) extern "C" type
|
||||
#endif
|
||||
#else
|
||||
#define INFERENCE_PLUGIN_API(TYPE) extern "C" TYPE
|
||||
#endif
|
||||
|
||||
namespace InferenceEngine {
|
||||
|
||||
/**
|
||||
|
@ -4,7 +4,7 @@
|
||||
|
||||
add_subdirectory(preprocessing)
|
||||
|
||||
add_subdirectory(ir_readers)
|
||||
add_subdirectory(readers)
|
||||
|
||||
add_subdirectory(legacy_api)
|
||||
|
||||
|
@ -123,7 +123,7 @@ target_include_directories(${TARGET_NAME}_obj SYSTEM PRIVATE $<TARGET_PROPERTY:n
|
||||
$<TARGET_PROPERTY:pugixml,INTERFACE_INCLUDE_DIRECTORIES>)
|
||||
|
||||
target_include_directories(${TARGET_NAME}_obj PRIVATE "${CMAKE_CURRENT_SOURCE_DIR}"
|
||||
$<TARGET_PROPERTY:${TARGET_NAME}_ir_readers,INTERFACE_INCLUDE_DIRECTORIES>
|
||||
$<TARGET_PROPERTY:${TARGET_NAME}_reader_api,INTERFACE_INCLUDE_DIRECTORIES>
|
||||
$<TARGET_PROPERTY:${TARGET_NAME}_plugin_api,INTERFACE_INCLUDE_DIRECTORIES>)
|
||||
|
||||
if(ENABLE_PROFILING_ITT AND INTEL_ITT_LIBS)
|
||||
@ -158,8 +158,8 @@ target_link_libraries(${TARGET_NAME} PRIVATE Threads::Threads ${INTEL_ITT_LIBS}
|
||||
${NGRAPH_LIBRARIES} inference_engine_transformations
|
||||
PUBLIC ${CMAKE_DL_LIBS} ${TARGET_NAME}_legacy)
|
||||
|
||||
target_include_directories(${TARGET_NAME} INTERFACE ${PUBLIC_HEADERS_DIR}
|
||||
PRIVATE $<TARGET_PROPERTY:${TARGET_NAME}_plugin_api,INTERFACE_INCLUDE_DIRECTORIES>)
|
||||
target_include_directories(${TARGET_NAME} INTERFACE ${PUBLIC_HEADERS_DIR} PRIVATE
|
||||
$<TARGET_PROPERTY:${TARGET_NAME}_plugin_api,INTERFACE_INCLUDE_DIRECTORIES>)
|
||||
|
||||
if(WIN32)
|
||||
set_target_properties(${TARGET_NAME} PROPERTIES COMPILE_PDB_NAME ${TARGET_NAME})
|
||||
|
@ -82,6 +82,8 @@ CNNNetwork::CNNNetwork(const std::shared_ptr<const ngraph::Function>& graph) {
|
||||
}
|
||||
|
||||
// Copy nGraph function
|
||||
if (graph == nullptr)
|
||||
THROW_IE_EXCEPTION << "Cannot create CNNNetwork from empty nGraph function!";
|
||||
network = std::make_shared<CNNNetworkNGraphImpl>(copyFunction(graph, false, {}));
|
||||
actual = network.get();
|
||||
if (actual == nullptr) {
|
||||
|
55
inference-engine/src/inference_engine/ie_blob_stream.cpp
Normal file
55
inference-engine/src/inference_engine/ie_blob_stream.cpp
Normal file
@ -0,0 +1,55 @@
|
||||
// Copyright (C) 2018-2020 Intel Corporation
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
//
|
||||
|
||||
#include "ie_blob_stream.hpp"
|
||||
|
||||
#include <ie_blob.h>
|
||||
#include <istream>
|
||||
|
||||
InferenceEngine::details::BlobStream::BlobBuffer::BlobBuffer(const InferenceEngine::Blob::CPtr& blob) {
|
||||
char* data = nullptr;
|
||||
std::streampos size;
|
||||
if (!blob) {
|
||||
size = 0;
|
||||
} else {
|
||||
data = blob->cbuffer().as<char*>();
|
||||
size = blob->byteSize();
|
||||
}
|
||||
setg(data, data, data + size);
|
||||
}
|
||||
InferenceEngine::details::BlobStream::BlobBuffer::~BlobBuffer() {}
|
||||
|
||||
std::streampos InferenceEngine::details::BlobStream::BlobBuffer::seekpos(std::streampos sp, std::ios_base::openmode which) {
|
||||
if (!(which & ios_base::in))
|
||||
return streampos(-1);
|
||||
if (sp < 0 || sp > egptr() - eback())
|
||||
return streampos(-1);
|
||||
setg(eback(), eback() + sp, egptr());
|
||||
return sp;
|
||||
}
|
||||
std::streampos InferenceEngine::details::BlobStream::BlobBuffer::seekoff(std::streamoff off, std::ios_base::seekdir way, std::ios_base::openmode which) {
|
||||
if (!(which & std::ios_base::in))
|
||||
return streampos(-1);
|
||||
switch (way) {
|
||||
default:
|
||||
case std::ios_base::beg:
|
||||
setg(eback(), eback() + off, egptr());
|
||||
break;
|
||||
case std::ios_base::cur:
|
||||
gbump(off);
|
||||
break;
|
||||
case std::ios_base::end:
|
||||
setg(eback(), egptr() + off, egptr());
|
||||
break;
|
||||
}
|
||||
return gptr() - eback();
|
||||
}
|
||||
|
||||
InferenceEngine::Blob::CPtr InferenceEngine::details::BlobStream::getBlob() {
|
||||
return blob;
|
||||
}
|
||||
|
||||
InferenceEngine::details::BlobStream::BlobStream(const InferenceEngine::Blob::CPtr& blob): buffer(blob), std::ios(0), std::istream(&buffer), blob(blob) {}
|
||||
|
||||
InferenceEngine::details::BlobStream::~BlobStream() {}
|
@ -11,11 +11,15 @@
|
||||
#include <map>
|
||||
#include <memory>
|
||||
#include <sstream>
|
||||
#include <streambuf>
|
||||
#include <string>
|
||||
#include <utility>
|
||||
#include <vector>
|
||||
#include <istream>
|
||||
#include <mutex>
|
||||
|
||||
#include "ie_blob_stream.hpp"
|
||||
#include <ie_reader_ptr.hpp>
|
||||
#include <ngraph/opsets/opset.hpp>
|
||||
#include "cpp/ie_cnn_net_reader.h"
|
||||
#include "cpp/ie_plugin_cpp.hpp"
|
||||
@ -44,7 +48,7 @@ InferenceEngine::details::SharedObjectLoader::Ptr cnnReaderLoader;
|
||||
|
||||
InferenceEngine::details::SharedObjectLoader::Ptr createCnnReaderLoader() {
|
||||
std::call_once(flag, [&] () {
|
||||
FileUtils::FilePath libraryName = FileUtils::toFilePath(std::string("inference_engine_ir_readers") + std::string(IE_BUILD_POSTFIX));
|
||||
FileUtils::FilePath libraryName = FileUtils::toFilePath(std::string("inference_engine_ir_reader") + std::string(IE_BUILD_POSTFIX));
|
||||
FileUtils::FilePath irReadersLibraryPath = FileUtils::makeSharedLibraryName(getInferenceEngineLibraryPath(), libraryName);
|
||||
|
||||
if (!FileUtils::fileExist(irReadersLibraryPath)) {
|
||||
@ -129,6 +133,79 @@ Parameter copyParameterValue(const Parameter & value) {
|
||||
|
||||
} // namespace
|
||||
|
||||
class Reader: public IReader {
|
||||
private:
|
||||
InferenceEngine::IReaderPtr ptr;
|
||||
std::once_flag readFlag;
|
||||
std::string name;
|
||||
std::string location;
|
||||
|
||||
InferenceEngine::IReaderPtr getReaderPtr() {
|
||||
std::call_once(readFlag, [&] () {
|
||||
FileUtils::FilePath libraryName = FileUtils::toFilePath(location);
|
||||
FileUtils::FilePath readersLibraryPath = FileUtils::makeSharedLibraryName(getInferenceEngineLibraryPath(), libraryName);
|
||||
|
||||
if (!FileUtils::fileExist(readersLibraryPath)) {
|
||||
THROW_IE_EXCEPTION << "Please, make sure that Inference Engine ONNX reader library "
|
||||
<< FileUtils::fromFilePath(::FileUtils::makeSharedLibraryName({}, libraryName)) << " is in "
|
||||
<< getIELibraryPath();
|
||||
}
|
||||
ptr = IReaderPtr(readersLibraryPath);
|
||||
});
|
||||
|
||||
return ptr;
|
||||
}
|
||||
|
||||
InferenceEngine::IReaderPtr getReaderPtr() const {
|
||||
return const_cast<Reader*>(this)->getReaderPtr();
|
||||
}
|
||||
|
||||
void Release() noexcept override {
|
||||
delete this;
|
||||
}
|
||||
|
||||
public:
|
||||
using Ptr = std::shared_ptr<Reader>;
|
||||
Reader(const std::string& name, const std::string location): name(name), location(location) {}
|
||||
bool supportModel(std::istream& model) const override {
|
||||
auto reader = getReaderPtr();
|
||||
return reader->supportModel(model);
|
||||
}
|
||||
CNNNetwork read(std::istream& model, const std::vector<IExtensionPtr>& exts) const override {
|
||||
auto reader = getReaderPtr();
|
||||
return reader->read(model, exts);
|
||||
}
|
||||
CNNNetwork read(std::istream& model, std::istream& weights, const std::vector<IExtensionPtr>& exts) const override {
|
||||
auto reader = getReaderPtr();
|
||||
return reader->read(model, weights, exts);
|
||||
}
|
||||
std::vector<std::string> getDataFileExtensions() const override {
|
||||
auto reader = getReaderPtr();
|
||||
return reader->getDataFileExtensions();
|
||||
}
|
||||
std::string getName() const {
|
||||
return name;
|
||||
}
|
||||
};
|
||||
|
||||
namespace {
|
||||
|
||||
// Extension to plugins creator
|
||||
std::multimap<std::string, Reader::Ptr> readers;
|
||||
|
||||
void registerReaders() {
|
||||
static std::mutex readerMutex;
|
||||
std::lock_guard<std::mutex> lock(readerMutex);
|
||||
// TODO: Read readers info from XML
|
||||
auto onnxReader = std::make_shared<Reader>("ONNX", std::string("inference_engine_onnx_reader") + std::string(IE_BUILD_POSTFIX));
|
||||
readers.emplace("onnx", onnxReader);
|
||||
readers.emplace("prototxt", onnxReader);
|
||||
auto irReader = std::make_shared<Reader>("IR", std::string("inference_engine_ir_reader") + std::string(IE_BUILD_POSTFIX));
|
||||
readers.emplace("xml", irReader);
|
||||
}
|
||||
|
||||
} // namespace
|
||||
|
||||
CNNNetReaderPtr CreateCNNNetReaderPtr() noexcept {
|
||||
auto loader = createCnnReaderLoader();
|
||||
return CNNNetReaderPtr(loader);
|
||||
@ -207,20 +284,6 @@ class Core::Impl : public ICore {
|
||||
std::vector<FileUtils::FilePath> listOfExtentions;
|
||||
};
|
||||
|
||||
/**
|
||||
* @brief Holds original blob in order to avoid situations
|
||||
* when original blob is allocated on stack
|
||||
*/
|
||||
class WeightsHolderBlob : public TBlob<uint8_t> {
|
||||
Blob::CPtr originBlob;
|
||||
|
||||
public:
|
||||
explicit WeightsHolderBlob(const Blob::CPtr& weights) :
|
||||
TBlob<uint8_t>(weights->getTensorDesc(),
|
||||
weights->cbuffer().as<uint8_t*>()),
|
||||
originBlob(weights) { }
|
||||
};
|
||||
|
||||
std::unordered_set<std::string> opsetNames;
|
||||
std::vector<IExtensionPtr> extensions;
|
||||
|
||||
@ -311,58 +374,57 @@ public:
|
||||
|
||||
CNNNetwork ReadNetwork(const std::string& modelPath, const std::string& binPath) const override {
|
||||
IE_PROFILING_AUTO_SCOPE(Core::ReadNetwork)
|
||||
IE_SUPPRESS_DEPRECATED_START
|
||||
ResponseDesc desc;
|
||||
CNNNetReaderPtr cnnReader(createCnnReaderLoader());
|
||||
StatusCode rt = cnnReader->ReadNetwork(modelPath.c_str(), &desc);
|
||||
if (rt != OK) THROW_IE_EXCEPTION << desc.msg;
|
||||
if (cnnReader->getVersion(&desc) >= 10) {
|
||||
std::lock_guard<std::mutex> lock(pluginsMutex);
|
||||
cnnReader->addExtensions(GetExtensions());
|
||||
}
|
||||
std::string bPath = binPath;
|
||||
if (bPath.empty()) {
|
||||
bPath = modelPath;
|
||||
auto pos = bPath.rfind('.');
|
||||
if (pos != std::string::npos) bPath = bPath.substr(0, pos);
|
||||
bPath += ".bin";
|
||||
|
||||
if (!FileUtils::fileExist(bPath)) bPath.clear();
|
||||
}
|
||||
std::ifstream modelStream(modelPath, std::ios::binary);
|
||||
if (!modelStream.is_open())
|
||||
THROW_IE_EXCEPTION << "Model file " << modelPath << " cannot be opened!";
|
||||
|
||||
if (!bPath.empty()) {
|
||||
rt = cnnReader->ReadWeights(bPath.c_str(), &desc);
|
||||
if (rt != OK) THROW_IE_EXCEPTION << desc.msg;
|
||||
} else {
|
||||
TBlob<uint8_t>::Ptr weights_ptr;
|
||||
rt = cnnReader->SetWeights(weights_ptr, &desc);
|
||||
if (rt != OK) THROW_IE_EXCEPTION << desc.msg;
|
||||
auto fileExt = modelPath.substr(modelPath.find_last_of(".") + 1);
|
||||
for (auto it = readers.lower_bound(fileExt); it != readers.upper_bound(fileExt); it++) {
|
||||
auto reader = it->second;
|
||||
if (reader->supportModel(modelStream)) {
|
||||
// Find weights
|
||||
std::string bPath = binPath;
|
||||
if (bPath.empty()) {
|
||||
auto pathWoExt = modelPath;
|
||||
auto pos = modelPath.rfind('.');
|
||||
if (pos != std::string::npos) pathWoExt = modelPath.substr(0, pos);
|
||||
for (const auto& ext : reader->getDataFileExtensions()) {
|
||||
bPath = pathWoExt + "." + ext;
|
||||
if (!FileUtils::fileExist(bPath)) {
|
||||
bPath.clear();
|
||||
} else {
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
if (!bPath.empty()) {
|
||||
std::ifstream binStream;
|
||||
binStream.open(bPath, std::ios::binary);
|
||||
if (!binStream.is_open())
|
||||
THROW_IE_EXCEPTION << "Weights file " << bPath << " cannot be opened!";
|
||||
return reader->read(modelStream, binStream, extensions);
|
||||
}
|
||||
return reader->read(modelStream, extensions);
|
||||
}
|
||||
}
|
||||
IE_SUPPRESS_DEPRECATED_END
|
||||
|
||||
return CNNNetwork(cnnReader);
|
||||
THROW_IE_EXCEPTION << "Unknown model format! Cannot read the model: " << modelPath;
|
||||
}
|
||||
|
||||
CNNNetwork ReadNetwork(const std::string& model, const Blob::CPtr& weights) const override {
|
||||
IE_PROFILING_AUTO_SCOPE(Core::ReadNetwork)
|
||||
IE_SUPPRESS_DEPRECATED_START
|
||||
ResponseDesc desc;
|
||||
CNNNetReaderPtr cnnReader(createCnnReaderLoader());
|
||||
StatusCode rt = cnnReader->ReadNetwork(model.data(), model.length(), &desc);
|
||||
if (rt != OK) THROW_IE_EXCEPTION << desc.msg;
|
||||
if (cnnReader->getVersion(&desc) >= 10) {
|
||||
std::lock_guard<std::mutex> lock(pluginsMutex);
|
||||
cnnReader->addExtensions(GetExtensions());
|
||||
}
|
||||
TBlob<uint8_t>::Ptr weights_ptr;
|
||||
if (weights) {
|
||||
weights_ptr = std::make_shared<WeightsHolderBlob>(weights);
|
||||
}
|
||||
rt = cnnReader->SetWeights(weights_ptr, &desc);
|
||||
if (rt != OK) THROW_IE_EXCEPTION << desc.msg;
|
||||
IE_SUPPRESS_DEPRECATED_END
|
||||
std::istringstream modelStream(model);
|
||||
details::BlobStream binStream(weights);
|
||||
|
||||
return CNNNetwork(cnnReader);
|
||||
for (auto it = readers.begin(); it != readers.end(); it++) {
|
||||
auto reader = it->second;
|
||||
if (reader->supportModel(modelStream)) {
|
||||
if (weights)
|
||||
return reader->read(modelStream, binStream, extensions);
|
||||
return reader->read(modelStream, extensions);
|
||||
}
|
||||
}
|
||||
THROW_IE_EXCEPTION << "Unknown model format! Cannot read the model from string!";
|
||||
}
|
||||
|
||||
ExecutableNetwork LoadNetwork(const CNNNetwork& network, const std::string& deviceName,
|
||||
@ -642,6 +704,7 @@ Core::Impl::Impl() {
|
||||
opsetNames.insert("opset1");
|
||||
opsetNames.insert("opset2");
|
||||
opsetNames.insert("opset3");
|
||||
registerReaders();
|
||||
}
|
||||
|
||||
Core::Impl::~Impl() {}
|
||||
|
@ -1,85 +0,0 @@
|
||||
// Copyright (C) 2018-2020 Intel Corporation
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
//
|
||||
|
||||
#include <file_utils.h>
|
||||
#include <xml_parse_utils.h>
|
||||
|
||||
#include <ie_ir_reader.hpp>
|
||||
#include <memory>
|
||||
#include <ngraph/ngraph.hpp>
|
||||
#include <string>
|
||||
#include <vector>
|
||||
|
||||
#include "description_buffer.hpp"
|
||||
#include "ie_ir_parser.hpp"
|
||||
#include "ie_ngraph_utils.hpp"
|
||||
|
||||
using namespace InferenceEngine;
|
||||
|
||||
static size_t GetIRVersion(pugi::xml_node& root) {
|
||||
return XMLParseUtils::GetUIntAttr(root, "version", 0);
|
||||
}
|
||||
|
||||
std::shared_ptr<ngraph::Function> IRReader::read(const std::string& modelPath, const std::string& binPath) {
|
||||
std::ifstream modelFile(modelPath);
|
||||
if (!modelFile.is_open()) THROW_IE_EXCEPTION << "File " << modelPath << " cannot be openned!";
|
||||
|
||||
std::stringstream modelBuf;
|
||||
modelBuf << modelFile.rdbuf();
|
||||
|
||||
Blob::Ptr weights;
|
||||
std::string bPath = binPath;
|
||||
if (bPath.empty()) {
|
||||
bPath = modelPath;
|
||||
auto pos = bPath.rfind('.');
|
||||
if (pos != std::string::npos) bPath = bPath.substr(0, pos);
|
||||
bPath += ".bin";
|
||||
|
||||
if (!FileUtils::fileExist(bPath)) bPath.clear();
|
||||
}
|
||||
|
||||
if (!bPath.empty()) {
|
||||
int64_t fileSize = FileUtils::fileSize(bPath);
|
||||
|
||||
if (fileSize < 0)
|
||||
THROW_IE_EXCEPTION << "Filesize for: " << bPath << " - " << fileSize
|
||||
<< " < 0. Please, check weights file existence.";
|
||||
|
||||
size_t ulFileSize = static_cast<size_t>(fileSize);
|
||||
|
||||
weights = make_shared_blob<uint8_t>(TensorDesc(Precision::U8, {ulFileSize}, Layout::C));
|
||||
weights->allocate();
|
||||
FileUtils::readAllFile(bPath, weights->buffer(), ulFileSize);
|
||||
}
|
||||
|
||||
return read(modelBuf.str(), weights);
|
||||
}
|
||||
|
||||
std::shared_ptr<ngraph::Function> IRReader::read(const std::string& model, const Blob::CPtr& weights) {
|
||||
pugi::xml_document xmlDoc;
|
||||
pugi::xml_parse_result res = xmlDoc.load_buffer(model.data(), model.length());
|
||||
if (res.status != pugi::status_ok) {
|
||||
THROW_IE_EXCEPTION << res.description() << "at offset " << res.offset;
|
||||
}
|
||||
return readXml(xmlDoc, weights);
|
||||
}
|
||||
|
||||
std::shared_ptr<ngraph::Function> IRReader::readXml(const pugi::xml_document& xmlDoc, const Blob::CPtr& weights) {
|
||||
try {
|
||||
// check which version it is...
|
||||
pugi::xml_node root = xmlDoc.document_element();
|
||||
|
||||
auto version = GetIRVersion(root);
|
||||
IRParser parser(version, extensions);
|
||||
return parser.parse(root, weights);
|
||||
} catch (const std::string& err) {
|
||||
THROW_IE_EXCEPTION << err;
|
||||
} catch (const details::InferenceEngineException& e) {
|
||||
throw;
|
||||
} catch (const std::exception& e) {
|
||||
THROW_IE_EXCEPTION << e.what();
|
||||
} catch (...) {
|
||||
THROW_IE_EXCEPTION << "Unknown exception thrown";
|
||||
}
|
||||
}
|
@ -1,59 +0,0 @@
|
||||
// Copyright (C) 2018-2020 Intel Corporation
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
//
|
||||
|
||||
#pragma once
|
||||
|
||||
#include <ie_api.h>
|
||||
#include <ie_blob.h>
|
||||
#include <ie_common.h>
|
||||
#include <ie_iextension.h>
|
||||
|
||||
#include <ie_icnn_network.hpp>
|
||||
#include <map>
|
||||
#include <memory>
|
||||
#include <string>
|
||||
#include <vector>
|
||||
|
||||
namespace pugi {
|
||||
class xml_node;
|
||||
class xml_document;
|
||||
} // namespace pugi
|
||||
|
||||
namespace ngraph {
|
||||
class Function;
|
||||
} // namespace ngraph
|
||||
|
||||
namespace InferenceEngine {
|
||||
|
||||
/**
|
||||
* @brief This class is the main interface to build and parse a network from a given IR
|
||||
*
|
||||
* All methods here do not throw exceptions and return a StatusCode and ResponseDesc object.
|
||||
* Alternatively, to use methods that throw exceptions, refer to the CNNNetReader wrapper class.
|
||||
*/
|
||||
class INFERENCE_ENGINE_API_CLASS(IRReader) {
|
||||
public:
|
||||
IRReader() = default;
|
||||
explicit IRReader(const std::vector<IExtensionPtr>& exts): extensions(exts) {}
|
||||
/**
|
||||
* @brief Reads IR xml and bin files
|
||||
* @param modelPath path to IR file
|
||||
* @param binPath path to bin file
|
||||
* @return shared pointer to nGraph function
|
||||
*/
|
||||
std::shared_ptr<ngraph::Function> read(const std::string& modelPath, const std::string& binPath = "");
|
||||
/**
|
||||
* @brief Reads IR xml and bin (with the same name) files
|
||||
* @param model string with IR
|
||||
* @param weights shared pointer to constant blob with weights
|
||||
* @return shared pointer to nGraph function
|
||||
*/
|
||||
std::shared_ptr<ngraph::Function> read(const std::string& model, const Blob::CPtr& weights);
|
||||
|
||||
private:
|
||||
std::shared_ptr<ngraph::Function> readXml(const pugi::xml_document& xmlDoc, const Blob::CPtr& weights);
|
||||
std::vector<IExtensionPtr> extensions;
|
||||
};
|
||||
|
||||
} // namespace InferenceEngine
|
@ -189,9 +189,10 @@ inline void UnorderedDFS(std::unordered_set<CNNLayer*>& visited, const Inference
|
||||
}
|
||||
|
||||
// visit parents
|
||||
for (auto&& input : cnnLayer->insData) {
|
||||
for (size_t i = 0; i < cnnLayer->insData.size(); i++) {
|
||||
auto& input = cnnLayer->insData[i];
|
||||
if (!input.lock()) {
|
||||
THROW_IE_EXCEPTION << "Data inserted into layer " << cnnLayer->name << " is nullptr";
|
||||
THROW_IE_EXCEPTION << "Data " << i << " inserted into layer " << cnnLayer->name << " is nullptr";
|
||||
} else {
|
||||
auto creatorLayer = input.lock()->getCreatorLayer().lock();
|
||||
if (creatorLayer) {
|
||||
|
@ -61,7 +61,7 @@ public:
|
||||
CNNLayer::Ptr createLayer(const std::shared_ptr<ngraph::Node>& layer) const override;
|
||||
|
||||
bool canCreate(const std::shared_ptr<ngraph::Node>& node) const override {
|
||||
auto castedPtr = std::dynamic_pointer_cast<NGT>(node);
|
||||
auto castedPtr = ngraph::as_type_ptr<NGT>(node);
|
||||
return castedPtr != nullptr;
|
||||
}
|
||||
|
||||
|
@ -37,6 +37,8 @@ public:
|
||||
}
|
||||
}
|
||||
explicit DisableReshape(const std::shared_ptr<const ngraph::Function>& graph) {
|
||||
IE_ASSERT(graph);
|
||||
|
||||
for (auto& op : graph->get_ops()) {
|
||||
addOp(op);
|
||||
}
|
||||
|
21
inference-engine/src/readers/CMakeLists.txt
Normal file
21
inference-engine/src/readers/CMakeLists.txt
Normal file
@ -0,0 +1,21 @@
|
||||
# Copyright (C) 2020 Intel Corporation
|
||||
# SPDX-License-Identifier: Apache-2.0
|
||||
#
|
||||
set(TARGET_NAME inference_engine_reader_api)
|
||||
|
||||
# Reader API library
|
||||
add_library(${TARGET_NAME} INTERFACE)
|
||||
|
||||
target_include_directories(${TARGET_NAME} INTERFACE
|
||||
"${CMAKE_CURRENT_SOURCE_DIR}/reader_api"
|
||||
$<TARGET_PROPERTY:inference_engine,INTERFACE_INCLUDE_DIRECTORIES>)
|
||||
|
||||
file(GLOB_RECURSE reader_api_src "${CMAKE_CURRENT_SOURCE_DIR}/reader_api/*.hpp"
|
||||
"${CMAKE_CURRENT_SOURCE_DIR}/reader_api/*.h")
|
||||
|
||||
add_cpplint_target(${TARGET_NAME}_cpplint FOR_SOURCES ${reader_api_src})
|
||||
|
||||
add_subdirectory(ir_reader)
|
||||
if(NGRAPH_ONNX_IMPORT_ENABLE)
|
||||
add_subdirectory(onnx_reader)
|
||||
endif()
|
@ -2,7 +2,7 @@
|
||||
# SPDX-License-Identifier: Apache-2.0
|
||||
#
|
||||
|
||||
set(TARGET_NAME "inference_engine_ir_readers")
|
||||
set(TARGET_NAME "inference_engine_ir_reader")
|
||||
|
||||
if(ENABLE_LTO)
|
||||
ie_enable_lto()
|
||||
@ -29,7 +29,7 @@ target_compile_definitions(${TARGET_NAME} PRIVATE IMPLEMENT_INFERENCE_ENGINE_API
|
||||
target_include_directories(${TARGET_NAME} PUBLIC ${PUBLIC_HEADERS_DIR})
|
||||
target_include_directories(${TARGET_NAME} PRIVATE "${IE_MAIN_SOURCE_DIR}/src/inference_engine")
|
||||
|
||||
target_link_libraries(${TARGET_NAME} PUBLIC inference_engine_plugin_api ${NGRAPH_LIBRARIES} inference_engine)
|
||||
target_link_libraries(${TARGET_NAME} PUBLIC inference_engine_reader_api inference_engine_plugin_api ${NGRAPH_LIBRARIES} inference_engine)
|
||||
target_link_libraries(${TARGET_NAME} PRIVATE pugixml)
|
||||
|
||||
# code style
|
@ -4,6 +4,7 @@
|
||||
|
||||
#include <file_utils.h>
|
||||
#include <ie_cnn_net_reader_impl.h>
|
||||
#include <ie_blob_stream.hpp>
|
||||
|
||||
#include <fstream>
|
||||
#include <map>
|
||||
@ -16,7 +17,7 @@
|
||||
#include "cnn_network_ngraph_impl.hpp"
|
||||
#include "details/os/os_filesystem.hpp"
|
||||
#include "ie_format_parser.h"
|
||||
#include "ie_ir_reader.hpp"
|
||||
#include "ie_ir_parser.hpp"
|
||||
#include "ie_profiling.hpp"
|
||||
#include "ie_plugin.hpp"
|
||||
#include "parsers.h"
|
||||
@ -39,10 +40,10 @@ StatusCode CNNNetReaderImpl::SetWeights(const TBlob<uint8_t>::Ptr& weights, Resp
|
||||
try {
|
||||
if (_version == 10) {
|
||||
// It's time to perform actual reading of V10 network and instantiate CNNNetworkNGraphImpl
|
||||
IRReader v10Reader(extensions);
|
||||
std::stringstream model;
|
||||
xmlDoc->save(model);
|
||||
network = std::make_shared<CNNNetworkNGraphImpl>(v10Reader.read(model.str(), weights));
|
||||
IRParser parser(_version, extensions);
|
||||
pugi::xml_node root = xmlDoc->document_element();
|
||||
details::BlobStream blobStream(weights);
|
||||
network = parser.parse(root, blobStream);
|
||||
} else if (weights) {
|
||||
_parser->SetWeights(weights);
|
||||
}
|
||||
@ -122,13 +123,9 @@ StatusCode CNNNetReaderImpl::ReadNetwork(const char* filepath, ResponseDesc* res
|
||||
return OK;
|
||||
}
|
||||
|
||||
StatusCode CNNNetReaderImpl::ReadNetwork() {
|
||||
description.clear();
|
||||
|
||||
StatusCode CNNNetReaderImpl::ReadNetwork(const pugi::xml_node& const_root, ResponseDesc * desc) {
|
||||
try {
|
||||
// check which version it is...
|
||||
pugi::xml_node root = xmlDoc->document_element();
|
||||
|
||||
pugi::xml_node root = *const_cast<pugi::xml_node*>(&const_root);
|
||||
_version = GetFileVersion(root);
|
||||
if (_version < 2) THROW_IE_EXCEPTION << "deprecated IR version: " << _version;
|
||||
if (_version == 10) {
|
||||
@ -147,13 +144,36 @@ StatusCode CNNNetReaderImpl::ReadNetwork() {
|
||||
THROW_IE_EXCEPTION << "cannot parse future versions: " << _version;
|
||||
}
|
||||
} catch (const std::string& err) {
|
||||
description = err;
|
||||
parseSuccess = false;
|
||||
return GENERAL_ERROR;
|
||||
return DescriptionBuffer(desc) << err;
|
||||
} catch (const InferenceEngineException& e) {
|
||||
description = e.what();
|
||||
parseSuccess = false;
|
||||
return GENERAL_ERROR;
|
||||
return DescriptionBuffer(desc) << e.what();
|
||||
} catch (const std::exception& e) {
|
||||
description = e.what();
|
||||
parseSuccess = false;
|
||||
return DescriptionBuffer(desc) << e.what();
|
||||
} catch (...) {
|
||||
parseSuccess = false;
|
||||
return DescriptionBuffer(UNEXPECTED, desc) << "Unknown exception thrown";
|
||||
}
|
||||
|
||||
return OK;
|
||||
}
|
||||
|
||||
StatusCode CNNNetReaderImpl::ReadNetwork() {
|
||||
description.clear();
|
||||
|
||||
try {
|
||||
// check which version it is...
|
||||
pugi::xml_node root = xmlDoc->document_element();
|
||||
|
||||
ResponseDesc resp;
|
||||
StatusCode ret = ReadNetwork(root, &resp);
|
||||
if (ret != OK)
|
||||
description = resp.msg;
|
||||
return ret;
|
||||
} catch (const std::exception& e) {
|
||||
description = e.what();
|
||||
parseSuccess = false;
|
@ -44,6 +44,8 @@ public:
|
||||
|
||||
StatusCode ReadNetwork(const void* model, size_t size, ResponseDesc* resp) noexcept override;
|
||||
|
||||
StatusCode ReadNetwork(const pugi::xml_node& root, ResponseDesc* resp);
|
||||
|
||||
StatusCode SetWeights(const TBlob<uint8_t>::Ptr& weights, ResponseDesc* resp) noexcept override;
|
||||
|
||||
StatusCode ReadWeights(const char* filepath, ResponseDesc* resp) noexcept override;
|
||||
@ -53,6 +55,10 @@ public:
|
||||
return network.get();
|
||||
}
|
||||
|
||||
std::shared_ptr<ICNNNetwork> getNetwork() {
|
||||
return network;
|
||||
}
|
||||
|
||||
bool isParseSuccess(ResponseDesc* resp) noexcept override {
|
||||
return parseSuccess;
|
||||
}
|
File diff suppressed because it is too large
Load Diff
@ -27,24 +27,30 @@ class IParser {
|
||||
public:
|
||||
using Ptr = std::shared_ptr<IParser>;
|
||||
virtual ~IParser() = default;
|
||||
virtual std::shared_ptr<ngraph::Function> parse(const pugi::xml_node& root, const Blob::CPtr& weights) = 0;
|
||||
virtual std::shared_ptr<ICNNNetwork> parse(const pugi::xml_node& root, std::istream& binStream) = 0;
|
||||
};
|
||||
|
||||
class IRParser {
|
||||
public:
|
||||
explicit IRParser(size_t version);
|
||||
IRParser(size_t version, const std::vector<InferenceEngine::IExtensionPtr>& exts);
|
||||
std::shared_ptr<ngraph::Function> parse(const pugi::xml_node& root, const Blob::CPtr& weights);
|
||||
std::shared_ptr<ICNNNetwork> parse(const pugi::xml_node& root, std::istream& binStream);
|
||||
virtual ~IRParser() = default;
|
||||
|
||||
private:
|
||||
IParser::Ptr parser;
|
||||
};
|
||||
|
||||
class CNNParser : public IParser {
|
||||
public:
|
||||
CNNParser() = default;
|
||||
std::shared_ptr<ICNNNetwork> parse(const pugi::xml_node& root, std::istream& binStream) override;
|
||||
};
|
||||
|
||||
class V10Parser : public IParser {
|
||||
public:
|
||||
explicit V10Parser(const std::vector<IExtensionPtr>& exts);
|
||||
std::shared_ptr<ngraph::Function> parse(const pugi::xml_node& root, const Blob::CPtr& weights) override;
|
||||
std::shared_ptr<ICNNNetwork> parse(const pugi::xml_node& root, std::istream& binStream) override;
|
||||
|
||||
private:
|
||||
std::map<std::string, ngraph::OpSet> opsets;
|
||||
@ -136,7 +142,7 @@ private:
|
||||
public:
|
||||
virtual ~LayerBaseCreator() {}
|
||||
virtual std::shared_ptr<ngraph::Node> createLayer(const ngraph::OutputVector& inputs,
|
||||
const pugi::xml_node& node, const Blob::CPtr& weights,
|
||||
const pugi::xml_node& node, std::istream& binStream,
|
||||
const GenericLayerParams& layerParsePrms) = 0;
|
||||
|
||||
bool shouldCreate(const std::string& nodeType) const;
|
||||
@ -148,7 +154,7 @@ private:
|
||||
public:
|
||||
explicit LayerCreator(const std::string& type): LayerBaseCreator(type) {}
|
||||
std::shared_ptr<ngraph::Node> createLayer(const ngraph::OutputVector& inputs, const pugi::xml_node& node,
|
||||
const Blob::CPtr& weights,
|
||||
std::istream& binStream,
|
||||
const GenericLayerParams& layerParsePrms) override;
|
||||
ngraph::NodeTypeInfo getNodeType() const override {
|
||||
return T::type_info;
|
||||
@ -156,7 +162,7 @@ private:
|
||||
};
|
||||
|
||||
std::shared_ptr<ngraph::Node> createNode(const ngraph::OutputVector& inputs, const pugi::xml_node& node,
|
||||
const Blob::CPtr& weights, const GenericLayerParams& params);
|
||||
std::istream& binStream, const GenericLayerParams& params);
|
||||
|
||||
GenericLayerParams parseGenericParams(const pugi::xml_node& node);
|
||||
|
61
inference-engine/src/readers/ir_reader/ie_ir_reader.cpp
Normal file
61
inference-engine/src/readers/ir_reader/ie_ir_reader.cpp
Normal file
@ -0,0 +1,61 @@
|
||||
// Copyright (C) 2018-2020 Intel Corporation
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
//
|
||||
|
||||
#include <file_utils.h>
|
||||
#include <xml_parse_utils.h>
|
||||
|
||||
#include <ie_ir_reader.hpp>
|
||||
#include <memory>
|
||||
#include <ngraph/ngraph.hpp>
|
||||
#include <string>
|
||||
#include <vector>
|
||||
#include <sstream>
|
||||
|
||||
#include "description_buffer.hpp"
|
||||
#include "ie_ir_parser.hpp"
|
||||
#include "ie_ngraph_utils.hpp"
|
||||
|
||||
using namespace InferenceEngine;
|
||||
|
||||
static size_t GetIRVersion(pugi::xml_node& root) {
|
||||
return XMLParseUtils::GetUIntAttr(root, "version", 0);
|
||||
}
|
||||
|
||||
bool IRReader::supportModel(std::istream& model) const {
|
||||
model.seekg(0, model.beg);
|
||||
const int header_size = 128;
|
||||
std::string header(header_size, ' ');
|
||||
model.read(&header[0], header_size);
|
||||
// find '<net ' substring in the .xml file
|
||||
return (header.find("<net ") != std::string::npos) || (header.find("<Net ") != std::string::npos);
|
||||
}
|
||||
|
||||
CNNNetwork IRReader::read(std::istream& model, const std::vector<IExtensionPtr>& exts) const {
|
||||
std::istringstream emptyStream;
|
||||
return read(model, emptyStream, exts);
|
||||
}
|
||||
CNNNetwork IRReader::read(std::istream& model, std::istream& weights, const std::vector<IExtensionPtr>& exts) const {
|
||||
model.seekg(0, model.beg);
|
||||
weights.seekg(0, weights.beg);
|
||||
pugi::xml_document xmlDoc;
|
||||
pugi::xml_parse_result res = xmlDoc.load(model);
|
||||
if (res.status != pugi::status_ok) {
|
||||
THROW_IE_EXCEPTION << res.description() << "at offset " << res.offset;
|
||||
}
|
||||
pugi::xml_node root = xmlDoc.document_element();
|
||||
|
||||
auto version = GetIRVersion(root);
|
||||
IRParser parser(version, exts);
|
||||
return CNNNetwork(parser.parse(root, weights));
|
||||
}
|
||||
|
||||
INFERENCE_PLUGIN_API(StatusCode) InferenceEngine::CreateReader(IReader*& reader, ResponseDesc *resp) noexcept {
|
||||
try {
|
||||
reader = new IRReader();
|
||||
return OK;
|
||||
}
|
||||
catch (std::exception &ex) {
|
||||
return GENERAL_ERROR;
|
||||
}
|
||||
}
|
72
inference-engine/src/readers/ir_reader/ie_ir_reader.hpp
Normal file
72
inference-engine/src/readers/ir_reader/ie_ir_reader.hpp
Normal file
@ -0,0 +1,72 @@
|
||||
// Copyright (C) 2018-2020 Intel Corporation
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
//
|
||||
|
||||
#pragma once
|
||||
|
||||
#include <ie_api.h>
|
||||
#include <ie_blob.h>
|
||||
#include <ie_common.h>
|
||||
#include <ie_iextension.h>
|
||||
|
||||
#include <ie_icnn_network.hpp>
|
||||
#include <ie_reader.hpp>
|
||||
#include <map>
|
||||
#include <memory>
|
||||
#include <string>
|
||||
#include <vector>
|
||||
|
||||
namespace pugi {
|
||||
class xml_node;
|
||||
class xml_document;
|
||||
} // namespace pugi
|
||||
|
||||
namespace ngraph {
|
||||
class Function;
|
||||
} // namespace ngraph
|
||||
|
||||
namespace InferenceEngine {
|
||||
|
||||
/**
|
||||
* @brief This class is the main interface to build and parse a network from a given IR
|
||||
*
|
||||
* All methods here do not throw exceptions and return a StatusCode and ResponseDesc object.
|
||||
* Alternatively, to use methods that throw exceptions, refer to the CNNNetReader wrapper class.
|
||||
*/
|
||||
class IRReader: public IReader {
|
||||
public:
|
||||
IRReader() = default;
|
||||
|
||||
void Release() noexcept override {
|
||||
delete this;
|
||||
}
|
||||
/**
|
||||
* @brief Checks that reader supports format of the model
|
||||
* @param model stream with model
|
||||
* @return true if format is supported
|
||||
*/
|
||||
bool supportModel(std::istream& model) const override;
|
||||
/**
|
||||
* @brief Reads the model to CNNNetwork
|
||||
* @param model stream with model
|
||||
* @param exts vector with extensions
|
||||
*
|
||||
* @return CNNNetwork
|
||||
*/
|
||||
CNNNetwork read(std::istream& model, const std::vector<IExtensionPtr>& exts) const override;
|
||||
/**
|
||||
* @brief Reads the model to CNNNetwork
|
||||
* @param model stream with model
|
||||
* @param weights stream with binary data
|
||||
* @param exts vector with extensions
|
||||
*
|
||||
* @return CNNNetwork
|
||||
*/
|
||||
CNNNetwork read(std::istream& model, std::istream& weights, const std::vector<IExtensionPtr>& exts) const override;
|
||||
|
||||
std::vector<std::string> getDataFileExtensions() const override {
|
||||
return {"bin"};
|
||||
}
|
||||
};
|
||||
|
||||
} // namespace InferenceEngine
|
@ -138,6 +138,18 @@ public:
|
||||
net->getOutputsInfo(out_info_map);
|
||||
|
||||
IE_ASSERT(in_info_map.size() == inputs.size()) << "TI body. There are unlinked inputs";
|
||||
for (auto& it : net->allLayers()) {
|
||||
auto layer = it.second;
|
||||
if (layer->type == "Input" || !layer->insData.empty())
|
||||
continue;
|
||||
if (!holder)
|
||||
holder = std::make_shared<Data>("const_holder", Precision::UNSPECIFIED);
|
||||
holder->getInputTo()[it.first] = layer;
|
||||
}
|
||||
}
|
||||
|
||||
DataPtr getHolder() {
|
||||
return holder;
|
||||
}
|
||||
|
||||
void setWeights(const WBlob& weights) {
|
||||
@ -155,6 +167,7 @@ private:
|
||||
pugi::xml_node& body;
|
||||
FormatParser parser;
|
||||
Precision default_precision;
|
||||
DataPtr holder;
|
||||
|
||||
PortMap inputs;
|
||||
PortMap outputs;
|
||||
@ -253,6 +266,8 @@ CNNLayer::Ptr TILayerCreator::CreateLayer(pugi::xml_node& node, LayerParseParame
|
||||
|
||||
auto res = std::make_shared<TensorIterator>(layerParsePrms.prms);
|
||||
res->body.inputs = inputs;
|
||||
if (auto holder = parser->getHolder())
|
||||
res->body.inputs.emplace_back(holder);
|
||||
res->body.outputs = outputs;
|
||||
res->input_port_map = in_ports_maping;
|
||||
res->output_port_map = out_ports_maping;
|
41
inference-engine/src/readers/onnx_reader/CMakeLists.txt
Normal file
41
inference-engine/src/readers/onnx_reader/CMakeLists.txt
Normal file
@ -0,0 +1,41 @@
|
||||
# Copyright (C) 2020 Intel Corporation
|
||||
# SPDX-License-Identifier: Apache-2.0
|
||||
#
|
||||
|
||||
set(TARGET_NAME "inference_engine_onnx_reader")
|
||||
|
||||
if(ENABLE_LTO)
|
||||
ie_enable_lto()
|
||||
endif()
|
||||
|
||||
set(PUBLIC_HEADERS_DIR "${CMAKE_CURRENT_SOURCE_DIR}/")
|
||||
|
||||
file(GLOB_RECURSE LIBRARY_SRC ${CMAKE_CURRENT_SOURCE_DIR}/*.cpp)
|
||||
file(GLOB_RECURSE PUBLIC_HEADERS ${PUBLIC_HEADERS_DIR}/*.h ${PUBLIC_HEADERS_DIR}/*.hpp)
|
||||
|
||||
# Create named folders for the sources within the .vcproj
|
||||
# Empty name lists them directly under the .vcproj
|
||||
|
||||
source_group("src" FILES ${LIBRARY_SRC})
|
||||
source_group("include" FILES ${PUBLIC_HEADERS})
|
||||
|
||||
# Create shared library
|
||||
|
||||
add_library(${TARGET_NAME} SHARED ${LIBRARY_SRC} ${PUBLIC_HEADERS})
|
||||
|
||||
target_include_directories(${TARGET_NAME} PUBLIC ${PUBLIC_HEADERS_DIR})
|
||||
|
||||
target_compile_definitions(${TARGET_NAME} PRIVATE IMPLEMENT_INFERENCE_ENGINE_PLUGIN)
|
||||
target_link_libraries(${TARGET_NAME} PRIVATE inference_engine_reader_api onnx_importer inference_engine)
|
||||
|
||||
# code style
|
||||
|
||||
add_cpplint_target(${TARGET_NAME}_cpplint FOR_TARGETS ${TARGET_NAME})
|
||||
|
||||
# install
|
||||
|
||||
install(TARGETS ${TARGET_NAME}
|
||||
RUNTIME DESTINATION ${IE_CPACK_LIBRARY_PATH} COMPONENT core
|
||||
ARCHIVE DESTINATION ${IE_CPACK_LIBRARY_PATH} COMPONENT core
|
||||
LIBRARY DESTINATION ${IE_CPACK_LIBRARY_PATH} COMPONENT core)
|
||||
|
36
inference-engine/src/readers/onnx_reader/ie_onnx_reader.cpp
Normal file
36
inference-engine/src/readers/onnx_reader/ie_onnx_reader.cpp
Normal file
@ -0,0 +1,36 @@
|
||||
// Copyright (C) 2020 Intel Corporation
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
//
|
||||
|
||||
#include "ie_onnx_reader.hpp"
|
||||
#include <ie_api.h>
|
||||
#include <ngraph/frontend/onnx_import/onnx.hpp>
|
||||
|
||||
using namespace InferenceEngine;
|
||||
|
||||
bool ONNXReader::supportModel(std::istream& model) const {
|
||||
model.seekg(0, model.beg);
|
||||
const int header_size = 128;
|
||||
std::string header(header_size, ' ');
|
||||
model.read(&header[0], header_size);
|
||||
// find 'onnx' substring in the .onnx files
|
||||
// find 'ir_version' and 'graph' for prototxt
|
||||
// return (header.find("onnx") != std::string::npos) || (header.find("pytorch") != std::string::npos) ||
|
||||
// (header.find("ir_version") != std::string::npos && header.find("graph") != std::string::npos);
|
||||
return !((header.find("<net ") != std::string::npos) || (header.find("<Net ") != std::string::npos));
|
||||
}
|
||||
|
||||
CNNNetwork ONNXReader::read(std::istream& model, const std::vector<IExtensionPtr>& exts) const {
|
||||
model.seekg(0, model.beg);
|
||||
return CNNNetwork(ngraph::onnx_import::import_onnx_model(model));
|
||||
}
|
||||
|
||||
INFERENCE_PLUGIN_API(StatusCode) InferenceEngine::CreateReader(IReader*& reader, ResponseDesc *resp) noexcept {
|
||||
try {
|
||||
reader = new ONNXReader();
|
||||
return OK;
|
||||
}
|
||||
catch (std::exception &ex) {
|
||||
return GENERAL_ERROR;
|
||||
}
|
||||
}
|
48
inference-engine/src/readers/onnx_reader/ie_onnx_reader.hpp
Normal file
48
inference-engine/src/readers/onnx_reader/ie_onnx_reader.hpp
Normal file
@ -0,0 +1,48 @@
|
||||
// Copyright (C) 2020 Intel Corporation
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
//
|
||||
|
||||
#pragma once
|
||||
|
||||
#include <ie_reader.hpp>
|
||||
|
||||
namespace InferenceEngine {
|
||||
|
||||
class ONNXReader: public IReader {
|
||||
public:
|
||||
void Release() noexcept override {
|
||||
delete this;
|
||||
}
|
||||
/**
|
||||
* @brief Checks that reader supports format of the model
|
||||
* @param model stream with model
|
||||
* @return true if format is supported
|
||||
*/
|
||||
bool supportModel(std::istream& model) const override;
|
||||
/**
|
||||
* @brief Reads the model to CNNNetwork
|
||||
* @param model stream with model
|
||||
* @param exts vector with extensions
|
||||
*
|
||||
* @return CNNNetwork
|
||||
*/
|
||||
CNNNetwork read(std::istream& model, const std::vector<IExtensionPtr>& exts) const override;
|
||||
/**
|
||||
* @brief Reads the model to CNNNetwork
|
||||
* @param model stream with model
|
||||
* @param weights stream with binary data
|
||||
* @param exts vector with extensions
|
||||
*
|
||||
* @return CNNNetwork
|
||||
*/
|
||||
CNNNetwork read(std::istream& model, std::istream& weights, const std::vector<IExtensionPtr>& exts) const override {
|
||||
THROW_IE_EXCEPTION << "ONNX reader cannot read model with weights!";
|
||||
}
|
||||
|
||||
std::vector<std::string> getDataFileExtensions() const override {
|
||||
return {};
|
||||
}
|
||||
};
|
||||
|
||||
} // namespace InferenceEngine
|
||||
|
35
inference-engine/src/readers/reader_api/ie_blob_stream.hpp
Normal file
35
inference-engine/src/readers/reader_api/ie_blob_stream.hpp
Normal file
@ -0,0 +1,35 @@
|
||||
// Copyright (C) 2018-2020 Intel Corporation
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
//
|
||||
|
||||
#pragma once
|
||||
|
||||
#include <ie_blob.h>
|
||||
#include <istream>
|
||||
|
||||
namespace InferenceEngine {
|
||||
namespace details {
|
||||
|
||||
class INFERENCE_ENGINE_API_CLASS(BlobStream): public std::istream {
|
||||
private:
|
||||
class BlobBuffer: public std::streambuf {
|
||||
public:
|
||||
BlobBuffer(const Blob::CPtr& blob);
|
||||
~BlobBuffer() override;
|
||||
std::streampos seekpos(std::streampos sp, std::ios_base::openmode which) override;
|
||||
std::streampos seekoff(std::streamoff off, std::ios_base::seekdir way, std::ios_base::openmode which) override;
|
||||
};
|
||||
|
||||
BlobBuffer buffer;
|
||||
Blob::CPtr blob;
|
||||
|
||||
public:
|
||||
BlobStream(const Blob::CPtr& blob);
|
||||
~BlobStream() override;
|
||||
|
||||
Blob::CPtr getBlob();
|
||||
};
|
||||
|
||||
|
||||
} // namespace details
|
||||
} // namespace InferenceEngine
|
63
inference-engine/src/readers/reader_api/ie_reader.hpp
Normal file
63
inference-engine/src/readers/reader_api/ie_reader.hpp
Normal file
@ -0,0 +1,63 @@
|
||||
// Copyright (C) 2020 Intel Corporation
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
//
|
||||
|
||||
#pragma once
|
||||
|
||||
#include <details/ie_irelease.hpp>
|
||||
#include <cpp/ie_cnn_network.h>
|
||||
#include <ie_iextension.h>
|
||||
#include <istream>
|
||||
#include <string>
|
||||
#include <vector>
|
||||
#include <ie_api.h>
|
||||
|
||||
namespace InferenceEngine {
|
||||
|
||||
/**
|
||||
* @brief IReader an abstract interface for Inference Engine readers
|
||||
*/
|
||||
class INFERENCE_ENGINE_API_CLASS(IReader): public details::IRelease {
|
||||
public:
|
||||
/**
|
||||
* @brief Checks that reader supports format of the model
|
||||
* @param model stream with model
|
||||
* @return true if format is supported
|
||||
*/
|
||||
virtual bool supportModel(std::istream& model) const = 0;
|
||||
/**
|
||||
* @brief Reads the model to CNNNetwork
|
||||
* @param model stream with model
|
||||
* @param exts vector with extensions
|
||||
*
|
||||
* @return CNNNetwork
|
||||
*/
|
||||
virtual CNNNetwork read(std::istream& model, const std::vector<IExtensionPtr>& exts) const = 0;
|
||||
/**
|
||||
* @brief Reads the model to CNNNetwork
|
||||
* @param model stream with model
|
||||
* @param weights stream with binary data
|
||||
* @param exts vector with extensions
|
||||
*
|
||||
* @return CNNNetwork
|
||||
*/
|
||||
virtual CNNNetwork read(std::istream& model, std::istream& weights, const std::vector<IExtensionPtr>& exts) const = 0;
|
||||
|
||||
/**
|
||||
* @brief Returns all supported extensions for data files
|
||||
*
|
||||
* @return vector of file extensions, for example the reader for OpenVINO IR returns {"bin"}
|
||||
*/
|
||||
virtual std::vector<std::string> getDataFileExtensions() const = 0;
|
||||
};
|
||||
|
||||
/**
|
||||
* @brief Creates the default instance of the reader
|
||||
*
|
||||
* @param reader Reader interface
|
||||
* @param resp Response description
|
||||
* @return Status code
|
||||
*/
|
||||
INFERENCE_PLUGIN_API(StatusCode) CreateReader(IReader*& reader, ResponseDesc* resp) noexcept;
|
||||
|
||||
} // namespace InferenceEngine
|
36
inference-engine/src/readers/reader_api/ie_reader_ptr.hpp
Normal file
36
inference-engine/src/readers/reader_api/ie_reader_ptr.hpp
Normal file
@ -0,0 +1,36 @@
|
||||
// Copyright (C) 2020 Intel Corporation
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
//
|
||||
|
||||
#pragma once
|
||||
|
||||
#include <string>
|
||||
|
||||
#include <details/ie_so_pointer.hpp>
|
||||
#include "ie_reader.hpp"
|
||||
|
||||
namespace InferenceEngine {
|
||||
namespace details {
|
||||
|
||||
/**
|
||||
* @brief This class defines the name of the fabric for creating an IReader object in DLL
|
||||
*/
|
||||
template <>
|
||||
class SOCreatorTrait<IReader> {
|
||||
public:
|
||||
/**
|
||||
* @brief A name of the fabric for creating IReader object in DLL
|
||||
*/
|
||||
static constexpr auto name = "CreateReader";
|
||||
};
|
||||
|
||||
} // namespace details
|
||||
|
||||
/**
|
||||
* @brief A C++ helper to work with objects created by the plugin.
|
||||
*
|
||||
* Implements different interfaces.
|
||||
*/
|
||||
using IReaderPtr = InferenceEngine::details::SOPointer<IReader>;
|
||||
|
||||
} // namespace InferenceEngine
|
@ -207,7 +207,9 @@ protected:
|
||||
// This model contains layers with float attributes.
|
||||
// Conversion from string may be affected by locale.
|
||||
std::string model = isLSTM ? _model_LSTM : _model;
|
||||
auto net = core.ReadNetwork(model, InferenceEngine::Blob::CPtr());
|
||||
auto blob = make_shared_blob<uint8_t>(TensorDesc(Precision::U8, {3360}, Layout::C));
|
||||
blob->allocate();
|
||||
auto net = core.ReadNetwork(model, blob);
|
||||
|
||||
IE_SUPPRESS_DEPRECATED_START
|
||||
if (!isLSTM) {
|
||||
|
@ -107,7 +107,7 @@ TEST_P(NetReaderTest, ReadNetworkTwiceSeparately) {
|
||||
|
||||
#ifdef ENABLE_UNICODE_PATH_SUPPORT
|
||||
|
||||
TEST_P(NetReaderTest, ReadCorrectModelWithWeightsUnicodePath) {
|
||||
TEST_P(NetReaderTest, DISABLED_ReadCorrectModelWithWeightsUnicodePath) {
|
||||
GTEST_COUT << "params.modelPath: '" << _modelPath << "'" << std::endl;
|
||||
GTEST_COUT << "params.weightsPath: '" << _weightsPath << "'" << std::endl;
|
||||
GTEST_COUT << "params.netPrc: '" << _netPrc.name() << "'" << std::endl;
|
||||
|
@ -0,0 +1,136 @@
|
||||
// Copyright (C) 2018-2020 Intel Corporation
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
//
|
||||
|
||||
#include <gtest/gtest.h>
|
||||
#include <set>
|
||||
#include <string>
|
||||
#include <fstream>
|
||||
|
||||
#include <ie_blob.h>
|
||||
#include <ie_core.hpp>
|
||||
#include <ngraph/ngraph.hpp>
|
||||
|
||||
TEST(ONNX_Reader_Tests, ImportBasicModelToCore) {
|
||||
std::string model = R"V0G0N(
|
||||
ir_version: 3
|
||||
producer_name: "nGraph ONNX Importer"
|
||||
graph {
|
||||
node {
|
||||
output: "B"
|
||||
op_type: "Constant"
|
||||
attribute {
|
||||
name: "value"
|
||||
t {
|
||||
dims: 2
|
||||
dims: 2
|
||||
data_type: 1
|
||||
float_data: 1
|
||||
float_data: 2
|
||||
float_data: 3
|
||||
float_data: 4
|
||||
name: "const_tensor"
|
||||
}
|
||||
type: TENSOR
|
||||
}
|
||||
}
|
||||
node {
|
||||
input: "A"
|
||||
input: "B"
|
||||
output: "X"
|
||||
name: "add_node1"
|
||||
op_type: "Add"
|
||||
}
|
||||
node {
|
||||
input: "X"
|
||||
input: "C"
|
||||
output: "Y"
|
||||
name: "add_node2"
|
||||
op_type: "Add"
|
||||
}
|
||||
name: "test_graph"
|
||||
initializer {
|
||||
dims: 2
|
||||
dims: 2
|
||||
data_type: 1
|
||||
name: "A"
|
||||
raw_data: "\000\000\200?\000\000\000@\000\000@@\000\000\200@"
|
||||
}
|
||||
input {
|
||||
name: "A"
|
||||
type {
|
||||
tensor_type {
|
||||
elem_type: 1
|
||||
shape {
|
||||
dim {
|
||||
dim_value: 2
|
||||
}
|
||||
dim {
|
||||
dim_value: 2
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
input {
|
||||
name: "C"
|
||||
type {
|
||||
tensor_type {
|
||||
elem_type: 1
|
||||
shape {
|
||||
dim {
|
||||
dim_value: 2
|
||||
}
|
||||
dim {
|
||||
dim_value: 2
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
output {
|
||||
name: "Y"
|
||||
type {
|
||||
tensor_type {
|
||||
elem_type: 1
|
||||
shape {
|
||||
dim {
|
||||
dim_value: 2
|
||||
}
|
||||
dim {
|
||||
dim_value: 2
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
opset_import {
|
||||
version: 4
|
||||
}
|
||||
)V0G0N";
|
||||
InferenceEngine::Core ie;
|
||||
InferenceEngine::Blob::CPtr weights;
|
||||
auto cnnNetwork = ie.ReadNetwork(model, weights);
|
||||
auto function = cnnNetwork.getFunction();
|
||||
|
||||
int count_additions = 0;
|
||||
int count_constants = 0;
|
||||
int count_parameters = 0;
|
||||
|
||||
for (auto op : function->get_ops()) {
|
||||
const auto op_type = std::string(op->get_type_name());
|
||||
count_additions += (op_type == "Add" ? 1 : 0);
|
||||
count_constants += (op_type == "Constant" ? 1 : 0);
|
||||
count_parameters += (op_type == "Parameter" ? 1 : 0);
|
||||
}
|
||||
|
||||
ASSERT_EQ(function->get_output_size(), 1);
|
||||
ASSERT_EQ(std::string(function->get_output_op(0)->get_type_name()), "Result");
|
||||
ASSERT_EQ(function->get_output_element_type(0), ngraph::element::f32);
|
||||
ASSERT_EQ(function->get_output_shape(0), ngraph::Shape({2, 2}));
|
||||
ASSERT_EQ(count_additions, 2);
|
||||
ASSERT_EQ(count_constants, 2);
|
||||
ASSERT_EQ(count_parameters, 1);
|
||||
}
|
||||
|
@ -85,7 +85,7 @@ inline void compare_preprocess(const InferenceEngine::PreProcessChannel & lhs, c
|
||||
inline void compare_preprocess_info(const InferenceEngine::PreProcessInfo & lhs, const InferenceEngine::PreProcessInfo & rhs) {
|
||||
ASSERT_EQ(lhs.getMeanVariant(), rhs.getMeanVariant());
|
||||
ASSERT_EQ(lhs.getNumberOfChannels(), rhs.getNumberOfChannels());
|
||||
for (int i = 0; i < lhs.getNumberOfChannels(); i++) {
|
||||
for (size_t i = 0; i < lhs.getNumberOfChannels(); i++) {
|
||||
ASSERT_PREPROCESS_CHANNEL_EQ(*lhs[i].get(), *rhs[i].get());
|
||||
}
|
||||
}
|
||||
@ -95,7 +95,7 @@ inline void compare_outputs_info(const InferenceEngine::OutputsDataMap & lhs, co
|
||||
auto i = lhs.begin();
|
||||
auto j = rhs.begin();
|
||||
|
||||
for (int k =0; k != lhs.size(); k++, i++, j++) {
|
||||
for (size_t k =0; k != lhs.size(); k++, i++, j++) {
|
||||
ASSERT_STREQ(i->first.c_str(), j->first.c_str());
|
||||
ASSERT_DATA_EQ(*i->second.get(), *j->second.get());
|
||||
}
|
||||
@ -106,7 +106,7 @@ inline void compare_inputs_info(const InferenceEngine::InputsDataMap & lhs, cons
|
||||
auto i = lhs.begin();
|
||||
auto j = rhs.begin();
|
||||
|
||||
for (int k = 0; k != lhs.size(); k++, i++, j++) {
|
||||
for (size_t k = 0; k != lhs.size(); k++, i++, j++) {
|
||||
ASSERT_STREQ(i->first.c_str(), j->first.c_str());
|
||||
ASSERT_DIMS_EQ(i->second->getTensorDesc().getDims(), j->second->getTensorDesc().getDims());
|
||||
ASSERT_PREPROCESS_INFO_EQ(i->second->getPreProcess(), j->second->getPreProcess());
|
||||
|
@ -264,9 +264,6 @@ namespace FuncTestUtils {
|
||||
auto back_edges_mp_old = get_port_map(ti_old->back_edges, ti_old->body.outputs, ti_old->body.inputs);
|
||||
compare_port_maps(back_edges_mp_new, back_edges_mp_old);
|
||||
|
||||
auto holder = ti_new->body.inputs.back();
|
||||
ti_new->body.inputs.pop_back();
|
||||
|
||||
// TI body comparison
|
||||
auto nodes_new = InferenceEngine::NetPass::TIBodySortTopologically(ti_new->body);
|
||||
auto nodes_old = InferenceEngine::NetPass::TIBodySortTopologically(ti_old->body);
|
||||
@ -303,8 +300,6 @@ namespace FuncTestUtils {
|
||||
auto old_outputs = get_map(ti_old->body.outputs);
|
||||
compareInfo<std::map<std::string, InferenceEngine::DataPtr>>(new_outputs, old_outputs,
|
||||
"Bodies of TensorIterator have different outputs!");
|
||||
|
||||
ti_new->body.inputs.push_back(holder);
|
||||
IE_SUPPRESS_DEPRECATED_END
|
||||
}
|
||||
|
||||
|
@ -8,7 +8,7 @@ list(APPEND EXPORT_DEPENDENCIES
|
||||
commonTestUtils_s
|
||||
inference_engine_s
|
||||
inference_engine_lp_transformations
|
||||
inference_engine_ir_readers
|
||||
inference_engine_ir_reader
|
||||
gmock)
|
||||
|
||||
addIeTarget(
|
||||
@ -23,4 +23,4 @@ addIeTarget(
|
||||
|
||||
target_link_libraries(${TARGET_NAME}
|
||||
PUBLIC
|
||||
${EXPORT_DEPENDENCIES})
|
||||
${EXPORT_DEPENDENCIES})
|
||||
|
@ -42,10 +42,7 @@ TEST_P(BehaviorPluginTestInferRequest, SetEmptyConfig) {
|
||||
// Load incorrect network to Plugin to get executable network
|
||||
TEST_P(BehaviorPluginTestInferRequest, canNotLoadNetworkToGetExeNetworkWithoutWeights) {
|
||||
InferenceEngine::Core core;
|
||||
CNNNetwork network = core.ReadNetwork(GetParam().model_xml_str, Blob::CPtr());
|
||||
|
||||
ASSERT_THROW(core.LoadNetwork(network, GetParam().device, GetParam().config),
|
||||
InferenceEngineException);
|
||||
ASSERT_THROW(core.ReadNetwork(GetParam().model_xml_str, Blob::CPtr()), InferenceEngineException);
|
||||
}
|
||||
|
||||
// Load correct network to Plugin to get executable network
|
||||
|
@ -349,7 +349,7 @@ protected:
|
||||
V2NetBuilder model = V2NetBuilder::buildNetworkWithOneInput(
|
||||
"Activation_Only", inout.inDims[0], p.precision)
|
||||
.addLayer("Activation", p.precision, ¶ms, inout);
|
||||
return model.finish();
|
||||
return model.finish(false);
|
||||
}
|
||||
};
|
||||
|
||||
|
@ -96,10 +96,7 @@ TEST_F(BehaviorPluginTest, smoke_llocateNullBlob) {
|
||||
// Load incorrect network to Plugin
|
||||
TEST_P(BehaviorPluginTest, canNotLoadNetworkWithoutWeights) {
|
||||
InferenceEngine::Core core;
|
||||
CNNNetwork network = core.ReadNetwork(GetParam().model_xml_str, Blob::CPtr());
|
||||
|
||||
IExecutableNetwork::Ptr exeNetwork;
|
||||
ASSERT_THROW(core.LoadNetwork(network, GetParam().device, {}), InferenceEngineException);
|
||||
ASSERT_THROW(core.ReadNetwork(GetParam().model_xml_str, Blob::CPtr()), InferenceEngineException);
|
||||
}
|
||||
|
||||
bool static compare_two_files_lexicographically(const std::string& name_a, const std::string& name_b) {
|
||||
|
@ -181,7 +181,12 @@ TEST_P(MYRIADWatchdog, watchDogIntervalDefault) {
|
||||
{
|
||||
|
||||
InferenceEngine::Core core;
|
||||
CNNNetwork network = core.ReadNetwork(GetParam().model_xml_str, Blob::CPtr());
|
||||
IE_SUPPRESS_DEPRECATED_START
|
||||
CNNNetReader reader;
|
||||
reader.ReadNetwork(GetParam().model_xml_str.data(), GetParam().model_xml_str.length());
|
||||
|
||||
CNNNetwork network = reader.getNetwork();
|
||||
IE_SUPPRESS_DEPRECATED_END
|
||||
ASSERT_GE(startup_devices.unbooted, 1);
|
||||
|
||||
ExecutableNetwork ret;
|
||||
@ -214,7 +219,12 @@ TEST_P(MYRIADWatchdog, canTurnoffWatchDogViaConfig) {
|
||||
auto ctime = Time::now();
|
||||
{
|
||||
InferenceEngine::Core core;
|
||||
CNNNetwork network = core.ReadNetwork(GetParam().model_xml_str, Blob::CPtr());
|
||||
IE_SUPPRESS_DEPRECATED_START
|
||||
CNNNetReader reader;
|
||||
reader.ReadNetwork(GetParam().model_xml_str.data(), GetParam().model_xml_str.length());
|
||||
|
||||
CNNNetwork network = reader.getNetwork();
|
||||
IE_SUPPRESS_DEPRECATED_END
|
||||
ASSERT_GE(startup_devices.unbooted, 1);
|
||||
|
||||
ExecutableNetwork ret;
|
||||
|
@ -7,7 +7,6 @@
|
||||
#include <tests_common_func.hpp>
|
||||
#include <memory>
|
||||
#include "xml_helper.hpp"
|
||||
#include <ie_ir_reader.hpp>
|
||||
#include <ie_core.hpp>
|
||||
|
||||
#define XBYAK_NO_OP_NAMES
|
||||
@ -54,12 +53,9 @@ struct ngraph_network_param {
|
||||
class smoke_NGraphNetworkTest : public TestsCommon, public TestsCommonFunc {
|
||||
protected:
|
||||
Blob::Ptr classifyV7(ngraph_network_param p, size_t batch_size = 1, float threshold = 0.005f) {
|
||||
IRReader reader;
|
||||
auto ngraph = reader.read(p.v7model());
|
||||
|
||||
auto network = CNNNetwork(ngraph);
|
||||
|
||||
Core ie;
|
||||
CNNNetwork network = ie.ReadNetwork(p.v7model());
|
||||
|
||||
ExecutableNetwork exeNetwork = ie.LoadNetwork(network, "CPU");
|
||||
InferRequest inferRequest = exeNetwork.CreateInferRequest();
|
||||
|
||||
|
@ -21,7 +21,7 @@ function(add_helpers target_name)
|
||||
|
||||
target_include_directories(${target_name} PUBLIC "${CMAKE_CURRENT_SOURCE_DIR}"
|
||||
"${IE_MAIN_SOURCE_DIR}/src/inference_engine"
|
||||
$<TARGET_PROPERTY:inference_engine_ir_readers,INTERFACE_INCLUDE_DIRECTORIES>
|
||||
$<TARGET_PROPERTY:inference_engine_ir_reader,INTERFACE_INCLUDE_DIRECTORIES>
|
||||
$<TARGET_PROPERTY:inference_engine_lp_transformations,INTERFACE_INCLUDE_DIRECTORIES>
|
||||
$<TARGET_PROPERTY:pugixml,INTERFACE_INCLUDE_DIRECTORIES>
|
||||
"${IE_MAIN_SOURCE_DIR}/src/vpu/"
|
||||
|
@ -148,8 +148,8 @@ target_link_libraries(${TARGET_NAME} PRIVATE
|
||||
|
||||
# dynamic libraries
|
||||
inference_engine_lp_transformations
|
||||
inference_engine_ir_readers
|
||||
inference_engine_transformations
|
||||
inference_engine_ir_reader
|
||||
${CMAKE_DL_LIBS})
|
||||
|
||||
if(TARGET libGNAStubs)
|
||||
|
@ -2291,7 +2291,9 @@ TEST_F(CNNNetReaderImplTest, ReadInThreads) {
|
||||
threads.push_back(std::thread([i, model]{
|
||||
InferenceEngine::Core core;
|
||||
/** Read network model **/
|
||||
CNNNetwork network = core.ReadNetwork(model, InferenceEngine::Blob::CPtr());
|
||||
auto blob = make_shared_blob<uint8_t>(TensorDesc(Precision::U8, {9728}, Layout::C));
|
||||
blob->allocate();
|
||||
CNNNetwork network = core.ReadNetwork(model, blob);
|
||||
// -----------------------------------------------------------------------------------------------------
|
||||
|
||||
// --------------------------- 3. Configure input & output ---------------------------------------------
|
||||
|
@ -1102,8 +1102,9 @@ protected:
|
||||
std::string model = getModel(p);
|
||||
|
||||
InferenceEngine::Core core;
|
||||
ASSERT_THROW(core.ReadNetwork(model, InferenceEngine::Blob::CPtr()),
|
||||
InferenceEngine::details::InferenceEngineException);
|
||||
// TODO: check InferenceEngine::details::InferenceEngineException when RTTI issue will be resolved
|
||||
ASSERT_THROW(core.ReadNetwork(model, InferenceEngine::Blob::CPtr()),
|
||||
std::exception);
|
||||
} catch (const InferenceEngine::details::InferenceEngineException &e) {
|
||||
FAIL() << e.what();
|
||||
}
|
||||
|
@ -278,6 +278,7 @@ TEST(CNNSpecificGraphCopyTests, copyPreprocess) {
|
||||
|
||||
InferenceEngine::Core core;
|
||||
InferenceEngine::CNNNetwork network;
|
||||
|
||||
ASSERT_NO_THROW(network = core.ReadNetwork(SINGLE_LAYER_MODEL, InferenceEngine::Blob::CPtr()));
|
||||
|
||||
//copy the network
|
||||
@ -330,7 +331,7 @@ TEST(CNNSpecificGraphCopyTests, copyNetworkWithDeconvolution) {
|
||||
<dim>4</dim>
|
||||
</port>
|
||||
</output>
|
||||
<weights offset="5517824" size="12288"/>
|
||||
<weights offset="0" size="12288"/>
|
||||
</layer>
|
||||
</layers>
|
||||
<edges>
|
||||
@ -341,7 +342,9 @@ TEST(CNNSpecificGraphCopyTests, copyNetworkWithDeconvolution) {
|
||||
|
||||
InferenceEngine::Core core;
|
||||
InferenceEngine::CNNNetwork network;
|
||||
ASSERT_NO_THROW(network = core.ReadNetwork(SINGLE_LAYER_MODEL, InferenceEngine::Blob::CPtr()));
|
||||
auto blob = make_shared_blob<uint8_t>(TensorDesc(Precision::U8, {12288}, Layout::C));
|
||||
blob->allocate();
|
||||
ASSERT_NO_THROW(network = core.ReadNetwork(SINGLE_LAYER_MODEL, blob));
|
||||
|
||||
// copy the network
|
||||
struct EmptyStruct {};
|
||||
|
@ -87,7 +87,7 @@ class NormalizationConvConvEltwiseTests: public TestsCommon,
|
||||
|
||||
std::string getModel(conv_conv_eltwise_params p) {
|
||||
std::string model = layers_t;
|
||||
|
||||
|
||||
std::string s_dims;
|
||||
for (auto& dim : p.in) {
|
||||
s_dims += "\n <dim>";
|
||||
@ -132,7 +132,10 @@ protected:
|
||||
|
||||
InferenceEngine::Core ie;
|
||||
InferenceEngine::CNNNetwork network;
|
||||
ASSERT_NO_THROW(network = ie.ReadNetwork(model, InferenceEngine::Blob::CPtr()));
|
||||
auto blob = InferenceEngine::make_shared_blob<uint8_t>(InferenceEngine::TensorDesc(InferenceEngine::Precision::U8,
|
||||
{9}, InferenceEngine::Layout::C));
|
||||
blob->allocate();
|
||||
ASSERT_NO_THROW(network = ie.ReadNetwork(model, blob));
|
||||
|
||||
int maxSign = 0x7F;
|
||||
int maxUnsign = 0xFF;
|
||||
@ -156,7 +159,7 @@ TEST_P(NormalizationConvConvEltwiseTests, TestsConvConvEltwise) {}
|
||||
INSTANTIATE_TEST_CASE_P(
|
||||
TestsConvConvEltwise, NormalizationConvConvEltwiseTests,
|
||||
::testing::Values(
|
||||
conv_conv_eltwise_params{{1, 16, 4, 4},
|
||||
conv_conv_eltwise_params{{1, 16, 4, 4},
|
||||
{ {1, 1}, {1, 1}, {0, 0}, {0, 0}, {1, 1}, "", 1, 32, true },
|
||||
{"sum", {}} },
|
||||
conv_conv_eltwise_params{{1, 16, 4, 4, 4},
|
||||
|
Loading…
Reference in New Issue
Block a user