Removal of old offline transformations from Python

tomdol 2021-10-14 18:19:29 +02:00
parent acb040bf0d
commit f9a0551ead
7 changed files with 0 additions and 270 deletions
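For context, this is roughly how the module removed by this commit was consumed from Python. A minimal sketch only: the import path follows the install rules in the CMake file below, and the model file names are hypothetical.

# Minimal usage sketch of the API being removed. The import path matches the
# install destination in the CMakeLists.txt below; the IR file names are hypothetical.
from openvino.inference_engine import IECore
from openvino.offline_transformations import (
    ApplyMOCTransformations,
    ApplyLowLatencyTransformation,
)

ie = IECore()
net = ie.read_network(model="model.xml", weights="model.bin")  # hypothetical IR files

# Run the MOC transformation pipeline on the network's nGraph function
# (the cf flag is forwarded as-is to ngraph::pass::MOCTransformations).
ApplyMOCTransformations(net, False)

# Apply LowLatency2 so TensorIterator/Loop state becomes explicit ReadValue/Assign pairs.
ApplyLowLatencyTransformation(net, use_const_initializer=True)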

View File: CMakeLists.txt (Python bridge root)

@@ -66,7 +66,6 @@ endfunction()
 set (PYTHON_BRIDGE_SRC_ROOT ${CMAKE_CURRENT_SOURCE_DIR})
 add_subdirectory (src/openvino/inference_engine)
-add_subdirectory (src/openvino/offline_transformations)
 if (ENABLE_TESTS)
     add_subdirectory(src/openvino/test_utils)

View File: src/openvino/offline_transformations/CMakeLists.txt

@@ -1,67 +0,0 @@
# Copyright (C) 2018-2021 Intel Corporation
# SPDX-License-Identifier: Apache-2.0
#
set(TARGET_NAME "offline_transformations_api")

set(CMAKE_LIBRARY_OUTPUT_DIRECTORY ${PYTHON_BRIDGE_OUTPUT_DIRECTORY}/offline_transformations)
set(CMAKE_ARCHIVE_OUTPUT_DIRECTORY ${PYTHON_BRIDGE_OUTPUT_DIRECTORY}/offline_transformations)
set(CMAKE_COMPILE_PDB_OUTPUT_DIRECTORY ${PYTHON_BRIDGE_OUTPUT_DIRECTORY}/offline_transformations)
set(CMAKE_PDB_OUTPUT_DIRECTORY ${PYTHON_BRIDGE_OUTPUT_DIRECTORY}/offline_transformations)

set(SOURCES ${CMAKE_CURRENT_SOURCE_DIR}/offline_transformations_api_impl_defs.pxd
            ${CMAKE_CURRENT_SOURCE_DIR}/offline_transformations_api.pyx
            ${CMAKE_CURRENT_SOURCE_DIR}/offline_transformations_api_impl.hpp
            ${CMAKE_CURRENT_SOURCE_DIR}/offline_transformations_api_impl.cpp)

set_source_files_properties(${CMAKE_CURRENT_SOURCE_DIR}/offline_transformations_api.pyx
                            PROPERTIES CYTHON_IS_CXX ON)

# create target

cython_add_module(${TARGET_NAME} ${SOURCES})
add_dependencies(${TARGET_NAME} ie_api)

ov_python_disable_intel_warnings(${TARGET_NAME})

if(COMMAND ie_add_vs_version_file)
    ie_add_vs_version_file(NAME ${TARGET_NAME}
                           FILEDESCRIPTION "Offline Transformations Python library")
endif()

if(TARGET offline_transformations)
    list(APPEND link_libraries offline_transformations)
else()
    list(APPEND link_libraries IE::offline_transformations)
endif()

target_include_directories(${TARGET_NAME} SYSTEM PRIVATE "${CMAKE_CURRENT_SOURCE_DIR}/../inference_engine")
target_include_directories(${TARGET_NAME} PRIVATE "${CMAKE_CURRENT_SOURCE_DIR}")

target_link_libraries(${TARGET_NAME} PRIVATE openvino::runtime ${link_libraries})

# Compatibility with python 2.7 which has deprecated "register" specifier
if(CMAKE_CXX_COMPILER_ID STREQUAL "Clang")
    target_compile_options(${TARGET_NAME} PRIVATE "-Wno-error=register")
endif()

add_clang_format_target(${TARGET_NAME}_clang FOR_TARGETS ${TARGET_NAME}
                        EXCLUDE_PATTERNS ".*\\.cxx;.*\\.pxd;.*\\.pyx")

# perform copy
add_custom_command(TARGET ${TARGET_NAME}
                   POST_BUILD
                   COMMAND ${CMAKE_COMMAND} -E copy ${PYTHON_BRIDGE_SRC_ROOT}/src/openvino/offline_transformations/__init__.py ${CMAKE_LIBRARY_OUTPUT_DIRECTORY}/__init__.py
)

# install

# TODO: use ${PYTHON_VERSION}_dev component below
# ie_cpack_add_component(${PYTHON_VERSION}_dev DEPENDS ${PYTHON_COMPONENT})

install(TARGETS ${TARGET_NAME}
        RUNTIME DESTINATION ${PYTHON_BRIDGE_CPACK_PATH}/${PYTHON_VERSION}/openvino/offline_transformations COMPONENT ${PYTHON_COMPONENT}
        LIBRARY DESTINATION ${PYTHON_BRIDGE_CPACK_PATH}/${PYTHON_VERSION}/openvino/offline_transformations COMPONENT ${PYTHON_COMPONENT})

install(PROGRAMS __init__.py
        DESTINATION ${PYTHON_BRIDGE_CPACK_PATH}/${PYTHON_VERSION}/openvino/offline_transformations
        COMPONENT ${PYTHON_COMPONENT})
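The build output above lands under ${PYTHON_BRIDGE_OUTPUT_DIRECTORY}/offline_transformations, next to the copied __init__.py. A hypothetical developer workflow for picking up the freshly built module (the path below is purely illustrative, not taken from this commit):

# Hypothetical: point PYTHONPATH/sys.path at the bridge output directory so that
# openvino.offline_transformations resolves to the module just built by this target.
import sys

sys.path.insert(0, "/path/to/bin/intel64/Release/python_api/python3.8")  # hypothetical layout

import openvino.offline_transformations as ot
print(ot.__file__)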

View File: src/openvino/offline_transformations/__init__.py

@@ -1,32 +0,0 @@
# -*- coding: utf-8 -*-
# Copyright (C) 2018-2021 Intel Corporation
# SPDX-License-Identifier: Apache-2.0

import os
import sys

if sys.platform == 'win32':
    # Installer, yum and pip install the openvino dlls to different directories,
    # and those paths need to be visible to the openvino modules.
    #
    # If you're using a custom installation of openvino,
    # add the location of the openvino dlls to your system PATH.
    #
    # By default, look for the libs in the pip installation path.
    openvino_libs = [os.path.join(os.path.dirname(__file__), '..', '..', 'openvino', 'libs')]

    # The setupvars.bat script sets all lib paths in the OPENVINO_LIB_PATHS environment variable.
    openvino_libs_installer = os.getenv('OPENVINO_LIB_PATHS')
    if openvino_libs_installer:
        openvino_libs.extend(openvino_libs_installer.split(';'))

    for lib in openvino_libs:
        lib_path = os.path.join(os.path.dirname(__file__), lib)
        if os.path.isdir(lib_path):
            # On Windows, with Python >= 3.8, DLLs are no longer imported from the PATH.
            if (3, 8) <= sys.version_info:
                os.add_dll_directory(os.path.abspath(lib_path))
            else:
                os.environ['PATH'] = os.path.abspath(lib_path) + ';' + os.environ['PATH']

from .offline_transformations_api import *

__all__ = ['ApplyMOCTransformations']
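The Windows branch above only takes effect for directories that actually exist. A short illustrative sketch of how it is driven; the explicit environment assignment and the paths are hypothetical, since setupvars.bat normally exports OPENVINO_LIB_PATHS:

# Illustrative only: setupvars.bat normally sets OPENVINO_LIB_PATHS; the paths below
# are hypothetical. The __init__ above splits the value on ';' and registers every
# existing directory before the Cython extension is loaded.
import os

os.environ["OPENVINO_LIB_PATHS"] = r"C:\openvino\bin\intel64\Release;C:\openvino\3rdparty\tbb\bin"

import openvino.offline_transformations  # DLL directories must be registered before this import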

View File: src/openvino/offline_transformations/offline_transformations_api.pyx

@@ -1,43 +0,0 @@
# Copyright (C) 2018-2021 Intel Corporation
# SPDX-License-Identifier: Apache-2.0

from .cimport offline_transformations_api_impl_defs as C
from ..inference_engine.ie_api cimport IENetwork

from libcpp cimport bool
from libcpp.string cimport string
from libcpp.map cimport map
from libc.stdint cimport int64_t


def ApplyMOCTransformations(IENetwork network, bool cf):
    C.ApplyMOCTransformations(network.impl, cf)


def ApplyPOTTransformations(IENetwork network, string device):
    C.ApplyPOTTransformations(network.impl, device)


def ApplyMakeStatefulTransformation(IENetwork network, param_res_names : dict):
    cdef map[string, string] c_param_res_names
    for param_name, res_name in param_res_names.items():
        if type(param_name) != str or type(res_name) != str:
            raise TypeError("Only string keys and values are allowed!")
        c_param_res_names[param_name.encode()] = res_name.encode()
    C.ApplyMakeStatefulTransformation(network.impl, c_param_res_names)


def ApplyLowLatencyTransformation(IENetwork network, bool use_const_initializer = True):
    C.ApplyLowLatencyTransformation(network.impl, use_const_initializer)


def ApplyPruningTransformation(IENetwork network):
    C.ApplyPruningTransformation(network.impl)


def GenerateMappingFile(IENetwork network, string path, bool extract_names):
    C.GenerateMappingFile(network.impl, path, extract_names)


def CheckAPI():
    C.CheckAPI()
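A sketch of how the remaining entry points were typically called. The tensor names and the mapping file path are hypothetical, and passing bytes for the std::string parameters reflects the default Cython string coercion rather than anything stated in this commit:

# Sketch of calling the removed bindings directly; tensor names and file names are
# hypothetical, and byte-encoding the std::string arguments is an assumption about
# the module's default Cython string handling.
from openvino.inference_engine import IECore
from openvino.offline_transformations import (
    ApplyMakeStatefulTransformation,
    ApplyPruningTransformation,
    GenerateMappingFile,
)

net = IECore().read_network("model.xml", "model.bin")  # hypothetical IR files

# Keys and values must both be str, otherwise the .pyx above raises TypeError.
ApplyMakeStatefulTransformation(net, {"input_param": "output_result"})

ApplyPruningTransformation(net)

GenerateMappingFile(net, "mapping.map".encode("utf-8"), True)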

View File: src/openvino/offline_transformations/offline_transformations_api_impl.cpp

@@ -1,74 +0,0 @@
// Copyright (C) 2018-2021 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
//

#include "offline_transformations_api_impl.hpp"

#include <generate_mapping_file.hpp>
#include <ngraph/opsets/opset6.hpp>
#include <ngraph/pass/constant_folding.hpp>
#include <ngraph/pass/low_latency.hpp>
#include <ngraph/pass/manager.hpp>
#include <openvino/pass/make_stateful.hpp>
#include <pot_transformations.hpp>
#include <pruning.hpp>
#include <transformations/common_optimizations/moc_transformations.hpp>
#include <transformations/control_flow/unroll_tensor_iterator.hpp>

void InferenceEnginePython::ApplyMOCTransformations(InferenceEnginePython::IENetwork network, bool cf) {
    ngraph::pass::Manager manager;
    manager.register_pass<ngraph::pass::MOCTransformations>(cf);
    manager.run_passes(network.actual->getFunction());
}

void InferenceEnginePython::ApplyPOTTransformations(InferenceEnginePython::IENetwork network, std::string device) {
    ngraph::pass::Manager manager;
    manager.register_pass<ngraph::pass::POTTransformations>(std::move(device));
    manager.run_passes(network.actual->getFunction());
}

void InferenceEnginePython::ApplyLowLatencyTransformation(InferenceEnginePython::IENetwork network,
                                                          bool use_const_initializer) {
    ngraph::pass::Manager manager;
    manager.register_pass<ngraph::pass::LowLatency2>(use_const_initializer);
    manager.run_passes(network.actual->getFunction());
}

void InferenceEnginePython::ApplyMakeStatefulTransformation(InferenceEnginePython::IENetwork network,
                                                            std::map<std::string, std::string>& param_res_names) {
    ngraph::pass::Manager manager;
    manager.register_pass<ov::pass::MakeStateful>(param_res_names);
    manager.run_passes(network.actual->getFunction());
}

void InferenceEnginePython::ApplyPruningTransformation(InferenceEnginePython::IENetwork network) {
    ngraph::pass::Manager manager;
    manager.register_pass<ngraph::pass::Pruning>();
    manager.run_passes(network.actual->getFunction());
}

void InferenceEnginePython::GenerateMappingFile(InferenceEnginePython::IENetwork network,
                                                std::string path,
                                                bool extract_names) {
    ngraph::pass::Manager manager;
    manager.register_pass<ngraph::pass::GenerateMappingFile>(path, extract_names);
    manager.run_passes(network.actual->getFunction());
}

void InferenceEnginePython::CheckAPI() {
    std::shared_ptr<ngraph::Function> f;
    {
        auto input = std::make_shared<ngraph::opset6::Parameter>(ngraph::element::f32, ngraph::Shape{1, 1000, 4});
        auto reshape =
            std::make_shared<ngraph::opset6::Reshape>(input, std::make_shared<ngraph::opset6::ShapeOf>(input), true);
        f = std::make_shared<ngraph::Function>(ngraph::NodeVector{reshape}, ngraph::ParameterVector{input});
    }

    ngraph::pass::Manager m;
    m.register_pass<ngraph::pass::ConstantFolding>();
    m.run_passes(f);

    assert(f->get_results().size() == 1);
    auto reshape = f->get_result()->input_value(0).get_node_shared_ptr();
    assert(std::dynamic_pointer_cast<ngraph::opset6::Parameter>(reshape->input_value(0).get_node_shared_ptr()));
    assert(std::dynamic_pointer_cast<ngraph::opset6::Constant>(reshape->input_value(1).get_node_shared_ptr()));
}
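CheckAPI is a smoke test: it builds a Parameter, ShapeOf, Reshape graph, runs ConstantFolding, and asserts that the ShapeOf input of the Reshape folded into a Constant, confirming the bound nGraph pipeline works end to end. From Python it was invoked with no arguments; a sketch, assuming the same import path as above:

# Smoke-test the binding and its nGraph dependencies (assumed import path as above).
from openvino.offline_transformations import CheckAPI

CheckAPI()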

View File: src/openvino/offline_transformations/offline_transformations_api_impl.hpp

@@ -1,30 +0,0 @@
// Copyright (C) 2018-2021 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
//

#pragma once

#include <map>
#include <string>

#include "Python.h"
#include "ie_api_impl.hpp"

namespace InferenceEnginePython {

void ApplyMOCTransformations(InferenceEnginePython::IENetwork network, bool cf);

void ApplyPOTTransformations(InferenceEnginePython::IENetwork network, std::string device);

void ApplyLowLatencyTransformation(InferenceEnginePython::IENetwork network, bool use_const_initializer = true);

void ApplyMakeStatefulTransformation(InferenceEnginePython::IENetwork network,
                                     std::map<std::string, std::string>& param_res_names);

void ApplyPruningTransformation(InferenceEnginePython::IENetwork network);

void GenerateMappingFile(InferenceEnginePython::IENetwork network, std::string path, bool extract_names);

void CheckAPI();

};  // namespace InferenceEnginePython

View File: src/openvino/offline_transformations/offline_transformations_api_impl_defs.pxd

@@ -1,23 +0,0 @@
# Copyright (C) 2018-2021 Intel Corporation
# SPDX-License-Identifier: Apache-2.0

from libcpp cimport bool
from libcpp.string cimport string
from libcpp.map cimport map

from ..inference_engine.ie_api_impl_defs cimport IENetwork


cdef extern from "offline_transformations_api_impl.hpp" namespace "InferenceEnginePython":
    cdef void ApplyMOCTransformations(IENetwork network, bool cf)

    cdef void ApplyPOTTransformations(IENetwork network, string device)

    cdef void ApplyLowLatencyTransformation(IENetwork network, bool use_const_initializer)

    cdef void ApplyMakeStatefulTransformation(IENetwork network, map[string, string]& in_out_names)

    cdef void ApplyPruningTransformation(IENetwork network)

    cdef void GenerateMappingFile(IENetwork network, string path, bool extract_names)

    cdef void CheckAPI()