Fixes according to new API
parent 9ff7886d23
commit fd171ee275
@@ -47,7 +47,7 @@ protected:
     Config configuration;
 
     std::shared_ptr<ov::Function> function, functionRefs = nullptr;
-    std::map<std::string, ov::runtime::Tensor> inputs;
+    std::map<std::shared_ptr<ov::Node>, ov::runtime::Tensor> inputs;
     std::vector<ngraph::PartialShape> inputDynamicShapes;
     std::vector<std::vector<ngraph::Shape>> targetStaticShapes;
     ElementType inType = ov::element::undefined, outType = ov::element::undefined;
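
Note: the inputs map is now keyed by the producing ov::Node instead of a tensor name string. A minimal sketch of building such a map from a function's input ports, assuming the transitional ov::runtime namespace and header layout used on this branch (the helper name and include paths below are illustrative, not part of the commit):

    #include <map>
    #include <memory>

    #include "openvino/core/function.hpp"   // assumed header location for ov::Function on this branch
    #include "openvino/runtime/tensor.hpp"  // assumed header location for ov::runtime::Tensor

    // Illustrative helper: allocate one tensor per input port, keyed by the producing node.
    std::map<std::shared_ptr<ov::Node>, ov::runtime::Tensor>
    make_input_map(const std::shared_ptr<ov::Function>& function) {
        std::map<std::shared_ptr<ov::Node>, ov::runtime::Tensor> inputs;
        for (const auto& input : function->inputs()) {
            // get_shape() requires the port to be statically shaped at this point.
            ov::runtime::Tensor tensor(input.get_element_type(), input.get_shape());
            inputs.emplace(input.get_node_shared_ptr(), tensor);
        }
        return inputs;
    }
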
@@ -155,11 +155,11 @@ void SubgraphBaseTest::compile_model() {
 
 void SubgraphBaseTest::generate_inputs(const std::vector<ov::Shape>& targetInputStaticShapes) {
     inputs.clear();
-    const auto& params = function->inputs();
-    for (int i = 0; i < params.size(); ++i) {
-        const auto& param = params[i];
-        ov::runtime::Tensor tensor = ov::test::utils::create_and_fill_tensor(param.get_element_type(), targetInputStaticShapes[i]);
-        inputs.insert({param.get_any_name(), tensor});
+    const auto& funcInputs = function->inputs();
+    for (int i = 0; i < funcInputs.size(); ++i) {
+        const auto& funcInput = funcInputs[i];
+        ov::runtime::Tensor tensor = ov::test::utils::create_and_fill_tensor(funcInput.get_element_type(), targetInputStaticShapes[i]);
+        inputs.insert({funcInput.get_node_shared_ptr(), tensor});
     }
 }
 
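
Note: because the keys are the input nodes themselves, the infer step can recover the matching port from the function and hand the tensor over through the port. A hedged sketch of that pairing; the helper name is illustrative and the port-based set_tensor overload on ov::runtime::InferRequest is assumed to be available in this API revision:

    #include <map>
    #include <memory>
    #include <stdexcept>
    #include <string>

    #include "openvino/core/function.hpp"           // assumed header locations for this branch
    #include "openvino/runtime/infer_request.hpp"
    #include "openvino/runtime/tensor.hpp"

    // Illustrative helper: push every prepared tensor into the request through its input port.
    void feed_inputs(ov::runtime::InferRequest& inferRequest,
                     const std::shared_ptr<ov::Function>& function,
                     const std::map<std::shared_ptr<ov::Node>, ov::runtime::Tensor>& inputs) {
        for (const auto& input : function->inputs()) {
            const auto it = inputs.find(input.get_node_shared_ptr());
            if (it == inputs.end()) {
                throw std::runtime_error("No tensor was generated for input " +
                                         input.get_node_shared_ptr()->get_friendly_name());
            }
            inferRequest.set_tensor(input, it->second);  // port-based overload (assumed)
        }
    }
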
@@ -179,7 +179,7 @@ std::vector<ov::runtime::Tensor> SubgraphBaseTest::calculate_refs() {
 std::vector<ov::runtime::Tensor> SubgraphBaseTest::get_plugin_outputs() {
     auto outputs = std::vector<ov::runtime::Tensor>{};
     for (const auto& output : function->outputs()) {
-        outputs.push_back(inferRequest.get_tensor(output.get_any_name()));
+        outputs.push_back(inferRequest.get_tensor(output));
     }
     return outputs;
 }
@@ -44,32 +44,32 @@ std::string EltwiseLayerTest::getTestCaseName(const testing::TestParamInfo<Eltwi
 void EltwiseLayerTest::generate_inputs(const std::vector<ngraph::Shape>& targetInputStaticShapes) {
     inputs.clear();
     const auto opType = std::get<1>(GetParam());
-    const auto& params = function->inputs();
-    for (int i = 0; i < params.size(); ++i) {
-        const auto& param = params[i];
+    const auto& funcInputs = function->inputs();
+    for (int i = 0; i < funcInputs.size(); ++i) {
+        const auto& funcInput = funcInputs[i];
         ov::runtime::Tensor tensor;
-        bool isReal = param.get_element_type().is_real();
+        bool isReal = funcInput.get_element_type().is_real();
         switch (opType) {
             case ngraph::helpers::EltwiseTypes::POWER:
             case ngraph::helpers::EltwiseTypes::MOD:
             case ngraph::helpers::EltwiseTypes::FLOOR_MOD:
                 tensor = isReal ?
-                         ov::test::utils::create_and_fill_tensor(param.get_element_type(), targetInputStaticShapes[i], 2, 2, 128) :
-                         ov::test::utils::create_and_fill_tensor(param.get_element_type(), targetInputStaticShapes[i], 4, 2);
+                         ov::test::utils::create_and_fill_tensor(funcInput.get_element_type(), targetInputStaticShapes[i], 2, 2, 128) :
+                         ov::test::utils::create_and_fill_tensor(funcInput.get_element_type(), targetInputStaticShapes[i], 4, 2);
                 break;
             case ngraph::helpers::EltwiseTypes::DIVIDE:
                 tensor = isReal ?
-                         ov::test::utils::create_and_fill_tensor(param.get_element_type(), targetInputStaticShapes[i], 2, 2, 128) :
-                         ov::test::utils::create_and_fill_tensor(param.get_element_type(), targetInputStaticShapes[i], 100, 101);
+                         ov::test::utils::create_and_fill_tensor(funcInput.get_element_type(), targetInputStaticShapes[i], 2, 2, 128) :
+                         ov::test::utils::create_and_fill_tensor(funcInput.get_element_type(), targetInputStaticShapes[i], 100, 101);
                 break;
             case ngraph::helpers::EltwiseTypes::ERF:
-                tensor = ov::test::utils::create_and_fill_tensor(param.get_element_type(), targetInputStaticShapes[i], 6, -3);
+                tensor = ov::test::utils::create_and_fill_tensor(funcInput.get_element_type(), targetInputStaticShapes[i], 6, -3);
                 break;
             default:
-                tensor = ov::test::utils::create_and_fill_tensor(param.get_element_type(), targetInputStaticShapes[i]);
+                tensor = ov::test::utils::create_and_fill_tensor(funcInput.get_element_type(), targetInputStaticShapes[i]);
                 break;
         }
-        inputs.insert({param.get_any_name(), tensor});
+        inputs.insert({funcInput.get_node_shared_ptr(), tensor});
     }
 }
 
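
Note: the branches of this switch differ only in the trailing arguments passed to create_and_fill_tensor, which appear to select the value range, the starting value, and the resolution of the generated data; that reading of the positional arguments is an assumption here, not taken from the utility's documentation. A hypothetical sketch that names them, with assumed include path and defaults:

    #include <cstdint>

    #include "ngraph_functions/utils/ngraph_helpers.hpp"  // assumed location of ngraph::helpers::EltwiseTypes

    // Hypothetical: name the positional fill arguments used in the hunk above.
    struct FillParams {
        uint32_t range;       // assumed meaning of the 3rd create_and_fill_tensor argument
        int32_t  start_from;  // assumed meaning of the 4th argument
        int32_t  resolution;  // assumed meaning of the 5th argument (1 when the call omits it)
    };

    FillParams pick_fill_params(ngraph::helpers::EltwiseTypes type, bool isReal) {
        switch (type) {
            case ngraph::helpers::EltwiseTypes::POWER:
            case ngraph::helpers::EltwiseTypes::MOD:
            case ngraph::helpers::EltwiseTypes::FLOOR_MOD:
                return isReal ? FillParams{2, 2, 128} : FillParams{4, 2, 1};
            case ngraph::helpers::EltwiseTypes::DIVIDE:
                return isReal ? FillParams{2, 2, 128} : FillParams{100, 101, 1};
            case ngraph::helpers::EltwiseTypes::ERF:
                return FillParams{6, -3, 1};
            default:
                return FillParams{10, 0, 1};  // assumed defaults of create_and_fill_tensor
        }
    }
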
@@ -271,7 +271,7 @@ std::vector<std::pair<ngraph::element::Type, std::vector<std::uint8_t>>>
 
 std::vector<ov::runtime::Tensor>
 interpretFunction(const std::shared_ptr<Function> &function,
-                  const std::map<std::string, ov::runtime::Tensor>& inputs);
+                  const std::map<std::shared_ptr<ov::Node>, ov::runtime::Tensor>& inputs);
 
 //
 // This function compares two nGraph functions and requires them to have exactly one output
@@ -145,38 +145,41 @@ std::vector<std::pair<ngraph::element::Type, std::vector<std::uint8_t>>>
 }
 
 std::vector<ov::runtime::Tensor> interpretFunction(const std::shared_ptr<Function> &function,
-                                                   const std::map<std::string, ov::runtime::Tensor>& inputs) {
+                                                   const std::map<std::shared_ptr<ov::Node>, ov::runtime::Tensor>& inputs) {
     runtime::Backend::set_backend_shared_library_search_directory("");
     auto backend = runtime::Backend::create("INTERPRETER");
 
-    const auto &parameters = function->inputs();
-    const auto &parametersNumber = parameters.size();
+    const auto &funcInputs = function->inputs();
+    const auto &funcInputsNumber = funcInputs.size();
     const auto &inputsNumber = inputs.size();
-    NGRAPH_CHECK(parametersNumber == inputsNumber,
-                 "Got function (", function->get_friendly_name(), ") with ", parametersNumber, " parameters, but ",
+    NGRAPH_CHECK(funcInputsNumber == inputsNumber,
+                 "Got function (", function->get_friendly_name(), ") with ", funcInputsNumber, " parameters, but ",
                  inputsNumber, " input blobs");
 
     auto inputTensors = std::vector<std::shared_ptr<runtime::Tensor>>{};
-    for (size_t i = 0; i < parametersNumber; ++i) {
-        const auto &parameter = parameters[i];
-        const auto &parameterShape = parameter.get_shape();
-        const auto &parameterType = parameter.get_element_type();
-        const auto &parameterSize = shape_size(parameterShape) * parameterType.size();
+    for (size_t i = 0; i < funcInputsNumber; ++i) {
+        const auto &input = funcInputs[i];
+        const auto &inputShape = input.get_shape();
+        const auto &inputType = input.get_element_type();
+        const auto &inputSize = shape_size(inputShape) * inputType.size();
 
-        auto inputIt = inputs.find(parameter.get_any_name());
+        auto inputIt = std::find_if(inputs.begin(), inputs.end(),
+                                    [&input](std::pair<std::shared_ptr<ov::Node>, ov::runtime::Tensor> elem) {
+                                        return elem.first->get_friendly_name() == input.get_node_shared_ptr()->get_friendly_name();
+                                    });
         if (inputIt == inputs.end()) {
-            throw std::runtime_error("Parameter: " + parameter.get_any_name() + " was not find in input parameters");
+            throw std::runtime_error("Parameter: " + input.get_node_shared_ptr()->get_friendly_name() + " was not find in input parameters");
         }
-        auto input = inputIt->second;
+        auto inputTensor = inputIt->second;
 
-        const auto &inputSize = input.get_byte_size();
-        NGRAPH_CHECK(parameterSize == inputSize,
-                     "Got parameter (", parameter.get_any_name(), ") of size ", parameterSize,
+        const auto &inputTensorSize = inputTensor.get_byte_size();
+        NGRAPH_CHECK(inputSize == inputTensorSize,
+                     "Got parameter (", input.get_node_shared_ptr()->get_friendly_name(), ") of size ", inputSize,
                      " bytes, but corresponding input ",
-                     " has ", inputSize, " bytes");
+                     " has ", inputTensorSize, " bytes");
 
-        auto tensor = backend->create_tensor(parameterType, parameterShape);
-        tensor->write(input.data(), parameterSize);
+        auto tensor = backend->create_tensor(inputType, inputShape);
+        tensor->write(inputTensor.data(), inputSize);
         inputTensors.push_back(tensor);
     }
 
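
Note: since the interpreter loop walks input ports while the map is keyed by nodes, the lookup now goes through std::find_if and compares friendly names. A stand-alone sketch of that lookup; the helper name and include paths are illustrative, while the name-based matching mirrors the code above:

    #include <algorithm>
    #include <map>
    #include <memory>
    #include <stdexcept>
    #include <string>
    #include <utility>

    #include "openvino/core/function.hpp"   // assumed header locations for this branch
    #include "openvino/runtime/tensor.hpp"

    // Illustrative helper: find the tensor that was prepared for a given input port.
    ov::runtime::Tensor find_input_tensor(const std::map<std::shared_ptr<ov::Node>, ov::runtime::Tensor>& inputs,
                                          const ov::Output<ov::Node>& input) {
        const auto it = std::find_if(inputs.begin(), inputs.end(),
            [&](const std::pair<const std::shared_ptr<ov::Node>, ov::runtime::Tensor>& elem) {
                return elem.first->get_friendly_name() == input.get_node_shared_ptr()->get_friendly_name();
            });
        if (it == inputs.end()) {
            throw std::runtime_error("Parameter " + input.get_node_shared_ptr()->get_friendly_name() +
                                     " was not found among the provided inputs");
        }
        return it->second;
    }

When the map and the function share the same node objects, a direct inputs.find(input.get_node_shared_ptr()) would avoid the linear scan and the reliance on friendly names being unique; the name comparison is simply the more permissive choice taken by the change itself.
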
@@ -902,14 +905,14 @@ std::ostream& operator<<(std::ostream & os, MemoryTransformation type) {
 
 void resize_function(std::shared_ptr<ov::Function> function,
                      const std::vector<ov::Shape>& targetInputStaticShapes) {
-    auto params = function->get_parameters();
-    std::map<std::string, ov::PartialShape> shapes;
-    if (params.size() > targetInputStaticShapes.size()) {
-        throw std::runtime_error("targetInputStaticShapes.size() = " + std::to_string(targetInputStaticShapes.size()) + " != params.size() = "
-                                 + std::to_string(params.size()));
+    auto inputs = function->inputs();
+    std::map<ov::Output<ov::Node>, ov::PartialShape> shapes;
+    if (inputs.size() > targetInputStaticShapes.size()) {
+        throw std::runtime_error("targetInputStaticShapes.size() = " + std::to_string(targetInputStaticShapes.size()) + " != inputs.size() = "
+                                 + std::to_string(inputs.size()));
     }
-    for (size_t i = 0; i < params.size(); i++) {
-        shapes.insert({params[i]->get_output_tensor(0).get_any_name(), targetInputStaticShapes[i]});
+    for (size_t i = 0; i < inputs.size(); i++) {
+        shapes.insert({inputs[i], targetInputStaticShapes[i]});
     }
     function->reshape(shapes);
 }
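
Note: reshape now accepts a map keyed by the input ports themselves, so inputs can be addressed without relying on tensor names. A minimal usage sketch, assuming opset8 and the transitional class names used on this branch (include paths are an assumption):

    #include <map>
    #include <memory>

    #include "openvino/core/function.hpp"   // assumed header locations for this branch
    #include "openvino/opsets/opset8.hpp"

    int main() {
        // Build a tiny function with a dynamic batch dimension.
        auto param = std::make_shared<ov::opset8::Parameter>(
            ov::element::f32, ov::PartialShape{ov::Dimension::dynamic(), 3, 224, 224});
        auto relu = std::make_shared<ov::opset8::Relu>(param);
        auto function = std::make_shared<ov::Function>(relu, ov::ParameterVector{param});

        // Make the input static through the port-keyed reshape overload shown in the hunk above.
        std::map<ov::Output<ov::Node>, ov::PartialShape> shapes;
        shapes.emplace(function->inputs()[0], ov::PartialShape{1, 3, 224, 224});
        function->reshape(shapes);
        return 0;
    }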