some nGraph KW fixes (#2176)
* Removed redundant methods
* Fixed KW (Klocwork) issues for Linux
parent fef1803a86
commit a4dc5c89f3
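Most of the hunks below apply the same defensive pattern that static analysis flags: the result of a std::map::find (or a paired iterator) is checked before it is dereferenced. A minimal standalone sketch of the idea, using illustrative names rather than the actual nGraph types:

#include <map>
#include <vector>

// Hypothetical helper: skip a missing class id instead of dereferencing end().
void process_class(const std::map<int, std::vector<float>>& conf_scores, int c)
{
    auto it = conf_scores.find(c);
    if (it == conf_scores.end())
    {
        return; // guard: never dereference conf_scores.end()
    }
    const std::vector<float>& scores = it->second; // safe: the key exists
    (void)scores;                                  // placeholder for the real per-class work
}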
@@ -54,44 +54,6 @@ namespace ngraph
 const std::string& s3,
 const std::string& s4);
 
-/// \brief Returns the size in bytes of filename
-/// \param filename The name of the file
-NGRAPH_API
-size_t get_file_size(const std::string& filename);
-
-/// \brief Removes all files and directories starting at dir
-/// \param dir The path of the directory to remove
-NGRAPH_API
-void remove_directory(const std::string& dir);
-
-/// \brief Create a directory
-/// \param dir Path of the directory to create
-/// \return true if the directory was created, false otherwise
-NGRAPH_API
-bool make_directory(const std::string& dir);
-
-/// \brief Gets the path of the system temporary directory
-/// \return the path to the system temporary directory
-NGRAPH_API
-std::string get_temp_directory_path();
-
-/// \brief Removes a file from the filesystem
-/// \param file The path to the file to be removed
-NGRAPH_API
-void remove_file(const std::string& file);
-
-/// \brief Reads the contents of a file
-/// \param path The path of the file to read
-/// \return vector<char> of the file's contents
-NGRAPH_API
-std::vector<char> read_file_contents(const std::string& path);
-
-/// \brief Reads the contents of a file
-/// \param path The path of the file to read
-/// \return string of the file's contents
-NGRAPH_API
-std::string read_file_to_string(const std::string& path);
-
 /// \brief Iterate through files and optionally directories. Symbolic links are skipped.
 /// \param path The path to iterate over
 /// \param func A callback function called with each file or directory encountered
@@ -101,17 +63,5 @@ namespace ngraph
 std::function<void(const std::string& file, bool is_dir)> func,
 bool recurse = false,
 bool include_links = false);
-
-/// \brief Create a temporary file
-/// \param extension Optional extension for the temporary file
-/// \return Name of the temporary file
-NGRAPH_API
-std::string tmp_filename(const std::string& extension = "");
-
-/// \brief Test for the existence of a path or file
-/// \param path The path to test
-/// \return true if the path exists, false otherwise
-NGRAPH_API
-bool exists(const std::string& path);
 }
 }
@@ -563,8 +563,12 @@ namespace ngraph
 {
 continue;
 }
+if (confScores.find(c) == confScores.end())
+continue;
 const std::vector<dataType>& scores = confScores.find(c)->second;
 int label = attrs.share_location ? -1 : c;
+if (decodeBboxesImage.find(label) == decodeBboxesImage.end())
+continue;
 const std::vector<NormalizedBBox>& bboxes =
 decodeBboxesImage.find(label)->second;
 caffeNMS(bboxes, scores, indices[c]);
@@ -585,6 +589,8 @@ namespace ngraph
 {
 int label = it->first;
 const std::vector<int>& labelIndices = it->second;
+if (confScores.find(label) == confScores.end())
+continue;
 const std::vector<dataType>& scores =
 confScores.find(label)->second;
 for (int j = 0; j < labelIndices.size(); ++j)
@@ -625,6 +631,8 @@ namespace ngraph
 int label = it->first;
 const std::vector<dataType>& scores = confScores.find(label)->second;
 int loc_label = attrs.share_location ? -1 : label;
+if (decodeBboxesImage.find(loc_label) == decodeBboxesImage.end())
+continue;
 const std::vector<NormalizedBBox>& bboxes =
 decodeBboxesImage.find(loc_label)->second;
 std::vector<int>& indices = it->second;
@@ -94,6 +94,8 @@ namespace ngraph
 params_end_corner,
 params_strides,
 params_axis_order);
+if (out_coord_iter == out_transform.end())
+break;
 auto out_index = out_transform.index(*out_coord_iter);
 for (const Coordinate& params_coord : params_transform)
 {
@@ -283,7 +283,7 @@ namespace ngraph
 struct LinearModeInnerIterationResult
 {
 bool condition;
-float w;
+float w = 0;
 Coordinate inner_coord;
 };
 
@@ -146,13 +146,14 @@ namespace ngraph
 // Inputs are 2D and below, perform dot directly
 if (arg0_rank <= 2 && arg1_rank <= 2)
 {
-return dot(arg0_update,
+dot(arg0_update,
 arg1_update,
 out,
 wip_arg0_shape,
 wip_arg1_shape,
 out_shape,
 1);
+return;
 }
 
 // Check and perform auto-broadcast if needed
@@ -57,6 +57,8 @@ namespace ngraph
 
 for (const Coordinate& input_coord : input_transform)
 {
+if (output_it == output_transform.end())
+break;
 const Coordinate& output_coord = *output_it;
 
 out[output_transform.index(output_coord)] =
@@ -89,6 +89,8 @@ namespace ngraph
 
 // Define the CoordinateTransform for updates coordinates.
 // All except indices-dimensions.
+if (updates_indices_coord_iter == updates_indices_transform.end())
+break;
 Coordinate updates_update_start_corner = *updates_indices_coord_iter;
 Coordinate updates_update_end_corner(updates_shape);
 for (size_t i = 0; i < indices_ndim; ++i)
@@ -105,6 +107,8 @@ namespace ngraph
 auto updates_update_coord_iter = updates_update_transform.begin();
 for (const Coordinate& out_cord : out_transform)
 {
+if (updates_update_coord_iter == updates_update_transform.end())
+break;
 const auto src_idx =
 updates_update_transform.index(*updates_update_coord_iter) * elem_size;
 std::copy(updates + src_idx,
@@ -64,6 +64,8 @@ namespace ngraph
 
 for (const Coordinate& in_coord : input_transform)
 {
+if (output_it == output_transform.end())
+break;
 const Coordinate& out_coord = *output_it;
 
 std::fill(v.begin(), v.end(), 0);
@@ -46,6 +46,8 @@ void runtime::reference::reshape(const char* arg,
 
 for (const Coordinate& input_coord : input_transform)
 {
+if (output_it == output_transform.end())
+break;
 const Coordinate& output_coord = *output_it;
 
 memcpy(out + output_transform.index(output_coord) * elem_size,
@@ -45,6 +45,8 @@ namespace ngraph
 
 for (const Coordinate& in_coord : input_transform)
 {
+if (output_it == output_transform.end())
+break;
 const Coordinate& out_coord = *output_it;
 
 memcpy(out + output_transform.index(out_coord) * elem_size,
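The reference-kernel hunks above all add the same guard before dereferencing the output-side iterator that advances in lockstep with the input coordinate loop. A rough sketch of that pattern, using plain containers instead of the real CoordinateTransform iterators:

#include <vector>

// Hypothetical example: copy values while walking two sequences in lockstep,
// stopping as soon as the output iterator is exhausted.
void copy_lockstep(const std::vector<int>& input, std::vector<int>& output)
{
    auto output_it = output.begin();
    for (int value : input)
    {
        if (output_it == output.end())
        {
            break; // guard: never dereference an exhausted iterator
        }
        *output_it = value;
        ++output_it;
    }
}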
@@ -25,8 +25,7 @@ using namespace std;
 std::string ngraph::getenv_string(const char* env_var)
 {
 const char* env_p = ::getenv(env_var);
-string env_string = env_p ? env_p : "";
-return env_string;
+return env_p != nullptr ? string(env_p) : "";
 }
 
 int32_t ngraph::getenv_int(const char* env_var, int32_t default_value)
@@ -125,118 +125,6 @@ string file_util::path_join(const string& s1, const string& s2)
 return rc;
 }
 
-size_t file_util::get_file_size(const string& filename)
-{
-// ensure that filename exists and get its size
-
-struct stat stats;
-if (stat(filename.c_str(), &stats) == -1)
-{
-throw runtime_error("Could not find file: \"" + filename + "\"");
-}
-
-return stats.st_size;
-}
-
-void file_util::remove_directory(const string& dir)
-{
-struct stat status;
-if (stat(dir.c_str(), &status) != -1)
-{
-iterate_files(dir,
-[](const string& file, bool is_dir) {
-if (is_dir)
-{
-RMDIR(file.c_str());
-}
-else
-{
-RMFILE(file.c_str());
-}
-},
-true);
-RMDIR(dir.c_str());
-}
-}
-
-void file_util::remove_file(const string& file)
-{
-remove(file.c_str());
-}
-
-bool file_util::make_directory(const string& dir)
-{
-#ifdef _WIN32
-CreateDirectoryA(dir.c_str(), nullptr);
-#else
-if (mkdir(dir.c_str(), S_IRWXU | S_IRWXG | S_IROTH | S_IXOTH))
-{
-if (errno == EEXIST)
-{
-// not really an error, the directory already exists
-return false;
-}
-throw runtime_error("error making directory " + dir + " " + strerror(errno));
-}
-#endif
-return true;
-}
-
-string file_util::get_temp_directory_path()
-{
-const vector<string> potential_tmps = {"NGRAPH_TMP", "TMPDIR", "TMP", "TEMP", "TEMPDIR"};
-
-string path;
-for (const string& var : potential_tmps)
-{
-path = getenv_string(var.c_str());
-if (!path.empty())
-{
-break;
-}
-}
-if (path.empty())
-{
-path = "/tmp";
-}
-
-return path;
-}
-
-vector<char> file_util::read_file_contents(const string& path)
-{
-size_t file_size = get_file_size(path);
-vector<char> data(file_size);
-
-FILE* f = fopen(path.c_str(), "rb");
-if (f)
-{
-char* p = data.data();
-size_t remainder = file_size;
-size_t offset = 0;
-while (f && remainder > 0)
-{
-size_t rc = fread(&p[offset], 1, remainder, f);
-offset += rc;
-remainder -= rc;
-}
-fclose(f);
-}
-else
-{
-throw runtime_error("error opening file '" + path + "'");
-}
-return data;
-}
-
-string file_util::read_file_to_string(const string& path)
-{
-ifstream f(path);
-stringstream ss;
-ss << f.rdbuf();
-return ss.str();
-}
-
 #ifndef _WIN32
 static void iterate_files_worker(const string& path,
 function<void(const string& file, bool is_dir)> func,
@@ -352,30 +240,3 @@ void file_util::iterate_files(const string& path,
 func(f, true);
 }
 }
-
-string file_util::tmp_filename(const string& extension)
-{
-string rc;
-#ifdef _WIN32
-rc = _tempnam(file_util::get_temp_directory_path().c_str(), "ngraph_");
-#else
-string tmp_template =
-file_util::path_join(file_util::get_temp_directory_path(), "ngraph_XXXXXX" + extension);
-char* tmpname = strdup(tmp_template.c_str());
-if (tmpname != nullptr)
-{
-// mkstemp opens the file with open() so we need to close it
-close(mkstemps(tmpname, static_cast<int>(extension.size())));
-
-rc = tmpname;
-free(tmpname);
-}
-#endif
-return rc;
-}
-
-bool file_util::exists(const string& filename)
-{
-struct stat buffer;
-return (stat(filename.c_str(), &buffer) == 0);
-}
@@ -193,6 +193,12 @@ shared_ptr<Node> op::v0::ShapeOf::clone_with_new_inputs(const OutputVector& new_
 {
 check_new_args_count(this, new_args);
 auto new_shape_of = make_shared<op::v0::ShapeOf>(new_args.at(0));
+NGRAPH_CHECK(new_shape_of.get(),
+new_shape_of != nullptr,
+"Cannot clone ",
+description(),
+" operation with name ",
+get_friendly_name());
 new_shape_of->set_is_foldable(m_is_foldable);
 return new_shape_of;
 }
@@ -618,6 +618,12 @@ std::shared_ptr<Node>
 op::v0::TensorIterator::clone_with_new_inputs(const OutputVector& new_args) const
 {
 auto op = make_shared<op::v0::TensorIterator>(new_args);
+NGRAPH_CHECK(op.get(),
+op != nullptr,
+"Cannot clone ",
+description(),
+" operation with name ",
+get_friendly_name());
 op->set_output_size(m_output_descriptions.size());
 
 std::vector<::ngraph::element::Type> types(m_body->get_parameters().size());
@@ -79,54 +79,51 @@ void op::util::ScatterBase::validate_and_infer_types()
 data_shape.rank().get_length() - 1,
 "Updates rank is expected to be indices rank + data rank - 1.");
 
-bool compatible = true;
-int64_t axis;
 bool is_axis_constant = op::is_constant(input_value(AXIS).get_node());
 
 // Get axis value if possible.
 if (is_axis_constant && data_shape.rank().is_static())
 {
+bool compatible = true;
 const auto axis_const_input =
 as_type_ptr<op::v0::Constant>(input_value(AXIS).get_node_shared_ptr());
-axis = axis_const_input->cast_vector<int64_t>().at(0);
+int64_t axis = axis_const_input->cast_vector<int64_t>().at(0);
 axis = normalize_axis(this, axis, data_shape.rank().get_length());
 
+if (indices_shape.rank().is_static() && updates_shape.rank().is_static())
+{
+for (int64_t i = 0; i < indices_shape.rank().get_length(); ++i)
+{
+compatible = compatible && updates_shape[axis + i].compatible(indices_shape[i]);
+}
+
+int64_t indices_rank = indices_shape.rank().get_length();
+// Check [d_0, d_1, ... d_(axis - 1)] updates dimensions
+for (int64_t i = 0; i < axis; ++i)
+{
+compatible = compatible && updates_shape[i].compatible(data_shape[i]);
+}
+// Check [d_(axis + k + 1), ..., d_n] updates dimensions
+for (int64_t i = axis + 1; i < data_shape.rank().get_length(); ++i)
+{
+compatible =
+compatible && updates_shape[indices_rank - 1 + i].compatible(data_shape[i]);
+}
+}
+NODE_VALIDATION_CHECK(this,
+compatible,
+"Updates shape must have appropriate dimensions equal to indices and "
+"data dimensions. Updates shape:",
+updates_shape,
+", data shape: ",
+data_shape,
+", indices_shape: ",
+indices_shape,
+", axis: ",
+axis,
+".");
 }
 
-if (is_axis_constant && data_shape.rank().is_static() && indices_shape.rank().is_static() &&
-updates_shape.rank().is_static())
-{
-for (int64_t i = 0; i < indices_shape.rank().get_length(); ++i)
-{
-compatible = compatible && updates_shape[axis + i].compatible(indices_shape[i]);
-}
-
-int64_t indices_rank = indices_shape.rank().get_length();
-// Check [d_0, d_1, ... d_(axis - 1)] updates dimensions
-for (int64_t i = 0; i < axis; ++i)
-{
-compatible = compatible && updates_shape[i].compatible(data_shape[i]);
-}
-// Check [d_(axis + k + 1), ..., d_n] updates dimensions
-for (int64_t i = axis + 1; i < data_shape.rank().get_length(); ++i)
-{
-compatible =
-compatible && updates_shape[indices_rank - 1 + i].compatible(data_shape[i]);
-}
-}
-
-NODE_VALIDATION_CHECK(this,
-compatible,
-"Updates shape must have appropriate dimensions equal to indices and "
-"data dimensions. Updates shape:",
-updates_shape,
-", data shape: ",
-data_shape,
-", indices_shape: ",
-indices_shape,
-", axis: ",
-axis,
-".");
-
 if (data_shape.is_dynamic())
 {
 set_input_is_relevant_to_shape(0);
@@ -37,7 +37,7 @@ runtime::AlignedBuffer::AlignedBuffer(size_t byte_size, size_t alignment)
 size_t allocation_size = m_byte_size + alignment;
 m_allocated_buffer = static_cast<char*>(ngraph_malloc(allocation_size));
 m_aligned_buffer = m_allocated_buffer;
-size_t mod = size_t(m_aligned_buffer) % alignment;
+size_t mod = (alignment != 0) ? size_t(m_aligned_buffer) % alignment : 0;
 
 if (mod != 0)
 {
@@ -67,6 +67,10 @@ runtime::AlignedBuffer& runtime::AlignedBuffer::operator=(AlignedBuffer&& other)
 {
 if (this != &other)
 {
+if (m_allocated_buffer != nullptr)
+{
+free(m_allocated_buffer);
+}
 m_allocated_buffer = other.m_allocated_buffer;
 m_aligned_buffer = other.m_aligned_buffer;
 m_byte_size = other.m_byte_size;
@@ -38,6 +38,10 @@ runtime::HostTensor::HostTensor(const ngraph::element::Type& element_type,
 {
 allocate_buffer();
 }
+else
+{
+m_buffer_size = 0;
+}
 }
 
 runtime::HostTensor::HostTensor(const element::Type& element_type,
@@ -52,6 +56,7 @@ runtime::HostTensor::HostTensor(const element::Type& element_type,
 const std::string& name)
 : runtime::Tensor(
 std::make_shared<ngraph::descriptor::Tensor>(element_type, partial_shape, name))
+, m_buffer_size(0)
 {
 // Defer allocation until ptr is requested
 }
@@ -157,7 +157,6 @@ namespace ngraph
 const std::map<std::int64_t, Operator>::const_iterator
 find(std::int64_t version, const std::map<std::int64_t, Operator>& map)
 {
-std::map<std::int64_t, Operator>::const_iterator it{};
 // Get the latest version.
 if (version == -1)
 {
@@ -165,13 +164,13 @@ namespace ngraph
 }
 while (version > 0)
 {
-it = map.find(version--);
+std::map<std::int64_t, Operator>::const_iterator it = map.find(version--);
 if (it != std::end(map))
 {
 return it;
 }
 }
-return it;
+return std::end(map);
 }
 }
 
@@ -89,9 +89,3 @@ TEST(file_util, path_join)
 EXPECT_STREQ("/test1/test2", file_util::path_join(s1, s2).c_str());
 }
 }
-
-TEST(file_util, get_temp_directory_path)
-{
-string tmp = file_util::get_temp_directory_path();
-EXPECT_NE(0, tmp.size());
-}
@@ -73,6 +73,8 @@ shared_ptr<runtime::Tensor> runtime::ie::IE_Backend::create_tensor(
 const element::Type& element_type, const Shape& shape, void* data)
 {
 shared_ptr<runtime::Tensor> tensor = make_shared<IETensor>(element_type, shape);
+if (tensor == nullptr)
+throw runtime_error("Cannot create IETensor!");
 tensor->write(data, shape_size(shape) * element_type.size());
 return tensor;
 }
@@ -150,7 +150,10 @@ bool runtime::ie::IE_Executable::call(const vector<shared_ptr<runtime::Tensor>>&
 }
 
 // Prepare output blobs
-string output_name = m_network.getOutputsInfo().begin()->first;
+auto outInfo = m_network.getOutputsInfo();
+if (outInfo.size() != 1)
+THROW_IE_EXCEPTION << "Networks should contain only one output!";
+string output_name = outInfo.begin()->first;
 
 infer_request.Infer();
 InferenceEngine::Blob::Ptr output = infer_request.GetBlob(output_name);