some nGraph KW fixes (#2176)

* Removed redundant methods

* Fixed KW (Klocwork) warnings for Linux
Ilya Churaev 2020-09-15 13:59:42 +03:00 committed by GitHub
parent fef1803a86
commit a4dc5c89f3
21 changed files with 97 additions and 248 deletions

@@ -54,44 +54,6 @@ namespace ngraph
                              const std::string& s3,
                              const std::string& s4);

-        /// \brief Returns the size in bytes of filename
-        /// \param filename The name of the file
-        NGRAPH_API
-        size_t get_file_size(const std::string& filename);
-
-        /// \brief Removes all files and directories starting at dir
-        /// \param dir The path of the directory to remove
-        NGRAPH_API
-        void remove_directory(const std::string& dir);
-
-        /// \brief Create a directory
-        /// \param dir Path of the directory to create
-        /// \return true if the directory was created, false otherwise
-        NGRAPH_API
-        bool make_directory(const std::string& dir);
-
-        /// \brief Gets the path of the system temporary directory
-        /// \return the path to the system temporary directory
-        NGRAPH_API
-        std::string get_temp_directory_path();
-
-        /// \brief Removes a file from the filesystem
-        /// \param file The path to the file to be removed
-        NGRAPH_API
-        void remove_file(const std::string& file);
-
-        /// \brief Reads the contents of a file
-        /// \param path The path of the file to read
-        /// \return vector<char> of the file's contents
-        NGRAPH_API
-        std::vector<char> read_file_contents(const std::string& path);
-
-        /// \brief Reads the contents of a file
-        /// \param path The path of the file to read
-        /// \return string of the file's contents
-        NGRAPH_API
-        std::string read_file_to_string(const std::string& path);
-
         /// \brief Iterate through files and optionally directories. Symbolic links are skipped.
         /// \param path The path to iterate over
         /// \param func A callback function called with each file or directory encountered
@@ -101,17 +63,5 @@ namespace ngraph
                           std::function<void(const std::string& file, bool is_dir)> func,
                           bool recurse = false,
                           bool include_links = false);
-
-        /// \brief Create a temporary file
-        /// \param extension Optional extension for the temporary file
-        /// \return Name of the temporary file
-        NGRAPH_API
-        std::string tmp_filename(const std::string& extension = "");
-
-        /// \brief Test for the existence of a path or file
-        /// \param path The path to test
-        /// \return true if the path exists, false otherwise
-        NGRAPH_API
-        bool exists(const std::string& path);
    }
}

@@ -563,8 +563,12 @@ namespace ngraph
                {
                    continue;
                }
+               if (confScores.find(c) == confScores.end())
+                   continue;
                const std::vector<dataType>& scores = confScores.find(c)->second;
                int label = attrs.share_location ? -1 : c;
+               if (decodeBboxesImage.find(label) == decodeBboxesImage.end())
+                   continue;
                const std::vector<NormalizedBBox>& bboxes =
                    decodeBboxesImage.find(label)->second;
                caffeNMS(bboxes, scores, indices[c]);
@@ -585,6 +589,8 @@ namespace ngraph
                {
                    int label = it->first;
                    const std::vector<int>& labelIndices = it->second;
+                   if (confScores.find(label) == confScores.end())
+                       continue;
                    const std::vector<dataType>& scores =
                        confScores.find(label)->second;
                    for (int j = 0; j < labelIndices.size(); ++j)
@@ -625,6 +631,8 @@ namespace ngraph
                    int label = it->first;
                    const std::vector<dataType>& scores = confScores.find(label)->second;
                    int loc_label = attrs.share_location ? -1 : label;
+                   if (decodeBboxesImage.find(loc_label) == decodeBboxesImage.end())
+                       continue;
                    const std::vector<NormalizedBBox>& bboxes =
                        decodeBboxesImage.find(loc_label)->second;
                    std::vector<int>& indices = it->second;
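
All three hunks in this file fix the same defect class: a std::map::find result was dereferenced via ->second without first checking for a miss, which is undefined behavior when the key is absent. A minimal sketch of the guarded pattern, with illustrative names rather than the real nGraph types:

#include <iostream>
#include <map>
#include <vector>

// confScores stands in for the per-class score map keyed by class id.
void process_class(const std::map<int, std::vector<float>>& confScores, int c)
{
    auto it = confScores.find(c);
    if (it == confScores.end())
        return; // key absent: skip instead of dereferencing end()
    const std::vector<float>& scores = it->second; // safe: presence verified
    std::cout << "class " << c << ": " << scores.size() << " scores\n";
}

The committed fix calls find twice (once in the guard, once for ->second); reusing a single iterator as above gives the same behavior without the second lookup.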

@@ -94,6 +94,8 @@ namespace ngraph
                                               params_end_corner,
                                               params_strides,
                                               params_axis_order);
+               if (out_coord_iter == out_transform.end())
+                   break;
                auto out_index = out_transform.index(*out_coord_iter);
                for (const Coordinate& params_coord : params_transform)
                {
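
This guard, and the near-identical ones added to the other reference kernels below, protects a lockstep iteration: the loop is driven by the input-side CoordinateTransform while an output-side iterator is advanced alongside it, and that iterator was previously dereferenced without an end() check. The two transforms should cover the same number of coordinates, so the break is defensive rather than functional. A generic sketch of the pattern over standard containers (hypothetical names, not the CoordinateTransform API):

#include <vector>

// Two sequences walked in lockstep: the loop ranges over the input, so the
// output iterator must be checked before every dereference in case the
// sequences ever disagree in length.
void copy_lockstep(const std::vector<int>& in, std::vector<int>& out)
{
    auto out_it = out.begin();
    for (int value : in)
    {
        if (out_it == out.end())
            break; // defensive: never dereference a past-the-end iterator
        *out_it = value;
        ++out_it;
    }
}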

@@ -283,7 +283,7 @@ namespace ngraph
            struct LinearModeInnerIterationResult
            {
                bool condition;
-               float w;
+               float w = 0;
                Coordinate inner_coord;
            };
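
w now has an in-class default member initializer, so a LinearModeInnerIterationResult is well-defined even on paths that read the field before assigning it. A simplified illustration; note the real struct still leaves condition uninitialized, whereas initializing every member, as below, would be the belt-and-braces variant:

struct Result
{
    bool condition = false; // default member initializers keep instances
    float w = 0;            // well-defined even when a field is read
};                          // before being explicitly assigned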

@@ -146,13 +146,14 @@ namespace ngraph
                // Inputs are 2D and below, perform dot directly
                if (arg0_rank <= 2 && arg1_rank <= 2)
                {
-                   return dot(arg0_update,
+                   dot(arg0_update,
                        arg1_update,
                        out,
                        wip_arg0_shape,
                        wip_arg1_shape,
                        out_shape,
                        1);
+                   return;
                }
                // Check and perform auto-broadcast if needed
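
return expr; where expr has type void is valid C++, but it is evidently what the analyzer flagged here, and separating the call from the return reads better in any case. A minimal reproduction of the rewrite:

#include <iostream>

void log_value(int v) { std::cout << v << '\n'; }

void before(int v)
{
    return log_value(v); // legal (void expression), but flagged by some checkers
}

void after(int v)
{
    log_value(v); // same behavior, explicit and checker-friendly
    return;
}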

@@ -57,6 +57,8 @@ namespace ngraph
                for (const Coordinate& input_coord : input_transform)
                {
+                   if (output_it == output_transform.end())
+                       break;
                    const Coordinate& output_coord = *output_it;
                    out[output_transform.index(output_coord)] =

@@ -89,6 +89,8 @@ namespace ngraph
                // Define the CoordinateTransform for updates coordinates.
                // All except indices-dimensions.
+               if (updates_indices_coord_iter == updates_indices_transform.end())
+                   break;
                Coordinate updates_update_start_corner = *updates_indices_coord_iter;
                Coordinate updates_update_end_corner(updates_shape);
                for (size_t i = 0; i < indices_ndim; ++i)
@@ -105,6 +107,8 @@ namespace ngraph
                auto updates_update_coord_iter = updates_update_transform.begin();
                for (const Coordinate& out_cord : out_transform)
                {
+                   if (updates_update_coord_iter == updates_update_transform.end())
+                       break;
                    const auto src_idx =
                        updates_update_transform.index(*updates_update_coord_iter) * elem_size;
                    std::copy(updates + src_idx,

@@ -64,6 +64,8 @@ namespace ngraph
                for (const Coordinate& in_coord : input_transform)
                {
+                   if (output_it == output_transform.end())
+                       break;
                    const Coordinate& out_coord = *output_it;
                    std::fill(v.begin(), v.end(), 0);

@@ -46,6 +46,8 @@ void runtime::reference::reshape(const char* arg,
    for (const Coordinate& input_coord : input_transform)
    {
+       if (output_it == output_transform.end())
+           break;
        const Coordinate& output_coord = *output_it;
        memcpy(out + output_transform.index(output_coord) * elem_size,

@@ -45,6 +45,8 @@ namespace ngraph
                for (const Coordinate& in_coord : input_transform)
                {
+                   if (output_it == output_transform.end())
+                       break;
                    const Coordinate& out_coord = *output_it;
                    memcpy(out + output_transform.index(out_coord) * elem_size,

@@ -25,8 +25,7 @@ using namespace std;
std::string ngraph::getenv_string(const char* env_var)
{
    const char* env_p = ::getenv(env_var);
-   string env_string = env_p ? env_p : "";
-   return env_string;
+   return env_p != nullptr ? string(env_p) : "";
}

int32_t ngraph::getenv_int(const char* env_var, int32_t default_value)
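
The rewrite makes the null test on getenv's result explicit before any std::string is constructed from the pointer; constructing std::string from a null char* is undefined behavior, hence the guard. The same wrapper as a standalone sketch, for reference:

#include <cstdlib>
#include <string>

// Returns the value of env_var, or an empty string when it is unset.
std::string getenv_string(const char* env_var)
{
    const char* env_p = std::getenv(env_var);
    return env_p != nullptr ? std::string(env_p) : std::string();
}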

@@ -125,118 +125,6 @@ string file_util::path_join(const string& s1, const string& s2)
    return rc;
}

-size_t file_util::get_file_size(const string& filename)
-{
-    // ensure that filename exists and get its size
-    struct stat stats;
-    if (stat(filename.c_str(), &stats) == -1)
-    {
-        throw runtime_error("Could not find file: \"" + filename + "\"");
-    }
-    return stats.st_size;
-}
-
-void file_util::remove_directory(const string& dir)
-{
-    struct stat status;
-    if (stat(dir.c_str(), &status) != -1)
-    {
-        iterate_files(dir,
-                      [](const string& file, bool is_dir) {
-                          if (is_dir)
-                          {
-                              RMDIR(file.c_str());
-                          }
-                          else
-                          {
-                              RMFILE(file.c_str());
-                          }
-                      },
-                      true);
-        RMDIR(dir.c_str());
-    }
-}
-
-void file_util::remove_file(const string& file)
-{
-    remove(file.c_str());
-}
-
-bool file_util::make_directory(const string& dir)
-{
-#ifdef _WIN32
-    CreateDirectoryA(dir.c_str(), nullptr);
-#else
-    if (mkdir(dir.c_str(), S_IRWXU | S_IRWXG | S_IROTH | S_IXOTH))
-    {
-        if (errno == EEXIST)
-        {
-            // not really an error, the directory already exists
-            return false;
-        }
-        throw runtime_error("error making directory " + dir + " " + strerror(errno));
-    }
-#endif
-    return true;
-}
-
-string file_util::get_temp_directory_path()
-{
-    const vector<string> potential_tmps = {"NGRAPH_TMP", "TMPDIR", "TMP", "TEMP", "TEMPDIR"};
-    string path;
-    for (const string& var : potential_tmps)
-    {
-        path = getenv_string(var.c_str());
-        if (!path.empty())
-        {
-            break;
-        }
-    }
-    if (path.empty())
-    {
-        path = "/tmp";
-    }
-    return path;
-}
-
-vector<char> file_util::read_file_contents(const string& path)
-{
-    size_t file_size = get_file_size(path);
-    vector<char> data(file_size);
-    FILE* f = fopen(path.c_str(), "rb");
-    if (f)
-    {
-        char* p = data.data();
-        size_t remainder = file_size;
-        size_t offset = 0;
-        while (f && remainder > 0)
-        {
-            size_t rc = fread(&p[offset], 1, remainder, f);
-            offset += rc;
-            remainder -= rc;
-        }
-        fclose(f);
-    }
-    else
-    {
-        throw runtime_error("error opening file '" + path + "'");
-    }
-    return data;
-}
-
-string file_util::read_file_to_string(const string& path)
-{
-    ifstream f(path);
-    stringstream ss;
-    ss << f.rdbuf();
-    return ss.str();
-}
-
#ifndef _WIN32
static void iterate_files_worker(const string& path,
                                 function<void(const string& file, bool is_dir)> func,
@@ -352,30 +240,3 @@ void file_util::iterate_files(const string& path,
            func(f, true);
        }
    }
-
-string file_util::tmp_filename(const string& extension)
-{
-    string rc;
-#ifdef _WIN32
-    rc = _tempnam(file_util::get_temp_directory_path().c_str(), "ngraph_");
-#else
-    string tmp_template =
-        file_util::path_join(file_util::get_temp_directory_path(), "ngraph_XXXXXX" + extension);
-    char* tmpname = strdup(tmp_template.c_str());
-    if (tmpname != nullptr)
-    {
-        // mkstemp opens the file with open() so we need to close it
-        close(mkstemps(tmpname, static_cast<int>(extension.size())));
-        rc = tmpname;
-        free(tmpname);
-    }
-#endif
-    return rc;
-}
-
-bool file_util::exists(const string& filename)
-{
-    struct stat buffer;
-    return (stat(filename.c_str(), &buffer) == 0);
-}

@@ -193,6 +193,12 @@ shared_ptr<Node> op::v0::ShapeOf::clone_with_new_inputs(const OutputVector& new_
{
    check_new_args_count(this, new_args);
    auto new_shape_of = make_shared<op::v0::ShapeOf>(new_args.at(0));
+   NGRAPH_CHECK(new_shape_of.get(),
+                new_shape_of != nullptr,
+                "Cannot clone ",
+                description(),
+                " operation with name ",
+                get_friendly_name());
    new_shape_of->set_is_foldable(m_is_foldable);
    return new_shape_of;
}
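
make_shared signals allocation failure by throwing std::bad_alloc; it never returns null, so this check cannot fire at runtime. Its value is documentary: it states the non-null invariant in a form the static analyzer tracks (the identical guard is added to TensorIterator::clone_with_new_inputs below). The shape of the pattern, sketched with a plain assertion instead of the NGRAPH_CHECK macro:

#include <cassert>
#include <memory>

struct Node
{
    virtual ~Node() = default;
};

std::shared_ptr<Node> clone_node()
{
    auto copy = std::make_shared<Node>(); // throws std::bad_alloc on failure
    assert(copy != nullptr);              // cannot fail; documents the invariant
    return copy;
}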

@@ -618,6 +618,12 @@ std::shared_ptr<Node>
    op::v0::TensorIterator::clone_with_new_inputs(const OutputVector& new_args) const
{
    auto op = make_shared<op::v0::TensorIterator>(new_args);
+   NGRAPH_CHECK(op.get(),
+                op != nullptr,
+                "Cannot clone ",
+                description(),
+                " operation with name ",
+                get_friendly_name());
    op->set_output_size(m_output_descriptions.size());

    std::vector<::ngraph::element::Type> types(m_body->get_parameters().size());

@@ -79,21 +79,18 @@ void op::util::ScatterBase::validate_and_infer_types()
                          data_shape.rank().get_length() - 1,
                          "Updates rank is expected to be indices rank + data rank - 1.");

-   bool compatible = true;
-   int64_t axis;
    bool is_axis_constant = op::is_constant(input_value(AXIS).get_node());

    // Get axis value if possible.
    if (is_axis_constant && data_shape.rank().is_static())
    {
+       bool compatible = true;
        const auto axis_const_input =
            as_type_ptr<op::v0::Constant>(input_value(AXIS).get_node_shared_ptr());
-       axis = axis_const_input->cast_vector<int64_t>().at(0);
+       int64_t axis = axis_const_input->cast_vector<int64_t>().at(0);
        axis = normalize_axis(this, axis, data_shape.rank().get_length());
-   }

-   if (is_axis_constant && data_shape.rank().is_static() && indices_shape.rank().is_static() &&
-       updates_shape.rank().is_static())
+       if (indices_shape.rank().is_static() && updates_shape.rank().is_static())
    {
@@ -113,7 +110,6 @@ void op::util::ScatterBase::validate_and_infer_types()
                compatible && updates_shape[indices_rank - 1 + i].compatible(data_shape[i]);
        }
    }
-
    NODE_VALIDATION_CHECK(this,
                          compatible,
                          "Updates shape must have appropriate dimensions equal to indices and "
@@ -126,6 +122,7 @@ void op::util::ScatterBase::validate_and_infer_types()
                          ", axis: ",
                          axis,
                          ".");
+   }

    if (data_shape.is_dynamic())
    {
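
This restructuring fixes a genuine uninitialized-read risk: axis was previously declared uninitialized at function scope, assigned only inside the first if, and then printed by the validation message even on paths where that if never ran. Declaring axis in the branch where it is computed, and nesting the shape checks inside that branch, removes every path on which the variable could be read before being written. A distilled version of the bug and the fix (illustrative names):

#include <iostream>

void before(bool have_axis)
{
    int axis; // uninitialized
    if (have_axis)
        axis = 2;
    std::cout << axis << '\n'; // reads axis even when have_axis is false: UB
}

void after(bool have_axis)
{
    if (have_axis)
    {
        int axis = 2; // declared exactly where a value is guaranteed
        std::cout << axis << '\n';
    }
}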

@@ -37,7 +37,7 @@ runtime::AlignedBuffer::AlignedBuffer(size_t byte_size, size_t alignment)
    size_t allocation_size = m_byte_size + alignment;
    m_allocated_buffer = static_cast<char*>(ngraph_malloc(allocation_size));
    m_aligned_buffer = m_allocated_buffer;
-   size_t mod = size_t(m_aligned_buffer) % alignment;
+   size_t mod = (alignment != 0) ? size_t(m_aligned_buffer) % alignment : 0;
    if (mod != 0)
    {
@@ -67,6 +67,10 @@ runtime::AlignedBuffer& runtime::AlignedBuffer::operator=(AlignedBuffer&& other)
{
    if (this != &other)
    {
+       if (m_allocated_buffer != nullptr)
+       {
+           free(m_allocated_buffer);
+       }
        m_allocated_buffer = other.m_allocated_buffer;
        m_aligned_buffer = other.m_aligned_buffer;
        m_byte_size = other.m_byte_size;
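
The second hunk fixes a resource leak: a move-assignment that blindly overwrites m_allocated_buffer drops whatever allocation the target already owned. Releasing the old buffer first, and nulling the source's pointers so its destructor does not double-free (the latter presumably happens in the lines below this hunk), is the canonical shape of the operation. A self-contained sketch:

#include <cstdlib>

class Buffer
{
public:
    explicit Buffer(std::size_t n)
        : m_data(static_cast<char*>(std::malloc(n)))
    {
    }
    ~Buffer() { std::free(m_data); }

    Buffer(const Buffer&) = delete;
    Buffer& operator=(const Buffer&) = delete;

    Buffer& operator=(Buffer&& other) noexcept
    {
        if (this != &other)
        {
            std::free(m_data);      // release what we already own
            m_data = other.m_data;  // steal the source's allocation
            other.m_data = nullptr; // leave the source safely empty
        }
        return *this;
    }

private:
    char* m_data;
};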

@@ -38,6 +38,10 @@ runtime::HostTensor::HostTensor(const ngraph::element::Type& element_type,
    {
        allocate_buffer();
    }
+   else
+   {
+       m_buffer_size = 0;
+   }
}

runtime::HostTensor::HostTensor(const element::Type& element_type,
@@ -52,6 +56,7 @@ runtime::HostTensor::HostTensor(const element::Type& element_type,
                                const std::string& name)
    : runtime::Tensor(
          std::make_shared<ngraph::descriptor::Tensor>(element_type, partial_shape, name))
+   , m_buffer_size(0)
{
    // Defer allocation until ptr is requested
}
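
Both hunks serve one goal: m_buffer_size is now assigned on every constructor path instead of only on the path that calls allocate_buffer(). An alternative with the same effect is a default member initializer, which covers all present and future constructors at once; a sketch of that variant (simplified class, an assumption on my part rather than the committed approach):

#include <cstddef>

class HostTensor
{
public:
    HostTensor() = default;        // m_buffer_size is still 0 here
    explicit HostTensor(std::size_t n)
        : m_buffer_size(n)         // overrides the default initializer
    {
    }

private:
    std::size_t m_buffer_size = 0; // one initializer, every constructor
};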

@@ -157,7 +157,6 @@ namespace ngraph
            const std::map<std::int64_t, Operator>::const_iterator
                find(std::int64_t version, const std::map<std::int64_t, Operator>& map)
            {
-               std::map<std::int64_t, Operator>::const_iterator it{};
                // Get the latest version.
                if (version == -1)
                {
@@ -165,13 +164,13 @@ namespace ngraph
                }
                while (version > 0)
                {
-                   it = map.find(version--);
+                   std::map<std::int64_t, Operator>::const_iterator it = map.find(version--);
                    if (it != std::end(map))
                    {
                        return it;
                    }
                }
-               return it;
+               return std::end(map);
            }
        }
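
Two problems are addressed at once here: the iterator is now scoped to the loop body, and the miss case returns std::end(map) instead of a default-constructed const_iterator. The latter matters because callers compare the result against std::end(map), and comparing a value-initialized map iterator with an iterator into a real container is not guaranteed to be meaningful. A standalone version of the descending version search, with a simplified Operator stand-in and an assumed body for the version == -1 branch:

#include <cstdint>
#include <map>
#include <string>

using Operator = std::string; // stand-in for the ONNX operator functor

// Finds the operator registered with the highest version <= the requested
// one; returns std::end(map) when nothing matches.
std::map<std::int64_t, Operator>::const_iterator
    find_op(std::int64_t version, const std::map<std::int64_t, Operator>& map)
{
    if (version == -1 && !map.empty())
        version = map.rbegin()->first; // -1 means "latest registered"
    while (version > 0)
    {
        auto it = map.find(version--);
        if (it != std::end(map))
            return it;
    }
    return std::end(map); // well-defined sentinel for the miss case
}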

@@ -89,9 +89,3 @@ TEST(file_util, path_join)
        EXPECT_STREQ("/test1/test2", file_util::path_join(s1, s2).c_str());
    }
}
-
-TEST(file_util, get_temp_directory_path)
-{
-    string tmp = file_util::get_temp_directory_path();
-    EXPECT_NE(0, tmp.size());
-}

@@ -73,6 +73,8 @@ shared_ptr<runtime::Tensor> runtime::ie::IE_Backend::create_tensor(
    const element::Type& element_type, const Shape& shape, void* data)
{
    shared_ptr<runtime::Tensor> tensor = make_shared<IETensor>(element_type, shape);
+   if (tensor == nullptr)
+       throw runtime_error("Cannot create IETensor!");
    tensor->write(data, shape_size(shape) * element_type.size());
    return tensor;
}

@@ -150,7 +150,10 @@ bool runtime::ie::IE_Executable::call(const vector<shared_ptr<runtime::Tensor>>&
    }

    // Prepare output blobs
-   string output_name = m_network.getOutputsInfo().begin()->first;
+   auto outInfo = m_network.getOutputsInfo();
+   if (outInfo.size() != 1)
+       THROW_IE_EXCEPTION << "Networks should contain only one output!";
+   string output_name = outInfo.begin()->first;

    infer_request.Infer();
    InferenceEngine::Blob::Ptr output = infer_request.GetBlob(output_name);
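
The original line called begin() on the map returned by getOutputsInfo() without checking that the network actually has exactly one output; dereferencing begin() on an empty map is undefined behavior. Fetching the map once, validating its size, and only then touching begin() is the general pattern. A container-level sketch (plain std::map, not the InferenceEngine API):

#include <map>
#include <stdexcept>
#include <string>

std::string single_output_name(const std::map<std::string, int>& outputs)
{
    if (outputs.size() != 1)
        throw std::runtime_error("Networks should contain only one output!");
    return outputs.begin()->first; // safe: exactly one element exists
}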