Clean-up files in tests helpers (#1173)

This commit is contained in:
Ilya Lavrenov
2020-07-01 22:34:43 +03:00
committed by GitHub
parent acaab888f2
commit c9749ce397
9 changed files with 254 additions and 289 deletions

View File

@@ -23,6 +23,21 @@
TEST_P(ModelTransformationsTest, LPT) {}
static void checkLayerInputPrecision(const ICNNNetwork& network, const std::string& layerName, Precision expectedPrecision, int inputIndex = -1) {
CNNLayerPtr layer = getLayer(network, layerName);
if (layer == nullptr) {
THROW_IE_EXCEPTION << "layer '" << layerName << "' was not found";
}
for (size_t index = 0ul; index < layer->insData.size(); ++index) {
if ((inputIndex != -1) && (index != inputIndex)) {
continue;
}
const DataWeakPtr weakData = layer->insData[index];
ASSERT_EQ(expectedPrecision, weakData.lock()->getPrecision()) << " unexpected precision " << weakData.lock()->getPrecision() << " for layer " << layerName;
}
}
ModelParams getModelParams(const std::string modelName) {
std::map<std::string, ModelParams> modelParams = {
{
@@ -68,7 +83,7 @@ std::map<std::string, ModelParams> modelParams = {
for (const std::pair<std::string, std::string> item : fakeQuantizeAndConcolutionItems) {
TestsCommonFunc::checkLayerOuputPrecision(*usedNetwork, item.first, Precision::U8);
if (!item.second.empty()) {
TestsCommonFunc::checkLayerInputPrecision(*usedNetwork, item.second, Precision::U8, 0);
checkLayerInputPrecision(*usedNetwork, item.second, Precision::U8, 0);
}
}
}

View File

@@ -90,6 +90,19 @@ class BinaryConvolutionOnlyTest : public TestsCommon,
protected:
// Fills 'data' with a deterministic binary pattern: element i is +1 when
// sin(i) is strictly positive, otherwise -1.
static void fill_data_bin(float *data, size_t size) {
    size_t idx = 0;
    while (idx < size) {
        const float s = sinf(static_cast<float>(idx));
        data[idx] = (s > 0.f) ? 1.f : -1.f;
        ++idx;
    }
}
// Fills the packed-bit buffer: 'size' counts bits, so div_up(size, 8) bytes
// are written, each byte holding its index modulo 255 truncated to int8.
static void fill_data_bin_packed(int8_t *data, size_t size) {
    const int nbits = 8;
    const size_t packedBytes = div_up(size, nbits);
    for (size_t byteIdx = 0; byteIdx < packedBytes; ++byteIdx) {
        data[byteIdx] = static_cast<int8_t>(byteIdx % 255);
    }
}
// Standard convolution output-size formula:
// floor((in_dim + 2*pad_begin - kernel) / stride) + 1.
size_t calculateOutDim(size_t in_dim, size_t kernel, size_t stride, size_t pad_begin) {
    const size_t padded_dim = in_dim + 2lu * pad_begin;
    const size_t steps = (padded_dim - kernel) / stride;
    return steps + 1lu;
}

View File

@@ -181,6 +181,35 @@ void SingleLayerTransformationsTest::compareInDetails(
}
}
// Element-wise comparison of 'res' against 'ref':
//  - NaN at the same position in both arrays counts as a match;
//  - if either value is exactly zero, an absolute tolerance (zero_diff) is
//    used, since a relative difference is meaningless against zero;
//  - otherwise the difference relative to the larger of the two values must
//    stay below max_diff.
// @param res           computed values
// @param ref           reference values
// @param size          number of elements in each array
// @param max_diff      maximum allowed relative difference
// @param assertDetails extra text appended to the failure message
// @param zero_diff     absolute tolerance used when a value is zero
static void relative_compare(
    const float* res,
    const float* ref,
    size_t size,
    float max_diff = 0.01f,
    const std::string& assertDetails = "",  // const-ref: avoid copying the string on every call
    float zero_diff = 1e-7f) {
    for (size_t i = 0lu; i < size; i++) {
        if (std::isnan(res[i]) && std::isnan(ref[i])) {
            continue;
        }
        if ((ref[i] == 0.f) || (res[i] == 0.f)) {
            const float diff = std::fabs(res[i] - ref[i]);
            ASSERT_TRUE(diff < zero_diff) <<
                "\nAbsolute comparison of values ref: " << ref[i] << " and res: " << res[i] <<
                ", diff: " << diff <<
                ", index: " << i << "\n" << assertDetails;
        } else {
            // parenthesized (std::max) dodges any max() macro leaking from system headers
            const float diff = std::fabs((res[i] - ref[i]) / (std::max)(ref[i], res[i]));
            ASSERT_LT(diff, max_diff) <<
                "\nRelative comparison of values ref: " << ref[i] << " and res: " << res[i] <<
                ", diff: " << diff <<
                ", max_diff: " << max_diff <<
                ", index: " << i << "\n" << assertDetails;
        }
    }
}
void SingleLayerTransformationsTest::SetUp() {
try {
const SingleLayerTransformationsTestParams p = ::testing::WithParamInterface<SingleLayerTransformationsTestParams>::GetParam();