Minor changes to integrate AA algorithm into NNCF (#13765)

Andrey Churkin 2022-11-10 10:17:50 +00:00 committed by GitHub
parent e2c320a455
commit dbb91c5e69
2 changed files with 24 additions and 11 deletions

@@ -181,8 +181,8 @@ class AccuracyAwareCommon(Algorithm):
         default_quantization_config = self._preset_conversion_algo.config
         if not self._original_per_sample_metrics:
-            _, self._original_per_sample_metrics = \
-                self._evaluate_model(model=model, subset_indices=self._diff_subset_indices)
+            self._original_per_sample_metrics = self._calculate_per_sample_metrics(model,
+                                                                                    self._diff_subset_indices)
         # change quantization parameters of the model
         if self._config.tune_hyperparams:
@@ -373,8 +373,8 @@ class AccuracyAwareCommon(Algorithm):
         """
         if qmodel_per_sample_metrics is None:
             # get quantized model predictions
-            _, qmodel_per_sample_metrics = self._evaluate_model(model=model,
-                                                                subset_indices=self._diff_subset_indices)
+            qmodel_per_sample_metrics = self._calculate_per_sample_metrics(model,
+                                                                           self._diff_subset_indices)
         ranking_subset = self._get_ranking_subset(qmodel_per_sample_metrics, metric_name)  # not sorted
         node_importance_score = self._calculate_node_importance_scores(model,
@@ -427,19 +427,25 @@ class AccuracyAwareCommon(Algorithm):
             logger.debug('Changed\\Removed a block of %d FQ layers: %s', len(modified_fq_layers),
                          modified_fq_layers)
             change_fqs += modified_fq_layers
-            self._engine.set_model(modified_model)
-            self._engine.allow_pairwise_subset = True
-            index_sampler = create_sampler(self._engine, samples=list(ranking_subset))
-            metrics, *_ = self._engine.predict(sampler=index_sampler)
-            self._engine.allow_pairwise_subset = False
-            logger.update_progress(self._config.ranking_subset_size)
-            ranking_metric = self._metrics_config[metric_name].ranking
-            node_importance_score[node.fullname] = ranking_metric.comparator(metrics[ranking_metric.name])
+            node_importance_score[node.fullname] = self._get_score(modified_model,
+                                                                   list(ranking_subset),
+                                                                   metric_name)

         eu.reset_dataset_to_default(self._engine)
         return node_importance_score

+    def _get_score(self, model, ranking_subset, metric_name):
+        self._engine.set_model(model)
+        self._engine.allow_pairwise_subset = True
+        index_sampler = create_sampler(self._engine, samples=list(ranking_subset))
+        metrics, *_ = self._engine.predict(sampler=index_sampler)
+        self._engine.allow_pairwise_subset = False
+        ranking_metric = self._metrics_config[metric_name].ranking
+        score = ranking_metric.comparator(metrics[ranking_metric.name])
+        return score
+
     def _modify_model_in_scope(self, model, nodes_names):
         return self._graph_transformer.remove_fq_nodes(deepcopy(model), nodes_names)
@@ -533,6 +539,10 @@ class AccuracyAwareCommon(Algorithm):
             logger.update_progress(predict_step_size)
         return metrics, metrics_per_sample

+    def _calculate_per_sample_metrics(self, model, subset_indices):
+        _, per_sample_metrics = self._evaluate_model(model, subset_indices=subset_indices)
+        return per_sample_metrics
+
     def _request_alt_statistics(self, model):
         pass
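
Taken together, the two new helpers pull the engine bookkeeping out of the node-ranking loop: _calculate_per_sample_metrics wraps per-sample evaluation, and _get_score turns a prediction on the ranking subset into a single comparable value. A minimal standalone sketch of this helper-extraction pattern follows; DummyEngine, AccuracyAwareSketch, and the 'accuracy' metric name are illustrative stand-ins, not the real POT/NNCF API.

class DummyEngine:
    """Stand-in engine: holds precomputed per-sample scores."""

    def __init__(self, per_sample_scores):
        self._scores = per_sample_scores
        self.model = None

    def set_model(self, model):
        self.model = model

    def predict(self, samples):
        # Return an aggregate metric dict and the per-sample values for the subset.
        subset = [self._scores[i] for i in samples]
        return {'accuracy': sum(subset) / len(subset)}, subset


class AccuracyAwareSketch:
    """Mimics the extracted helpers: evaluation and scoring behind two small methods."""

    def __init__(self, engine):
        self._engine = engine

    def _calculate_per_sample_metrics(self, model, subset_indices):
        self._engine.set_model(model)
        _, per_sample_metrics = self._engine.predict(samples=subset_indices)
        return per_sample_metrics

    def _get_score(self, model, ranking_subset, metric_name):
        self._engine.set_model(model)
        metrics, _ = self._engine.predict(samples=list(ranking_subset))
        return metrics[metric_name]


engine = DummyEngine(per_sample_scores=[1.0, 0.0, 1.0, 1.0])
algo = AccuracyAwareSketch(engine)
print(algo._calculate_per_sample_metrics(model=None, subset_indices=[0, 1]))          # [1.0, 0.0]
print(algo._get_score(model=None, ranking_subset=[2, 3], metric_name='accuracy'))     # 1.0

Because both callers in the diff now go through these two methods, a backend-specific integration (for example an NNCF-based one) only needs to override this evaluation seam rather than reimplement the ranking loop.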

@@ -175,10 +175,13 @@ def sort_by_logit_distance(u, v, reverse=False, distance='cosine'):
                                                    scipy.special.softmax(v))
     mse_distance = lambda u, v: np.mean((u - v) ** 2)
+    nmse_distance = lambda u, v: np.dot(u - v, u - v) / np.dot(u, u)
     distance_function = {
         'cosine': scipy.spatial.distance.cosine,
         'kd': kd_distance,
         'mse': mse_distance,
+        'nmse': nmse_distance,
     }
     distance_between_samples = np.array([distance_function[distance](ui.flatten(), vi.flatten())
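
For reference, the newly added 'nmse' option is a normalized squared error between two logit vectors: the squared error is divided by the energy of the reference logits. A quick standalone check of the formula (NumPy only; the values are illustrative, not from the repository):

import numpy as np

u = np.array([2.0, -1.0, 0.5])    # reference logits
v = np.array([1.5, -0.5, 0.25])   # logits compared against u

nmse = np.dot(u - v, u - v) / np.dot(u, u)
print(nmse)  # 0.5625 / 5.25 ≈ 0.1071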