Performance: Standardize lodash imports to use destructured members (#33040)

* Performance: Standardize lodash imports to use destructured members
Changes lodash imports of the form `import x from 'lodash/x'` to
`import { x } from 'lodash'` to reduce bundle size.

* Remove unnecessary _ import from Graph component

* Enforce lodash import style

* Fix remaining lodash imports
This commit is contained in:
kay delaney
2021-04-21 08:38:00 +01:00
committed by GitHub
parent 2bb7eb18d1
commit bad048b7ba
299 changed files with 1135 additions and 1137 deletions

View File

@@ -1,4 +1,4 @@
import _ from 'lodash';
import { map } from 'lodash';
import { Observable, of, throwError } from 'rxjs';
import {
ArrayVector,
@@ -458,7 +458,7 @@ describe('ElasticDatasource', function (this: any) {
await expect(ds.getFields()).toEmitValuesWith((received) => {
expect(received.length).toBe(1);
const fieldObjects = received[0];
const fields = _.map(fieldObjects, 'text');
const fields = map(fieldObjects, 'text');
expect(fields).toEqual([
'@timestamp',
@@ -481,7 +481,7 @@ describe('ElasticDatasource', function (this: any) {
await expect(ds.getFields('number')).toEmitValuesWith((received) => {
expect(received.length).toBe(1);
const fieldObjects = received[0];
const fields = _.map(fieldObjects, 'text');
const fields = map(fieldObjects, 'text');
expect(fields).toEqual(['system.cpu.system', 'system.cpu.user', 'system.process.cpu.total']);
});
@@ -493,7 +493,7 @@ describe('ElasticDatasource', function (this: any) {
await expect(ds.getFields('date')).toEmitValuesWith((received) => {
expect(received.length).toBe(1);
const fieldObjects = received[0];
const fields = _.map(fieldObjects, 'text');
const fields = map(fieldObjects, 'text');
expect(fields).toEqual(['@timestamp', '__timestamp', '@timestampnano']);
});
@@ -556,7 +556,7 @@ describe('ElasticDatasource', function (this: any) {
await expect(ds.getFields(undefined, range)).toEmitValuesWith((received) => {
expect(received.length).toBe(1);
const fieldObjects = received[0];
const fields = _.map(fieldObjects, 'text');
const fields = map(fieldObjects, 'text');
expect(fields).toEqual(['@timestamp', 'beat.hostname']);
});
});
@@ -693,7 +693,7 @@ describe('ElasticDatasource', function (this: any) {
expect(received.length).toBe(1);
const fieldObjects = received[0];
const fields = _.map(fieldObjects, 'text');
const fields = map(fieldObjects, 'text');
expect(fields).toEqual([
'@timestamp_millis',
'classification_terms',
@@ -720,7 +720,7 @@ describe('ElasticDatasource', function (this: any) {
expect(received.length).toBe(1);
const fieldObjects = received[0];
const fields = _.map(fieldObjects, 'text');
const fields = map(fieldObjects, 'text');
expect(fields).toEqual([
'justification_blob.overall_vote_score',
'justification_blob.shallow.jsi.sdb.dsel2.bootlegged-gille.botness',
@@ -739,7 +739,7 @@ describe('ElasticDatasource', function (this: any) {
expect(received.length).toBe(1);
const fieldObjects = received[0];
const fields = _.map(fieldObjects, 'text');
const fields = map(fieldObjects, 'text');
expect(fields).toEqual(['@timestamp_millis']);
});
});

View File

@@ -1,4 +1,4 @@
import _ from 'lodash';
import { cloneDeep, find, isNumber, isObject, isString, first as _first, map as _map } from 'lodash';
import {
DataFrame,
DataLink,
@@ -302,7 +302,7 @@ export class ElasticDatasource extends DataSourceApi<ElasticsearchQuery, Elastic
let time = getFieldFromSource(source, timeField);
if (typeof hits[i].fields !== 'undefined') {
const fields = hits[i].fields;
if (_.isString(fields[timeField]) || _.isNumber(fields[timeField])) {
if (isString(fields[timeField]) || isNumber(fields[timeField])) {
time = fields[timeField];
}
}
@@ -380,7 +380,7 @@ export class ElasticDatasource extends DataSourceApi<ElasticsearchQuery, Elastic
return this.getFields('date')
.pipe(
mergeMap((dateFields) => {
const timeField: any = _.find(dateFields, { text: this.timeField });
const timeField: any = find(dateFields, { text: this.timeField });
if (!timeField) {
return of({ status: 'error', message: 'No date field named ' + this.timeField + ' found' });
}
@@ -508,7 +508,7 @@ export class ElasticDatasource extends DataSourceApi<ElasticsearchQuery, Elastic
const targets: ElasticsearchQuery[] = [{ refId: `${row.dataFrame.refId}`, metrics: [{ type: 'logs', id: '1' }] }];
const elasticResponse = new ElasticResponse(targets, transformHitsBasedOnDirection(response, sort));
const logResponse = elasticResponse.getLogs(this.logMessageField, this.logLevelField);
const dataFrame = _.first(logResponse.data);
const dataFrame = _first(logResponse.data);
if (!dataFrame) {
return { data: [] };
}
@@ -536,7 +536,7 @@ export class ElasticDatasource extends DataSourceApi<ElasticsearchQuery, Elastic
query(options: DataQueryRequest<ElasticsearchQuery>): Observable<DataQueryResponse> {
let payload = '';
const targets = this.interpolateVariablesInQueries(_.cloneDeep(options.targets), options.scopedVars);
const targets = this.interpolateVariablesInQueries(cloneDeep(options.targets), options.scopedVars);
const sentTargets: ElasticsearchQuery[] = [];
let targetsContainsLogsQuery = targets.some((target) => hasMetricOfType(target, 'logs'));
@@ -661,17 +661,17 @@ export class ElasticDatasource extends DataSourceApi<ElasticsearchQuery, Elastic
const subObj = obj[key];
// Check mapping field for nested fields
if (_.isObject(subObj.properties)) {
if (isObject(subObj.properties)) {
fieldNameParts.push(key);
getFieldsRecursively(subObj.properties);
}
if (_.isObject(subObj.fields)) {
if (isObject(subObj.fields)) {
fieldNameParts.push(key);
getFieldsRecursively(subObj.fields);
}
if (_.isString(subObj.type)) {
if (isString(subObj.type)) {
const fieldName = fieldNameParts.concat(key).join('.');
// Hide meta-fields and check field type
@@ -704,7 +704,7 @@ export class ElasticDatasource extends DataSourceApi<ElasticsearchQuery, Elastic
}
// transform to array
return _.map(fields, (value) => {
return _map(fields, (value) => {
return value;
});
})
@@ -729,7 +729,7 @@ export class ElasticDatasource extends DataSourceApi<ElasticsearchQuery, Elastic
}
const buckets = res.responses[0].aggregations['1'].buckets;
return _.map(buckets, (bucket) => {
return _map(buckets, (bucket) => {
return {
text: bucket.key_as_string || bucket.key,
value: bucket.key,

View File

@@ -1,4 +1,4 @@
import _ from 'lodash';
import { clone, filter, find, identity, isArray, keys, map, uniq, values as _values } from 'lodash';
import flatten from 'app/core/utils/flatten';
import * as queryDef from './query_def';
import TableModel from 'app/core/table_model';
@@ -144,7 +144,7 @@ export class ElasticResponse {
) {
// add columns
if (table.columns.length === 0) {
for (const propKey of _.keys(props)) {
for (const propKey of keys(props)) {
table.addColumn({ text: propKey, filterable: true });
}
table.addColumn({ text: aggDef.field, filterable: true });
@@ -155,11 +155,11 @@ export class ElasticResponse {
table.addColumn({ text: metricName });
values.push(value);
};
const buckets = _.isArray(esAgg.buckets) ? esAgg.buckets : [esAgg.buckets];
const buckets = isArray(esAgg.buckets) ? esAgg.buckets : [esAgg.buckets];
for (const bucket of buckets) {
const values = [];
for (const propValues of _.values(props)) {
for (const propValues of _values(props)) {
values.push(propValues);
}
@@ -197,7 +197,7 @@ export class ElasticResponse {
}
default: {
let metricName = this.getMetricName(metric.type);
const otherMetrics = _.filter(target.metrics, { type: metric.type });
const otherMetrics = filter(target.metrics, { type: metric.type });
      // if there is more than one metric of the same type, include the field name in the property
if (otherMetrics.length > 1) {
@@ -228,7 +228,7 @@ export class ElasticResponse {
const maxDepth = target.bucketAggs!.length - 1;
for (aggId in aggs) {
aggDef = _.find(target.bucketAggs, { id: aggId });
aggDef = find(target.bucketAggs, { id: aggId });
esAgg = aggs[aggId];
if (!aggDef) {
@@ -244,7 +244,7 @@ export class ElasticResponse {
} else {
for (const nameIndex in esAgg.buckets) {
bucket = esAgg.buckets[nameIndex];
props = _.clone(props);
props = clone(props);
if (bucket.key !== void 0) {
props[aggDef.field] = bucket.key;
} else {
@@ -304,12 +304,12 @@ export class ElasticResponse {
if (queryDef.isPipelineAgg(series.metric)) {
if (series.metric && queryDef.isPipelineAggWithMultipleBucketPaths(series.metric)) {
const agg: any = _.find(target.metrics, { id: series.metricId });
const agg: any = find(target.metrics, { id: series.metricId });
if (agg && agg.settings.script) {
metricName = getScriptValue(agg);
for (const pv of agg.pipelineVariables) {
const appliedAgg: any = _.find(target.metrics, { id: pv.pipelineAgg });
const appliedAgg: any = find(target.metrics, { id: pv.pipelineAgg });
if (appliedAgg) {
metricName = metricName.replace('params.' + pv.name, describeMetric(appliedAgg));
}
@@ -318,7 +318,7 @@ export class ElasticResponse {
metricName = 'Unset';
}
} else {
const appliedAgg: any = _.find(target.metrics, { id: series.field });
const appliedAgg: any = find(target.metrics, { id: series.field });
if (appliedAgg) {
metricName += ' ' + describeMetric(appliedAgg);
} else {
@@ -329,7 +329,7 @@ export class ElasticResponse {
metricName += ' ' + series.field;
}
const propKeys = _.keys(series.props);
const propKeys = keys(series.props);
if (propKeys.length === 0) {
return metricName;
}
@@ -347,7 +347,7 @@ export class ElasticResponse {
}
nameSeries(seriesList: any, target: ElasticsearchQuery) {
const metricTypeCount = _.uniq(_.map(seriesList, 'metric')).length;
const metricTypeCount = uniq(map(seriesList, 'metric')).length;
for (let i = 0; i < seriesList.length; i++) {
const series = seriesList[i];
@@ -394,7 +394,7 @@ export class ElasticResponse {
}
trimDatapoints(aggregations: any, target: ElasticsearchQuery) {
const histogram: any = _.find(target.bucketAggs, { type: 'date_histogram' });
const histogram: any = find(target.bucketAggs, { type: 'date_histogram' });
const shouldDropFirstAndLast = histogram && histogram.settings && histogram.settings.trimEdges;
if (shouldDropFirstAndLast) {
@@ -488,11 +488,11 @@ export class ElasticResponse {
});
});
})
.filter(_.identity);
.filter(identity);
// If meta and searchWords already exists, add the words and
// deduplicate otherwise create a new set of search words.
const searchWords = series.meta?.searchWords
? _.uniq([...series.meta.searchWords, ...newSearchWords])
? uniq([...series.meta.searchWords, ...newSearchWords])
: [...newSearchWords];
series.meta = series.meta ? { ...series.meta, searchWords } : { searchWords };
}