FieldValues: Use standard array functions (#67012)

Author: Ryan McKinley
Date: 2023-04-20 22:03:38 -07:00
Committed by: GitHub
Parent: 405bda7e99
Commit: 607b94aaa2
23 changed files with 70 additions and 69 deletions
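
The change is mechanical throughout the diff: `Field.values` is now treated as a plain array, so calls through the old `Vector` wrapper API (`add`, `get`, `set`, `toArray`) become standard array operations. A minimal sketch of the pattern, using a simplified `Field` shape for illustration rather than the actual `@grafana/data` interface:

```typescript
// Minimal sketch of the migration this commit applies; the Field shape
// below is simplified for illustration, not the real @grafana/data type.
interface Field<T = unknown> {
  name: string;
  values: T[]; // previously Vector<T>
}

const field: Field<number> = { name: 'value', values: [1, 2, 3] };

// Before: field.values.add(4)
field.values.push(4);

// Before: const first = field.values.get(0)
const first = field.values[0];

// Before: field.values.set(1, 20)
field.values[1] = 20;

// Before: const copy = field.values.toArray()
const copy = field.values.slice(); // values is already a plain array

console.log(first, copy);
```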

View File

@@ -284,8 +284,7 @@ exports[`better eslint`] = {
[0, 0, 0, "Unexpected any. Specify a different type.", "1"]
],
"packages/grafana-data/src/transformations/transformers/groupingToMatrix.ts:5381": [
[0, 0, 0, "Unexpected any. Specify a different type.", "0"],
[0, 0, 0, "Unexpected any. Specify a different type.", "1"]
[0, 0, 0, "Unexpected any. Specify a different type.", "0"]
],
"packages/grafana-data/src/transformations/transformers/histogram.ts:5381": [
[0, 0, 0, "Do not use any type assertions.", "0"],
@@ -293,6 +292,9 @@ exports[`better eslint`] = {
[0, 0, 0, "Unexpected any. Specify a different type.", "2"],
[0, 0, 0, "Unexpected any. Specify a different type.", "3"]
],
"packages/grafana-data/src/transformations/transformers/joinDataFrames.ts:5381": [
[0, 0, 0, "Unexpected any. Specify a different type.", "0"]
],
"packages/grafana-data/src/transformations/transformers/merge.ts:5381": [
[0, 0, 0, "Unexpected any. Specify a different type.", "0"],
[0, 0, 0, "Unexpected any. Specify a different type.", "1"],

View File

@@ -119,7 +119,7 @@ export class MutableDataFrame<T = any> extends FunctionalVector<T> implements Da
// Make sure the field starts with a given length
if (startLength) {
while (field.values.length < startLength) {
field.values.add(MISSING_VALUE);
field.values.push(MISSING_VALUE);
}
} else {
this.validate();
@@ -137,7 +137,7 @@ export class MutableDataFrame<T = any> extends FunctionalVector<T> implements Da
// Add empty elements until everything matches
for (const field of this.fields) {
while (field.values.length !== length) {
field.values.add(MISSING_VALUE);
field.values.push(MISSING_VALUE);
}
}
}
@@ -191,7 +191,7 @@ export class MutableDataFrame<T = any> extends FunctionalVector<T> implements Da
if (f.type !== FieldType.string && isString(v)) {
v = this.parseValue(f, v);
}
f.values.add(v);
f.values.push(v);
}
}
@@ -227,7 +227,7 @@ export class MutableDataFrame<T = any> extends FunctionalVector<T> implements Da
val = MISSING_VALUE;
}
field.values.add(val);
field.values.push(val);
}
}
@@ -238,7 +238,7 @@ export class MutableDataFrame<T = any> extends FunctionalVector<T> implements Da
const obj = (value as any) || {};
for (const field of this.fields) {
field.values.set(index, obj[field.name]);
field.values[index] = obj[field.name];
}
}
@@ -248,7 +248,7 @@ export class MutableDataFrame<T = any> extends FunctionalVector<T> implements Da
get(idx: number): T {
const v: any = {};
for (const field of this.fields) {
v[field.name] = field.values.get(idx);
v[field.name] = field.values[idx];
}
return v as T;
}

View File

@@ -2,7 +2,6 @@ import { isNumber } from 'lodash';
import { dateTime, isDateTimeInput } from '../datetime';
import { Field, FieldType } from '../types/dataFrame';
import { Vector } from '../types/vector';
type IndexComparer = (a: number, b: number) => number;
@@ -79,12 +78,12 @@ const falsyComparer = (a: unknown, b: unknown): number => {
return 0;
};
const timestampIndexComparer = (values: Vector<number>, reverse: boolean): IndexComparer => {
const timestampIndexComparer = (values: number[], reverse: boolean): IndexComparer => {
let mult = reverse ? -1 : 1;
return (a: number, b: number): number => mult * (values[a] - values[b]);
};
const timeIndexComparer = (values: Vector<unknown>, reverse: boolean): IndexComparer => {
const timeIndexComparer = (values: unknown[], reverse: boolean): IndexComparer => {
return (a: number, b: number): number => {
const vA = values[a];
const vB = values[b];
@@ -92,7 +91,7 @@ const timeIndexComparer = (values: Vector<unknown>, reverse: boolean): IndexComp
};
};
const booleanIndexComparer = (values: Vector<boolean>, reverse: boolean): IndexComparer => {
const booleanIndexComparer = (values: boolean[], reverse: boolean): IndexComparer => {
return (a: number, b: number): number => {
const vA = values[a];
const vB = values[b];
@@ -100,7 +99,7 @@ const booleanIndexComparer = (values: Vector<boolean>, reverse: boolean): IndexC
};
};
const numericIndexComparer = (values: Vector<number>, reverse: boolean): IndexComparer => {
const numericIndexComparer = (values: number[], reverse: boolean): IndexComparer => {
return (a: number, b: number): number => {
const vA = values[a];
const vB = values[b];
@@ -108,7 +107,7 @@ const numericIndexComparer = (values: Vector<number>, reverse: boolean): IndexCo
};
};
const stringIndexComparer = (values: Vector<string>, reverse: boolean): IndexComparer => {
const stringIndexComparer = (values: string[], reverse: boolean): IndexComparer => {
return (a: number, b: number): number => {
const vA = values[a];
const vB = values[b];
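
These comparers all share one shape: given the backing array and a direction, return a function that compares two row indices. Sorting an index array with such a comparer lets one ordering be applied to every column of a frame. A small standalone sketch of that idea — the driver code below is illustrative only, not the actual sortDataFrame implementation:

```typescript
// Illustrative only: sort row indices against one column, then reuse the
// resulting order for every other column of the same length.
type IndexComparer = (a: number, b: number) => number;

const numericIndexComparer = (values: number[], reverse: boolean): IndexComparer => {
  const mult = reverse ? -1 : 1;
  return (a, b) => mult * (values[a] - values[b]);
};

const times = [300, 100, 200];
const names = ['c', 'a', 'b'];

const order = times.map((_, i) => i); // [0, 1, 2]
order.sort(numericIndexComparer(times, false)); // [1, 2, 0]

// Apply the computed row order to any same-length column.
function reorder<T>(column: T[]): T[] {
  return order.map((i) => column[i]);
}

console.log(reorder(times)); // [100, 200, 300]
console.log(reorder(names)); // ['a', 'b', 'c']
```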

View File

@@ -290,7 +290,7 @@ export function doStandardCalcs(field: Field, ignoreNulls: boolean, nullAsZero:
previousDeltaUp: true,
};
const data = field.values.toArray(); // toArray() ensures we handle all vector types
const data = field.values;
calcs.count = ignoreNulls ? data.length : data.filter((val) => val != null).length;
const isNumberField = field.type === FieldType.number || FieldType.time;

View File

@@ -2,7 +2,7 @@ import { map } from 'rxjs/operators';
import { MutableDataFrame } from '../../dataframe';
import { getFieldDisplayName } from '../../field/fieldState';
import { DataFrame, DataTransformerInfo, Field, FieldType, SpecialValue, Vector } from '../../types';
import { DataFrame, DataTransformerInfo, Field, FieldType, SpecialValue } from '../../types';
import { fieldMatchers } from '../matchers';
import { FieldMatcherID } from '../matchers/ids';
@@ -111,8 +111,8 @@ export const groupingToMatrixTransformer: DataTransformerInfo<GroupingToMatrixTr
),
};
function uniqueValues(values: Vector): any[] {
const unique = new Set();
function uniqueValues<T>(values: T[]): T[] {
const unique = new Set<T>();
for (let index = 0; index < values.length; index++) {
unique.add(values[index]);
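
The hunk is cut off here; a complete generic helper along these lines would look roughly like the sketch below. The closing return statement is an assumption, since it is not shown in the diff.

```typescript
// Rough sketch of a generic unique-values helper matching the hunk above;
// the final return is assumed because the hunk is truncated.
function uniqueValues<T>(values: T[]): T[] {
  const unique = new Set<T>();
  for (let index = 0; index < values.length; index++) {
    unique.add(values[index]);
  }
  return Array.from(unique);
}

console.log(uniqueValues(['a', 'b', 'a'])); // ['a', 'b']
```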

View File

@@ -1,7 +1,7 @@
import intersect from 'fast_array_intersect';
import { getTimeField, sortDataFrame } from '../../dataframe';
import { DataFrame, Field, FieldMatcher, FieldType, Vector } from '../../types';
import { DataFrame, Field, FieldMatcher, FieldType } from '../../types';
import { fieldMatchers } from '../matchers';
import { FieldMatcherID } from '../matchers/ids';
@@ -351,7 +351,7 @@ export function join(tables: AlignedData[], nullModes?: number[][], mode: JoinMo
// Test a few samples to see if the values are ascending
// Only exported for tests
export function isLikelyAscendingVector(data: Vector | [], samples = 50) {
export function isLikelyAscendingVector(data: any[], samples = 50) {
const len = data.length;
// empty or single value

View File

@@ -23,8 +23,8 @@ export function getFlotPairs({ xField, yField, nullValueMode }: FlotPairsOptions
const pairs: any[][] = [];
for (let i = 0; i < length; i++) {
const x = vX.get(i);
let y = vY.get(i);
const x = vX[i];
let y = vY[i];
if (y === null) {
if (ignoreNulls) {

View File

@@ -135,7 +135,7 @@ export class Sparkline extends PureComponent<SparklineProps, State> {
return [sparkline.timeRange.from.valueOf(), sparkline.timeRange.to.valueOf()];
}
const vals = sparkline.x.values;
return [vals.get(0), vals.get(vals.length - 1)];
return [vals[0], vals[vals.length - 1]];
}
return [0, sparkline.y.values.length - 1];
},

View File

@@ -27,8 +27,8 @@ export function mapDataFrameToAlertPreview({ fields }: DataFrame): AlertPreview
for (let index = 0; index < instanceStatusCount; index++) {
const labelValues = labelIndexes.map((labelIndex) => [fields[labelIndex].name, fields[labelIndex].values[index]]);
const state = fields[stateFieldIndex]?.values?.get(index);
const info = fields[infoFieldIndex]?.values?.get(index);
const state = fields[stateFieldIndex]?.values?.[index];
const info = fields[infoFieldIndex]?.values?.[index];
if (isGrafanaAlertState(state)) {
instances.push({

View File

@@ -125,7 +125,7 @@ export function getLastNotNullFieldValue<T>(field: Field): T {
const data = field.values;
let idx = data.length - 1;
while (idx >= 0) {
const v = data.get(idx--);
const v = data[idx--];
if (v != null) {
return v;
}

View File

@@ -85,7 +85,7 @@ export function useSearchKeyboardNavigation(
setHighlightIndex({ ...highlightIndexRef.current });
break;
}
const url = urlsRef.current.values?.get(idx) as string;
const url = urlsRef.current.values?.[idx] as string;
if (url) {
locationService.push(locationUtil.stripBaseFromUrl(url));
}

View File

@@ -60,8 +60,8 @@ export function ExplainScorePopup({ name, explain, frame, row }: Props) {
showLineNumbers={false}
showMiniMap={false}
value={(() => {
const allowedActions = frame.fields.find((f) => f.name === 'allowed_actions')?.values?.get(row);
const dsUids = frame.fields.find((f) => f.name === 'ds_uid')?.values?.get(row);
const allowedActions = frame.fields.find((f) => f.name === 'allowed_actions')?.values?.[row];
const dsUids = frame.fields.find((f) => f.name === 'ds_uid')?.values?.[row];
return JSON.stringify({ dsUids: dsUids ?? [], allowedActions: allowedActions ?? [] }, null, 2);
})()}
readOnly={false}

View File

@@ -240,7 +240,7 @@ export const generateColumns = (
Cell: (p) => {
return (
<div {...p.cellProps} className={styles.explainItem} onClick={() => showExplainPopup(p.row.index)}>
{vals.get(p.row.index)}
{vals[p.row.index]}
</div>
);
},

View File

@@ -226,7 +226,7 @@ function getTermCountsFrom(frame: DataFrame): TermCount[] {
const vals = frame.fields[1].values;
const counts: TermCount[] = [];
for (let i = 0; i < frame.length; i++) {
counts.push({ term: keys.get(i), count: vals.get(i) });
counts.push({ term: keys[i], count: vals[i] });
}
return counts;
}

View File

@@ -81,10 +81,10 @@ export function timeSeriesToTableTransform(options: TimeSeriesTableTransformerOp
const labels = frame.fields[1].labels;
for (const labelKey of Object.keys(labelFields)) {
const labelValue = labels?.[labelKey] ?? null;
labelFields[labelKey].values.add(labelValue!);
labelFields[labelKey].values.push(labelValue!);
}
frameField.values.add(frame);
frameField.values.push(frame);
}
return result;
}

View File

@@ -25,7 +25,7 @@ export function getDerivedFields(dataFrame: DataFrame, derivedFieldConfigs: Deri
lineField.values.forEach((line) => {
for (const field of newFields) {
const logMatch = line.match(derivedFieldsGrouped[field.name][0].matcherRegex);
field.values.add(logMatch && logMatch[1]);
field.values.push(logMatch && logMatch[1]);
}
});

View File

@@ -38,9 +38,9 @@ export function appendResponseToBufferedData(response: LokiTailResponse, data: M
// Add each line
for (const [ts, line] of stream.values) {
tsField.values.add(new Date(parseInt(ts.slice(0, -6), 10)).toISOString());
lineField.values.add(line);
idField.values.add(createUid(ts, allLabelsString, line, usedUids, data.refId));
tsField.values.push(new Date(parseInt(ts.slice(0, -6), 10)).toISOString());
lineField.values.push(line);
idField.values.push(createUid(ts, allLabelsString, line, usedUids, data.refId));
}
}
}

View File

@@ -235,11 +235,11 @@ export function transformDFToTable(dfs: DataFrame[]): DataFrame[] {
dataFramesByRefId[refId].forEach((df) => {
const timeFields = df.fields[0]?.values ?? [];
const dataFields = df.fields[1]?.values ?? [];
timeFields.forEach((value) => timeField.values.add(value));
timeFields.forEach((value) => timeField.values.push(value));
dataFields.forEach((value) => {
valueField.values.add(parseSampleValue(value));
valueField.values.push(parseSampleValue(value));
const labelsForField = df.fields[1].labels ?? {};
labelFields.forEach((field) => field.values.add(getLabelValue(labelsForField, field.name)));
labelFields.forEach((field) => field.values.push(getLabelValue(labelsForField, field.name)));
});
});
@@ -527,14 +527,14 @@ function transformMetricDataToTable(md: MatrixOrVectorResult[], options: Transfo
md.forEach((d) => {
if (isMatrixData(d)) {
d.values.forEach((val) => {
timeField.values.add(val[0] * 1000);
timeField.values.push(val[0] * 1000);
metricFields.forEach((metricField) => metricField.values.push(getLabelValue(d.metric, metricField.name)));
valueField.values.add(parseSampleValue(val[1]));
valueField.values.push(parseSampleValue(val[1]));
});
} else {
timeField.values.add(d.value[0] * 1000);
timeField.values.push(d.value[0] * 1000);
metricFields.forEach((metricField) => metricField.values.push(getLabelValue(d.metric, metricField.name)));
valueField.values.add(parseSampleValue(d.value[1]));
valueField.values.push(parseSampleValue(d.value[1]));
}
});

View File

@@ -86,9 +86,9 @@ export function createTableFrame(
if (match) {
const traceId = match[1];
const time = timeField ? timeField.values[i] : null;
tableFrame.fields[0].values.add(time);
tableFrame.fields[1].values.add(traceId);
tableFrame.fields[2].values.add(line);
tableFrame.fields[0].values.push(time);
tableFrame.fields[1].values.push(traceId);
tableFrame.fields[2].values.push(line);
hasMatch = true;
}
}

View File

@@ -118,15 +118,15 @@ export function generateRandomNodes(count = 10) {
const edgesSet = new Set();
for (const node of nodes) {
nodeFields.id.values.add(node.id);
nodeFields.title.values.add(node.title);
nodeFields[NodeGraphDataFrameFieldNames.subTitle].values.add(node.subTitle);
nodeFields[NodeGraphDataFrameFieldNames.mainStat].values.add(node.stat1);
nodeFields[NodeGraphDataFrameFieldNames.secondaryStat].values.add(node.stat2);
nodeFields.arc__success.values.add(node.success);
nodeFields.arc__errors.values.add(node.error);
nodeFields.id.values.push(node.id);
nodeFields.title.values.push(node.title);
nodeFields[NodeGraphDataFrameFieldNames.subTitle].values.push(node.subTitle);
nodeFields[NodeGraphDataFrameFieldNames.mainStat].values.push(node.stat1);
nodeFields[NodeGraphDataFrameFieldNames.secondaryStat].values.push(node.stat2);
nodeFields.arc__success.values.push(node.success);
nodeFields.arc__errors.values.push(node.error);
const rnd = Math.random();
nodeFields[NodeGraphDataFrameFieldNames.icon].values.add(rnd > 0.9 ? 'database' : rnd < 0.1 ? 'cloud' : '');
nodeFields[NodeGraphDataFrameFieldNames.icon].values.push(rnd > 0.9 ? 'database' : rnd < 0.1 ? 'cloud' : '');
for (const edge of node.edges) {
const id = `${node.id}--${edge}`;
// We can have duplicate edges when we added some more by random
@@ -134,10 +134,10 @@ export function generateRandomNodes(count = 10) {
continue;
}
edgesSet.add(id);
edgesFrame.fields[0].values.add(`${node.id}--${edge}`);
edgesFrame.fields[1].values.add(node.id);
edgesFrame.fields[2].values.add(edge);
edgesFrame.fields[3].values.add(Math.random() * 100);
edgesFrame.fields[0].values.push(`${node.id}--${edge}`);
edgesFrame.fields[1].values.push(node.id);
edgesFrame.fields[2].values.push(edge);
edgesFrame.fields[3].values.push(Math.random() * 100);
}
}

View File

@@ -151,8 +151,8 @@ export function runLogsStream(
let timeoutId: ReturnType<typeof setTimeout>;
const pushNextEvent = () => {
data.fields[0].values.add(getRandomLine());
data.fields[1].values.add(Date.now());
data.fields[0].values.push(getRandomLine());
data.fields[1].values.push(Date.now());
subscriber.next({
data: [data],

View File

@@ -251,13 +251,13 @@ function normalizeStatsForNodes(nodesMap: { [id: string]: NodeDatumFromEdge }, e
if (mainStatField) {
newNode.mainStat = mainStatField;
mainStatValues.add(node.mainStatNumeric);
mainStatValues.push(node.mainStatNumeric);
newNode.dataFrameRowIndex = index;
}
if (secondaryStatField) {
newNode.secondaryStat = secondaryStatField;
secondaryStatValues.add(node.secondaryStatNumeric);
secondaryStatValues.push(node.secondaryStatNumeric);
newNode.dataFrameRowIndex = index;
}
return newNode;

View File

@@ -41,7 +41,7 @@ export const StateTimelineTooltip = ({
const linkLookup = new Set<string>();
if (field.getLinks) {
const v = field.values.get(datapointIdx);
const v = field.values[datapointIdx];
const disp = field.display ? field.display(v) : { text: `${v}`, numeric: +v };
field.getLinks({ calculatedValue: disp, valueRowIndex: datapointIdx }).forEach((link) => {
const key = `${link.title}/${link.href}`;
@@ -57,7 +57,7 @@ export const StateTimelineTooltip = ({
const dataFrameFieldIndex = field.state?.origin;
const fieldFmt = field.display || getDisplayProcessor({ field, timeZone, theme });
const value = field.values.get(datapointIdx!);
const value = field.values[datapointIdx!];
const display = fieldFmt(value);
const fieldDisplayName = dataFrameFieldIndex
? getFieldDisplayName(
@@ -70,10 +70,10 @@ export const StateTimelineTooltip = ({
const nextStateIdx = findNextStateIndex(field, datapointIdx!);
let nextStateTs;
if (nextStateIdx) {
nextStateTs = xField.values.get(nextStateIdx!);
nextStateTs = xField.values[nextStateIdx!];
}
const stateTs = xField.values.get(datapointIdx!);
const stateTs = xField.values[datapointIdx!];
let toFragment = null;
let durationFragment = null;
@@ -88,7 +88,7 @@ export const StateTimelineTooltip = ({
);
toFragment = (
<>
{' to'} <strong>{xFieldFmt(xField.values.get(nextStateIdx!)).text}</strong>
{' to'} <strong>{xFieldFmt(xField.values[nextStateIdx!]).text}</strong>
</>
);
}
@@ -99,7 +99,7 @@ export const StateTimelineTooltip = ({
{fieldDisplayName}
<br />
<SeriesTableRow label={display.text} color={display.color || FALLBACK_COLOR} isActive />
From <strong>{xFieldFmt(xField.values.get(datapointIdx!)).text}</strong>
From <strong>{xFieldFmt(xField.values[datapointIdx!]).text}</strong>
{toFragment}
{durationFragment}
</div>