FieldValues: Use plain arrays instead of Vector (part 3 of 2) (#66612)

Co-authored-by: Ryan McKinley <ryantxu@gmail.com>
Leon Sorokin 2023-04-20 09:59:18 -05:00 committed by GitHub
parent 24696d593b
commit b24ba7b7ae
109 changed files with 594 additions and 648 deletions

View File

@ -4627,9 +4627,6 @@ exports[`better eslint`] = {
[0, 0, 0, "Unexpected any. Specify a different type.", "5"],
[0, 0, 0, "Unexpected any. Specify a different type.", "6"]
],
"public/app/plugins/datasource/loki/getDerivedFields.ts:5381": [
[0, 0, 0, "Unexpected any. Specify a different type.", "0"]
],
"public/app/plugins/datasource/loki/queryUtils.ts:5381": [
[0, 0, 0, "Do not use any type assertions.", "0"]
],
@ -5089,7 +5086,8 @@ exports[`better eslint`] = {
],
"public/app/plugins/datasource/testdata/nodeGraphUtils.ts:5381": [
[0, 0, 0, "Do not use any type assertions.", "0"],
[0, 0, 0, "Unexpected any. Specify a different type.", "1"]
[0, 0, 0, "Unexpected any. Specify a different type.", "1"],
[0, 0, 0, "Unexpected any. Specify a different type.", "2"]
],
"public/app/plugins/datasource/testdata/runStreams.ts:5381": [
[0, 0, 0, "Unexpected any. Specify a different type.", "0"]

View File

@ -72,7 +72,7 @@ const y = frame.fields.find((field) => field.name === yField);
const size = frame.fields.find((field) => field.name === sizeField);
for (let i = 0; i < frame.length; i++) {
const row = [x?.values.get(i), y?.values.get(i), size?.values.get(i)];
const row = [x?.values[i], y?.values[i], size?.values[i]];
// ...
}
@ -100,7 +100,7 @@ const valueField = frame.fields.find((field) => field.type === FieldType.number)
return (
<div>
{valueField
? valueField.values.toArray().map((value) => {
? valueField.values.map((value) => {
const displayValue = valueField.display!(value);
return (
<p style={{ color: displayValue.color }}>

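The two hunks above capture the migration in miniature: rows are read with bracket indexing instead of `values.get(i)`, and array methods such as `map` are called on `values` directly rather than via `values.toArray()`. Below is a minimal, self-contained sketch of that read pattern, using hand-rolled `Field`/`DataFrame` stand-ins instead of the real `@grafana/data` types; the field names and numbers are illustrative.

```ts
// Local stand-ins for the @grafana/data shapes used above; illustrative only.
interface Field {
  name: string;
  values: number[]; // plain array after the migration (previously Vector<number>)
}

interface DataFrame {
  fields: Field[];
  length: number;
}

const frame: DataFrame = {
  fields: [
    { name: 'x', values: [1, 2, 3] },
    { name: 'y', values: [10, 20, 30] },
  ],
  length: 3,
};

const x = frame.fields.find((field) => field.name === 'x');
const y = frame.fields.find((field) => field.name === 'y');

for (let i = 0; i < frame.length; i++) {
  // Bracket indexing replaces values.get(i).
  const row = [x?.values[i], y?.values[i]];
  console.log(row);
}

// Array methods apply directly; no values.toArray() round-trip is needed.
const labels = y?.values.map((value) => `y=${value}`) ?? [];
console.log(labels);
```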
View File

@ -24,6 +24,7 @@ describe('Array DataFrame', () => {
// Check map
expect(frame.map((row) => row.name)).toEqual(expectedNames);
expect(frame[0].name).toEqual(input[0].name);
let names: string[] = [];
for (const row of frame) {

View File

@ -1,4 +1,4 @@
import { QueryResultMeta } from '../types';
import { makeArrayIndexableVector, QueryResultMeta } from '../types';
import { Field, FieldType, DataFrame } from '../types/dataFrame';
import { FunctionalVector } from '../vector/FunctionalVector';
import { vectorToArray } from '../vector/vectorToArray';
@ -15,6 +15,7 @@ class ArrayPropertyVector<T = any> extends FunctionalVector<T> {
constructor(private source: any[], private prop: string) {
super();
return makeArrayIndexableVector(this);
}
get length(): number {
@ -64,6 +65,7 @@ export class ArrayDataFrame<T = any> extends FunctionalVector<T> implements Data
} else {
this.setFieldsFromObject(first);
}
return makeArrayIndexableVector(this);
}
/**

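The `ArrayPropertyVector` and `ArrayDataFrame` constructors above now end with `return makeArrayIndexableVector(this)`. Returning the Proxy from the constructor swaps it in for `this`, so every instance handed to callers already supports `vec[i]` reads and writes even though the class itself only implements `get`/`set`. Below is a self-contained sketch of that trick; the `Vector` interface and `ArrayBackedVector` class are simplified stand-ins, not the Grafana implementation.

```ts
// Minimal Vector shape; the real @grafana/data interface has more members.
interface Vector<T = unknown> {
  length: number;
  get(index: number): T;
  set(index: number, value: T): void;
}

// Forward numeric property access to get/set so vec[i] works on a Vector.
function makeArrayIndexableVector<T extends Vector>(v: T): T {
  return new Proxy(v, {
    get(target, property, receiver) {
      if (typeof property !== 'symbol') {
        const idx = +property;
        if (String(idx) === property) {
          return target.get(idx);
        }
      }
      return Reflect.get(target, property, receiver);
    },
    set(target, property, value, receiver) {
      if (typeof property !== 'symbol') {
        const idx = +property;
        if (String(idx) === property) {
          target.set(idx, value);
          return true;
        }
      }
      return Reflect.set(target, property, value, receiver);
    },
  });
}

// Declaration merge: declares vec[i] on the type; the Proxy provides it at runtime.
interface ArrayBackedVector {
  [index: number]: number;
}

class ArrayBackedVector implements Vector<number> {
  constructor(private buffer: number[] = []) {
    // Returning the Proxy replaces `this` for callers, so instances are
    // already index-wrapped the moment they are constructed.
    return makeArrayIndexableVector(this);
  }
  get length(): number {
    return this.buffer.length;
  }
  get(index: number): number {
    return this.buffer[index];
  }
  set(index: number, value: number): void {
    this.buffer[index] = value;
  }
}

const vec = new ArrayBackedVector([10, 20, 30]);
console.log(vec[1]); // 20, resolved through the Proxy get trap
vec[2] = 33; // routed through the set trap
console.log(vec.get(2)); // 33
```

The interface/class merge only declares the numeric index on the type; the Proxy supplies the behaviour at runtime.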
View File

@ -49,7 +49,7 @@ export function getFieldDisplayValuesProxy(options: {
// TODO: we could supply the field here for the getDisplayProcessor fallback but we would also need theme which
// we do not have access to here
const displayProcessor = field.display ?? getDisplayProcessor();
const raw = field.values.get(options.rowIndex);
const raw = field.values[options.rowIndex];
const disp = displayProcessor(raw);
disp.toString = () => formattedValueToString(disp);
return disp;

View File

@ -10,7 +10,7 @@ const isEqualValueMatcher: ValueMatcherInfo<BasicValueMatcherOptions> = {
description: 'Match where value for given field is equal to options value.',
get: (options) => {
return (valueIndex: number, field: Field) => {
const value = field.values.get(valueIndex);
const value = field.values[valueIndex];
// eslint-disable-next-line eqeqeq
return value == options.value;
};
@ -28,7 +28,7 @@ const isNotEqualValueMatcher: ValueMatcherInfo<BasicValueMatcherOptions> = {
description: 'Match where value for given field is not equal to options value.',
get: (options) => {
return (valueIndex: number, field: Field) => {
const value = field.values.get(valueIndex);
const value = field.values[valueIndex];
// eslint-disable-next-line eqeqeq
return value != options.value;
};

View File

@ -10,7 +10,7 @@ const isNullValueMatcher: ValueMatcherInfo<ValueMatcherOptions> = {
description: 'Match where value for given field is null.',
get: () => {
return (valueIndex: number, field: Field) => {
const value = field.values.get(valueIndex);
const value = field.values[valueIndex];
return value == null;
};
},
@ -27,7 +27,7 @@ const isNotNullValueMatcher: ValueMatcherInfo<ValueMatcherOptions> = {
description: 'Match where value for given field is not null.',
get: () => {
return (valueIndex: number, field: Field) => {
const value = field.values.get(valueIndex);
const value = field.values[valueIndex];
return value != null;
};
},

View File

@ -10,7 +10,7 @@ const isGreaterValueMatcher: ValueMatcherInfo<BasicValueMatcherOptions<number>>
description: 'Match when field value is greater than option.',
get: (options) => {
return (valueIndex: number, field: Field) => {
const value = field.values.get(valueIndex);
const value = field.values[valueIndex];
if (isNaN(value)) {
return false;
}
@ -30,7 +30,7 @@ const isGreaterOrEqualValueMatcher: ValueMatcherInfo<BasicValueMatcherOptions<nu
description: 'Match when field value is greater than or equal to option.',
get: (options) => {
return (valueIndex: number, field: Field) => {
const value = field.values.get(valueIndex);
const value = field.values[valueIndex];
if (isNaN(value)) {
return false;
}
@ -50,7 +50,7 @@ const isLowerValueMatcher: ValueMatcherInfo<BasicValueMatcherOptions<number>> =
description: 'Match when field value is lower than option.',
get: (options) => {
return (valueIndex: number, field: Field) => {
const value = field.values.get(valueIndex);
const value = field.values[valueIndex];
if (isNaN(value)) {
return false;
}
@ -70,7 +70,7 @@ const isLowerOrEqualValueMatcher: ValueMatcherInfo<BasicValueMatcherOptions<numb
description: 'Match when field value is lower or equal than option.',
get: (options) => {
return (valueIndex: number, field: Field) => {
const value = field.values.get(valueIndex);
const value = field.values[valueIndex];
if (isNaN(value)) {
return false;
}

View File

@ -10,7 +10,7 @@ const isBetweenValueMatcher: ValueMatcherInfo<RangeValueMatcherOptions<number>>
description: 'Match when field value is between given option values.',
get: (options) => {
return (valueIndex: number, field: Field) => {
const value = field.values.get(valueIndex);
const value = field.values[valueIndex];
if (isNaN(value)) {
return false;
}

View File

@ -12,7 +12,7 @@ const regexValueMatcher: ValueMatcherInfo<BasicValueMatcherOptions<string>> = {
const regex = new RegExp(options.value);
return (valueIndex: number, field: Field) => {
const value = field.values.get(valueIndex);
const value = field.values[valueIndex];
return regex.test(value);
};
},

View File

@ -138,7 +138,7 @@ const createKeyFactory = (
return (frameIndex: number, valueIndex: number): string => {
return factoryIndex[frameIndex].reduce((key: string, fieldIndex: number) => {
return key + data[frameIndex].fields[fieldIndex].values.get(valueIndex);
return key + data[frameIndex].fields[fieldIndex].values[valueIndex];
}, '');
};
};
@ -173,7 +173,7 @@ const createValueMapper = (
continue;
}
value[fieldName] = field.values.get(valueIndex);
value[fieldName] = field.values[valueIndex];
}
return value;

View File

@ -75,9 +75,9 @@ export const seriesToRowsTransformer: DataTransformerInfo<SeriesToRowsTransforme
const valueFieldIndex = timeFieldIndex === 0 ? 1 : 0;
dataFrame.add({
[TIME_SERIES_TIME_FIELD_NAME]: frame.fields[timeFieldIndex].values.get(valueIndex),
[TIME_SERIES_TIME_FIELD_NAME]: frame.fields[timeFieldIndex].values[valueIndex],
[TIME_SERIES_METRIC_FIELD_NAME]: getFrameDisplayName(frame),
[TIME_SERIES_VALUE_FIELD_NAME]: frame.fields[valueFieldIndex].values.get(valueIndex),
[TIME_SERIES_VALUE_FIELD_NAME]: frame.fields[valueFieldIndex].values[valueIndex],
});
}
}

View File

@ -1,12 +1,12 @@
declare global {
interface Array<T> {
/** @deprecated this only exists to help migrate Vector to Array */
/** @deprecated Use [idx]. This only exists to help migrate Vector to Array */
get(idx: number): T;
/** @deprecated this only exists to help migrate Vector to Array */
/** @deprecated Use [idx]. This only exists to help migrate Vector to Array */
set(idx: number, value: T): void;
/** @deprecated this only exists to help migrate Vector to Array */
/** @deprecated Use .push(value). This only exists to help migrate Vector to Array */
add(value: T): void;
/** @deprecated this only exists to help migrate Vector to Array */
/** @deprecated this is not necessary. This only exists to help migrate Vector to Array */
toArray(): T[];
}
}
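These deprecated members are declared on the global `Array` interface so that old `Vector`-style call sites keep compiling while they are migrated to bracket indexing. The sketch below shows how such a bridge can be declared for the type checker and patched onto `Array.prototype` at runtime; it is an illustration only, not the actual `@grafana/data` shim, which may for example define the methods as non-enumerable properties.

```ts
// Illustrative migration bridge for plain arrays (not the Grafana source).
declare global {
  interface Array<T> {
    /** @deprecated Use [idx]. */
    get(idx: number): T;
    /** @deprecated Use [idx] = value. */
    set(idx: number, value: T): void;
    /** @deprecated Use push(value). */
    add(value: T): void;
    /** @deprecated Not necessary for plain arrays. */
    toArray(): T[];
  }
}

if (typeof Array.prototype.get !== 'function') {
  Array.prototype.get = function (this: unknown[], idx: number) {
    return this[idx];
  };
  Array.prototype.set = function (this: unknown[], idx: number, value: unknown) {
    this[idx] = value;
  };
  Array.prototype.add = function (this: unknown[], value: unknown) {
    this.push(value);
  };
  Array.prototype.toArray = function (this: unknown[]) {
    return this;
  };
}

// Old call sites keep working while they are migrated to bracket indexing.
const values = [1, 2, 3];
console.log(values.get(1), values[1]); // 2 2

export {};
```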
@ -108,17 +108,21 @@ export interface MutableVector<T = any> extends ReadWriteVector<T> {}
export function makeArrayIndexableVector<T extends Vector>(v: T): T {
return new Proxy(v, {
get(target: Vector, property: string, receiver: Vector) {
const idx = +property;
if (String(idx) === property) {
return target.get(idx);
if (typeof property !== 'symbol') {
const idx = +property;
if (String(idx) === property) {
return target.get(idx);
}
}
return Reflect.get(target, property, receiver);
},
set(target: Vector, property: string, value: any, receiver: Vector) {
const idx = +property;
if (String(idx) === property) {
target.set(idx, value);
return true;
if (typeof property !== 'symbol') {
const idx = +property;
if (String(idx) === property) {
target.set(idx, value);
return true;
}
}
return Reflect.set(target, property, value, receiver);
},

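The added `typeof property !== 'symbol'` guards matter because Proxy traps also receive symbol keys: iterating or spreading a proxied vector looks up `Symbol.iterator`, and applying unary `+` to a symbol throws at runtime. The small sketch below isolates that failure mode and the guarded index check used by the updated traps; the helper names are illustrative.

```ts
// The old trap typed `property` as string, so `+property` type-checked, but
// Proxy get/set traps also receive symbols (e.g. Symbol.iterator during
// for...of or spread), and coercing a symbol to a number throws.
function unguardedIndex(property: string): number {
  return +property; // throws a TypeError if `property` is actually a symbol
}

try {
  unguardedIndex(Symbol.iterator as unknown as string);
} catch (err) {
  console.log((err as Error).message); // e.g. "Cannot convert a Symbol value to a number" in V8
}

// Guarded check in the spirit of the updated traps: only string keys that
// round-trip through Number are treated as indices; everything else falls
// through to Reflect.get / Reflect.set.
function asIndex(property: string | symbol): number | undefined {
  if (typeof property === 'symbol') {
    return undefined;
  }
  const idx = +property;
  return String(idx) === property ? idx : undefined;
}

console.log(asIndex('3')); // 3
console.log(asIndex('length')); // undefined
console.log(asIndex(Symbol.iterator)); // undefined
```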
View File

@ -234,7 +234,7 @@ export function frameToMetricFindValue(frame: DataFrame): MetricFindValue[] {
}
if (field) {
for (let i = 0; i < field.values.length; i++) {
values.push({ text: '' + field.values.get(i) });
values.push({ text: '' + field.values[i] });
}
}
return values;

View File

@ -220,7 +220,7 @@ export class UPlotConfigBuilder {
// interpolate for gradients/thresholds
if (typeof s !== 'string') {
let field = this.frames![0].fields[seriesIdx];
s = field.display!(field.values.get(u.cursor.idxs![seriesIdx]!)).color!;
s = field.display!(field.values[u.cursor.idxs![seriesIdx]!]).color!;
}
return s + alphaHex;

View File

@ -183,7 +183,7 @@ export const TooltipPlugin = ({
const xFieldFmt = xField.display || getDisplayProcessor({ field: xField, timeZone, theme });
let tooltip: React.ReactNode = null;
let xVal = xFieldFmt(xField!.values.get(focusedPointIdx)).text;
let xVal = xFieldFmt(xField!.values[focusedPointIdx]).text;
if (!renderTooltip) {
// when interacting with a point in single mode
@ -195,9 +195,9 @@ export const TooltipPlugin = ({
}
const dataIdx = focusedPointIdxs?.[focusedSeriesIdx] ?? focusedPointIdx;
xVal = xFieldFmt(xField!.values.get(dataIdx)).text;
xVal = xFieldFmt(xField!.values[dataIdx]).text;
const fieldFmt = field.display || getDisplayProcessor({ field, timeZone, theme });
const display = fieldFmt(field.values.get(dataIdx));
const display = fieldFmt(field.values[dataIdx]);
tooltip = (
<SeriesTable
@ -232,7 +232,7 @@ export const TooltipPlugin = ({
continue;
}
const v = otherProps.data.fields[i].values.get(focusedPointIdxs[i]!);
const v = otherProps.data.fields[i].values[focusedPointIdxs[i]!];
const display = field.display!(v);
sortIdx.push(v);

View File

@ -4,7 +4,7 @@ import React, { ReactNode } from 'react';
import { TestProvider } from 'test/helpers/TestProvider';
import { getGrafanaContextMock } from 'test/mocks/getGrafanaContextMock';
import { ArrayVector, DataFrame, DataFrameView, FieldType, NavModelItem } from '@grafana/data';
import { DataFrame, DataFrameView, FieldType, NavModelItem } from '@grafana/data';
import { config } from '@grafana/runtime';
import { HOME_NAV_ID } from 'app/core/reducers/navModel';
import { DashboardQueryResult, getGrafanaSearcher, QueryResponse } from 'app/features/search/service';
@ -23,12 +23,12 @@ const pageNav: NavModelItem = {
const searchData: DataFrame = {
fields: [
{ name: 'kind', type: FieldType.string, config: {}, values: new ArrayVector([]) },
{ name: 'name', type: FieldType.string, config: {}, values: new ArrayVector([]) },
{ name: 'uid', type: FieldType.string, config: {}, values: new ArrayVector([]) },
{ name: 'url', type: FieldType.string, config: {}, values: new ArrayVector([]) },
{ name: 'tags', type: FieldType.other, config: {}, values: new ArrayVector([]) },
{ name: 'location', type: FieldType.string, config: {}, values: new ArrayVector([]) },
{ name: 'kind', type: FieldType.string, config: {}, values: [] },
{ name: 'name', type: FieldType.string, config: {}, values: [] },
{ name: 'uid', type: FieldType.string, config: {}, values: [] },
{ name: 'url', type: FieldType.string, config: {}, values: [] },
{ name: 'tags', type: FieldType.other, config: {}, values: [] },
{ name: 'location', type: FieldType.string, config: {}, values: [] },
],
length: 0,
};

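Test fixtures like `searchData` above now build `DataFrame`s with plain `values: [...]` arrays instead of `new ArrayVector([...])`. Below is a sketch of that construction style using local stand-in types; the `FieldType` enum, field names, and helper are illustrative rather than Grafana's API. Note that `length` still has to be kept in sync with the value arrays by hand.

```ts
// Local stand-ins for illustration only.
enum FieldType {
  string = 'string',
  other = 'other',
}

interface Field<T = unknown> {
  name: string;
  type: FieldType;
  config: Record<string, unknown>;
  values: T[]; // plain arrays — no ArrayVector wrapper
}

interface DataFrame {
  fields: Array<Field>;
  length: number;
}

function makeSearchFrame(names: string[], uids: string[]): DataFrame {
  return {
    fields: [
      { name: 'kind', type: FieldType.string, config: {}, values: names.map(() => 'dashboard') },
      { name: 'name', type: FieldType.string, config: {}, values: names },
      { name: 'uid', type: FieldType.string, config: {}, values: uids },
      { name: 'tags', type: FieldType.other, config: {}, values: names.map((): string[] => []) },
    ],
    // length must be kept in sync with the value arrays by hand
    length: names.length,
  };
}

const empty = makeSearchFrame([], []);
const one = makeSearchFrame(['My dashboard 1'], ['my-dashboard-1']);
console.log(empty.length, one.fields[1].values[0]); // 0 'My dashboard 1'
```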
View File

@ -611,13 +611,13 @@ export function findNextStateIndex(field: Field, datapointIdx: number) {
return null;
}
const startValue = field.values.get(datapointIdx);
const startValue = field.values[datapointIdx];
while (end === undefined) {
if (rightPointer >= field.values.length) {
return null;
}
const rightValue = field.values.get(rightPointer);
const rightValue = field.values[rightPointer];
if (rightValue === undefined || rightValue === startValue) {
rightPointer++;

View File

@ -329,7 +329,7 @@ function getAllLabels(fields: LogFields): Labels[] {
const { stringField, labelsField } = fields;
if (labelsField !== undefined) {
return labelsField.values.toArray();
return labelsField.values;
} else {
return [stringField.labels ?? {}];
}
@ -342,7 +342,7 @@ function getLabelsForFrameRow(fields: LogFields, index: number): Labels {
const { stringField, labelsField } = fields;
if (labelsField !== undefined) {
return labelsField.values.get(index);
return labelsField.values[index];
} else {
return stringField.labels ?? {};
}
@ -407,13 +407,13 @@ export function logSeriesToLogsModel(logSeries: DataFrame[], queries: DataQuery[
const { timeField, timeNanosecondField, stringField, logLevelField, idField, series } = info;
for (let j = 0; j < series.length; j++) {
const ts = timeField.values.get(j);
const ts = timeField.values[j];
const time = toUtc(ts);
const tsNs = timeNanosecondField ? timeNanosecondField.values.get(j) : undefined;
const tsNs = timeNanosecondField ? timeNanosecondField.values[j] : undefined;
const timeEpochNs = tsNs ? tsNs : time.valueOf() + '000000';
// In edge cases, this can be undefined. If undefined, we want to replace it with empty string.
const messageValue: unknown = stringField.values.get(j) ?? '';
const messageValue: unknown = stringField.values[j] ?? '';
// This should be string but sometimes isn't (eg elastic) because the dataFrame is not strongly typed.
const message: string = typeof messageValue === 'string' ? messageValue : JSON.stringify(messageValue);
@ -433,7 +433,7 @@ export function logSeriesToLogsModel(logSeries: DataFrame[], queries: DataQuery[
}
let logLevel = LogLevel.unknown;
const logLevelKey = (logLevelField && logLevelField.values.get(j)) || (labels && labels['level']);
const logLevelKey = (logLevelField && logLevelField.values[j]) || (labels && labels['level']);
if (logLevelKey) {
logLevel = getLogLevelFromKey(logLevelKey);
} else {
@ -459,7 +459,7 @@ export function logSeriesToLogsModel(logSeries: DataFrame[], queries: DataQuery[
entry,
raw: message,
labels: labels || {},
uid: idField ? idField.values.get(j) : j.toString(),
uid: idField ? idField.values[j] : j.toString(),
datasourceType,
});
}

View File

@ -329,10 +329,10 @@ const TimeseriesRow: FC<FrameProps & { index: number }> = ({ frame, index }) =>
const displayNameFromDS = valueField.config?.displayNameFromDS;
const name = displayNameFromDS ?? (hasLabels ? formatLabels(valueField.labels ?? {}) : 'Series ' + index);
const timestamps = frame.fields[0].values.toArray();
const timestamps = frame.fields[0].values;
const getTimestampFromIndex = (index: number) => frame.fields[0].values.get(index);
const getValueFromIndex = (index: number) => frame.fields[1].values.get(index);
const getTimestampFromIndex = (index: number) => frame.fields[0].values[index];
const getValueFromIndex = (index: number) => frame.fields[1].values[index];
return (
<div className={styles.expression.resultsRow}>

View File

@ -17,7 +17,7 @@ const getSeriesName = (frame: DataFrame): string => {
};
const getSeriesValue = (frame: DataFrame) => {
const value = frame.fields[0]?.values.get(0);
const value = frame.fields[0]?.values[0];
if (Number.isFinite(value)) {
return roundDecimals(value, 5);
@ -33,9 +33,7 @@ const formatLabels = (labels: Labels): string => {
};
const isEmptySeries = (series: DataFrame[]): boolean => {
const isEmpty = series.every((serie) =>
serie.fields.every((field) => field.values.toArray().every((value) => value == null))
);
const isEmpty = series.every((serie) => serie.fields.every((field) => field.values.every((value) => value == null)));
return isEmpty;
};

View File

@ -26,10 +26,7 @@ export function mapDataFrameToAlertPreview({ fields }: DataFrame): AlertPreview
const instances: AlertPreviewInstance[] = [];
for (let index = 0; index < instanceStatusCount; index++) {
const labelValues = labelIndexes.map((labelIndex) => [
fields[labelIndex].name,
fields[labelIndex].values.get(index),
]);
const labelValues = labelIndexes.map((labelIndex) => [fields[labelIndex].name, fields[labelIndex].values[index]]);
const state = fields[stateFieldIndex]?.values?.get(index);
const info = fields[infoFieldIndex]?.values?.get(index);

View File

@ -57,7 +57,7 @@ export class AnnotationFieldMapper extends PureComponent<Props, State> {
description += '...';
break;
}
description += f.values.get(i);
description += f.values[i];
}
if (description.length > 50) {

View File

@ -223,7 +223,7 @@ export function getAnnotationsFromData(
if (f.text) {
v = f.text; // TODO support templates!
} else if (f.field) {
v = f.field.values.get(i);
v = f.field.values[i];
if (v !== undefined && f.regex) {
const match = f.regex.exec(v);
if (match) {

View File

@ -1,4 +1,4 @@
import { ArrayVector, DataFrame, DataFrameView, FieldType } from '@grafana/data';
import { DataFrame, DataFrameView, FieldType } from '@grafana/data';
import { config } from '@grafana/runtime';
import { ContextSrv, contextSrv } from 'app/core/services/context_srv';
import impressionSrv from 'app/core/services/impression_srv';
@ -13,12 +13,12 @@ describe('dashboardActions', () => {
const searchData: DataFrame = {
fields: [
{ name: 'kind', type: FieldType.string, config: {}, values: new ArrayVector(['dashboard']) },
{ name: 'name', type: FieldType.string, config: {}, values: new ArrayVector(['My dashboard 1']) },
{ name: 'uid', type: FieldType.string, config: {}, values: new ArrayVector(['my-dashboard-1']) },
{ name: 'url', type: FieldType.string, config: {}, values: new ArrayVector(['/my-dashboard-1']) },
{ name: 'tags', type: FieldType.other, config: {}, values: new ArrayVector([['foo', 'bar']]) },
{ name: 'location', type: FieldType.string, config: {}, values: new ArrayVector(['my-folder-1']) },
{ name: 'kind', type: FieldType.string, config: {}, values: ['dashboard'] },
{ name: 'name', type: FieldType.string, config: {}, values: ['My dashboard 1'] },
{ name: 'uid', type: FieldType.string, config: {}, values: ['my-dashboard-1'] },
{ name: 'url', type: FieldType.string, config: {}, values: ['/my-dashboard-1'] },
{ name: 'tags', type: FieldType.other, config: {}, values: [['foo', 'bar']] },
{ name: 'location', type: FieldType.string, config: {}, values: ['my-folder-1'] },
],
meta: {
custom: {

View File

@ -2,14 +2,7 @@ import { render, screen } from '@testing-library/react';
import userEvent from '@testing-library/user-event';
import React, { ComponentProps } from 'react';
import {
ArrayVector,
FieldType,
LoadingState,
MutableDataFrame,
SupplementaryQueryType,
DataSourceApi,
} from '@grafana/data';
import { FieldType, LoadingState, MutableDataFrame, SupplementaryQueryType, DataSourceApi } from '@grafana/data';
import { DataQuery } from '@grafana/schema';
import { LogsSamplePanel } from './LogsSamplePanel';
@ -43,20 +36,20 @@ const sampleDataFrame = new MutableDataFrame({
{
name: 'labels',
type: FieldType.other,
values: new ArrayVector([
values: [
{ place: 'luna', source: 'data' },
{ place: 'luna', source: 'data' },
]),
],
},
{
name: 'Time',
type: FieldType.time,
values: new ArrayVector(['2022-02-22T09:28:11.352440161Z', '2022-02-22T14:42:50.991981292Z']),
values: ['2022-02-22T09:28:11.352440161Z', '2022-02-22T14:42:50.991981292Z'],
},
{
name: 'Line',
type: FieldType.string,
values: new ArrayVector(['line1 ', 'line2']),
values: ['line1 ', 'line2'],
},
],
});

View File

@ -9,7 +9,7 @@ export function transformDataFrames(frame?: DataFrame): Trace | null {
let data: TraceResponse =
frame.fields.length === 1
? // For backward compatibility when we sent whole json response in a single field/value
frame.fields[0].values.get(0)
frame.fields[0].values[0]
: transformTraceDataFrame(frame);
return transformTraceData(data);
}

View File

@ -4,7 +4,6 @@ import { thunkTester } from 'test/core/thunk/thunkTester';
import { assertIsDefined } from 'test/helpers/asserts';
import {
ArrayVector,
DataQueryResponse,
DataSourceApi,
DataSourceJsonData,
@ -111,7 +110,7 @@ function setupQueryResponse(state: StoreState) {
error: { message: 'test error' },
data: [
new MutableDataFrame({
fields: [{ name: 'test', values: new ArrayVector() }],
fields: [{ name: 'test', values: [] }],
meta: {
preferredVisualisationType: 'graph',
},

View File

@ -267,9 +267,9 @@ describe('decorateWithTableResult', () => {
expect(tableResult?.fields[0].name).toBe('Time');
expect(tableResult?.fields[1].name).toBe('A-series');
expect(tableResult?.fields[2].name).toBe('B-series');
expect(tableResult?.fields[0].values.toArray()).toEqual([100, 200, 300]);
expect(tableResult?.fields[1].values.toArray()).toEqual([4, 5, 6]);
expect(tableResult?.fields[2].values.toArray()).toEqual([4, 5, 6]);
expect(tableResult?.fields[0].values).toEqual([100, 200, 300]);
expect(tableResult?.fields[1].values).toEqual([4, 5, 6]);
expect(tableResult?.fields[2].values).toEqual([4, 5, 6]);
});
it('should not override fields display property when filled', async () => {

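With plain arrays, assertions compare `field.values` directly instead of `field.values.toArray()`. A hypothetical Jest-style check mirroring the expectations above, with a local stand-in for the field shape:

```ts
import { describe, expect, it } from '@jest/globals';

interface Field {
  name: string;
  values: number[];
}

describe('plain-array field values', () => {
  it('compares values without toArray()', () => {
    const timeField: Field = { name: 'Time', values: [100, 200, 300] };

    // Plain arrays work with toEqual and array matchers directly.
    expect(timeField.values).toEqual([100, 200, 300]);
    expect(timeField.values).toHaveLength(3);
    expect(timeField.values).toEqual(expect.arrayContaining([200]));
  });
});
```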
View File

@ -18,7 +18,7 @@ export const getRawPrometheusListItemsFromDataFrame = (dataFrame: DataFrame): in
const newFields = dataFrame.fields.filter((field) => !['Time'].includes(field.name));
// Get name from each series
let metricNames: string[] = newFields.find((field) => field.name === '__name__')?.values.toArray() ?? [];
let metricNames: string[] = newFields.find((field) => field.name === '__name__')?.values ?? [];
if (!metricNames.length && newFields.length && newFields[0].values.length) {
// These results do not have series labels
// Matching the native prometheus UI which appears to only show the permutations of the first field in the query result.
@ -38,7 +38,7 @@ export const getRawPrometheusListItemsFromDataFrame = (dataFrame: DataFrame): in
if (label !== 'Time') {
// Initialize the objects
if (typeof field?.display === 'function') {
const stringValue = formattedValueToString(field?.display(field.values.get(i)));
const stringValue = formattedValueToString(field?.display(field.values[i]));
if (stringValue) {
formattedMetric[label] = stringValue;
} else if (label.includes('Value #')) {

View File

@ -1,5 +1,4 @@
import {
ArrayVector,
CoreApp,
DataFrame,
DataLink,
@ -211,7 +210,7 @@ describe('explore links utils', () => {
const { field, range, dataFrame } = setup(noHyphenLink, true, {
name: 'fluxDimensions',
type: FieldType.string,
values: new ArrayVector([ROW_WITH_TEXT_VALUE.value, ROW_WITH_NULL_VALUE.value]),
values: [ROW_WITH_TEXT_VALUE.value, ROW_WITH_NULL_VALUE.value],
config: {
links: [noHyphenLink],
},
@ -242,7 +241,7 @@ describe('explore links utils', () => {
{
name: 'fluxDimensions',
type: FieldType.string,
values: new ArrayVector([ROW_WITH_TEXT_VALUE.value, ROW_WITH_NULL_VALUE.value]),
values: [ROW_WITH_TEXT_VALUE.value, ROW_WITH_NULL_VALUE.value],
config: {
links: [noHyphenLink],
},
@ -251,7 +250,7 @@ describe('explore links utils', () => {
{
name: 'fluxDimension2',
type: FieldType.string,
values: new ArrayVector(['foo2', ROW_WITH_NULL_VALUE.value]),
values: ['foo2', ROW_WITH_NULL_VALUE.value],
config: {
links: [noHyphenLink],
},
@ -286,7 +285,7 @@ describe('explore links utils', () => {
const { field, range, dataFrame } = setup(transformationLink, true, {
name: 'msg',
type: FieldType.string,
values: new ArrayVector(['application=foo host=dev-001', 'application=bar host=prod-003']),
values: ['application=foo host=dev-001', 'application=bar host=prod-003'],
config: {
links: [transformationLink],
},
@ -335,7 +334,7 @@ describe('explore links utils', () => {
const { field, range, dataFrame } = setup(transformationLink, true, {
name: 'msg',
type: FieldType.string,
values: new ArrayVector(['fieldA=asparagus fieldB=banana', 'fieldA=broccoli fieldB=apple']),
values: ['fieldA=asparagus fieldB=banana', 'fieldA=broccoli fieldB=apple'],
config: {
links: [transformationLink],
},
@ -374,7 +373,7 @@ describe('explore links utils', () => {
const { field, range, dataFrame } = setup(transformationLink, true, {
name: 'msg',
type: FieldType.string,
values: new ArrayVector(['application=foo online=true', 'application=bar online=false']),
values: ['application=foo online=true', 'application=bar online=false'],
config: {
links: [transformationLink],
},
@ -417,7 +416,7 @@ describe('explore links utils', () => {
{
name: 'fieldWithLink',
type: FieldType.string,
values: new ArrayVector(['application=link', 'application=link2']),
values: ['application=link', 'application=link2'],
config: {
links: [transformationLink],
},
@ -426,7 +425,7 @@ describe('explore links utils', () => {
{
name: 'fieldNamedInTransformation',
type: FieldType.string,
values: new ArrayVector(['application=transform', 'application=transform2']),
values: ['application=transform', 'application=transform2'],
config: {},
},
]
@ -470,7 +469,7 @@ describe('explore links utils', () => {
const { field, range, dataFrame } = setup(transformationLink, true, {
name: 'msg',
type: FieldType.string,
values: new ArrayVector(['foo loki prod', 'dev bar grafana', 'prod grafana foo']),
values: ['foo loki prod', 'dev bar grafana', 'prod grafana foo'],
config: {
links: [transformationLink],
},
@ -545,7 +544,7 @@ describe('explore links utils', () => {
const { field, range, dataFrame } = setup(transformationLink, true, {
name: 'msg',
type: FieldType.string,
values: new ArrayVector(['application=foo host=dev-001']),
values: ['application=foo host=dev-001'],
config: {
links: [transformationLink],
},
@ -570,7 +569,7 @@ describe('explore links utils', () => {
const { field, range, dataFrame } = setup(transformationLink, true, {
name: 'msg',
type: FieldType.string,
values: new ArrayVector(['application=foo host=dev-001']),
values: ['application=foo host=dev-001'],
config: {
links: [transformationLink],
},
@ -712,7 +711,7 @@ function setup(
const field: Field<string | null> = {
name: 'flux-dimensions',
type: FieldType.string,
values: new ArrayVector([ROW_WITH_TEXT_VALUE.value, ROW_WITH_NULL_VALUE.value]),
values: [ROW_WITH_TEXT_VALUE.value, ROW_WITH_NULL_VALUE.value],
config: {
links: [link],
},

View File

@ -71,7 +71,7 @@ export const getFieldLinksForExplore = (options: {
const scopedVars: ScopedVars = { ...(vars || {}) };
scopedVars['__value'] = {
value: {
raw: field.values.get(rowIndex),
raw: field.values[rowIndex],
},
text: 'Raw value',
};
@ -130,9 +130,9 @@ export const getFieldLinksForExplore = (options: {
let fieldValue;
if (transformation.field) {
const transformField = dataFrame?.fields.find((field) => field.name === transformation.field);
fieldValue = transformField?.values.get(rowIndex);
fieldValue = transformField?.values[rowIndex];
} else {
fieldValue = field.values.get(rowIndex);
fieldValue = field.values[rowIndex];
}
internalLinkSpecificVars = {

View File

@ -98,7 +98,7 @@ describe('LogDetails', () => {
if (field.config && field.config.links) {
return field.config.links.map((link) => {
return {
href: link.url.replace('${__value.text}', field.values.get(rowIndex)),
href: link.url.replace('${__value.text}', field.values[rowIndex]),
title: link.title,
target: '_blank',
origin: field,

View File

@ -117,7 +117,7 @@ class UnThemedLogDetails extends PureComponent<Props> {
onClickHideField={onClickHideField}
onClickFilterOutLabel={onClickFilterOutLabel}
onClickFilterLabel={onClickFilterLabel}
getStats={() => calculateStats(row.dataFrame.fields[fieldIndex].values.toArray())}
getStats={() => calculateStats(row.dataFrame.fields[fieldIndex].values)}
displayedFields={displayedFields}
wrapLogMessage={wrapLogMessage}
row={row}
@ -144,7 +144,7 @@ class UnThemedLogDetails extends PureComponent<Props> {
links={links}
onClickShowField={onClickShowField}
onClickHideField={onClickHideField}
getStats={() => calculateStats(row.dataFrame.fields[fieldIndex].values.toArray())}
getStats={() => calculateStats(row.dataFrame.fields[fieldIndex].values)}
displayedFields={displayedFields}
wrapLogMessage={wrapLogMessage}
row={row}
@ -163,7 +163,7 @@ class UnThemedLogDetails extends PureComponent<Props> {
links={links}
onClickShowField={onClickShowField}
onClickHideField={onClickHideField}
getStats={() => calculateStats(row.dataFrame.fields[fieldIndex].values.toArray())}
getStats={() => calculateStats(row.dataFrame.fields[fieldIndex].values)}
displayedFields={displayedFields}
wrapLogMessage={wrapLogMessage}
row={row}

View File

@ -1,4 +1,4 @@
import { ArrayVector, FieldType, MutableDataFrame } from '@grafana/data';
import { FieldType, MutableDataFrame } from '@grafana/data';
import { ExploreFieldLinkModel } from 'app/features/explore/utils/links';
import { createLogRow } from './__mocks__/logRow';
@ -17,7 +17,7 @@ describe('logParser', () => {
name: 'labels',
type: FieldType.other,
config: {},
values: new ArrayVector([{ place: 'luna', source: 'data' }]),
values: [{ place: 'luna', source: 'data' }],
},
],
}),
@ -39,7 +39,7 @@ describe('logParser', () => {
name: 'labels',
type: FieldType.string,
config: {},
values: new ArrayVector([{ place: 'luna', source: 'data' }]),
values: [{ place: 'luna', source: 'data' }],
},
],
}),
@ -60,7 +60,7 @@ describe('logParser', () => {
name: 'id',
type: FieldType.string,
config: {},
values: new ArrayVector(['1659620138401000000_8b1f7688_']),
values: ['1659620138401000000_8b1f7688_'],
},
],
}),
@ -129,7 +129,7 @@ describe('logParser', () => {
config: { links: [] },
name: 'Line',
type: FieldType.string,
values: new ArrayVector(['a', 'b']),
values: ['a', 'b'],
},
title: 'test',
target: '_self',
@ -163,7 +163,7 @@ describe('logParser', () => {
config: { links: [] },
name: 'Line',
type: FieldType.string,
values: new ArrayVector(['a', 'b']),
values: ['a', 'b'],
},
title: 'test',
target: '_self',
@ -186,12 +186,12 @@ const testStringField = {
name: 'test_field_string',
type: FieldType.string,
config: {},
values: new ArrayVector(['abc']),
values: ['abc'],
};
const testFieldWithNullValue = {
name: 'test_field_null',
type: FieldType.string,
config: {},
values: new ArrayVector([null]),
values: [null],
};

View File

@ -71,7 +71,7 @@ export const getDataframeFields = memoizeOne(
const links = getFieldLinks ? getFieldLinks(field, row.rowIndex, row.dataFrame) : [];
return {
keys: [field.name],
values: [field.values.get(row.rowIndex).toString()],
values: [field.values[row.rowIndex].toString()],
links: links,
fieldIndex: field.index,
};
@ -93,7 +93,7 @@ function shouldRemoveField(field: Field, index: number, row: LogRowModel) {
if (
field.name === firstTimeField?.name &&
field.type === FieldType.time &&
field.values.get(0) === firstTimeField.values.get(0)
field.values[0] === firstTimeField.values[0]
) {
return true;
}
@ -102,7 +102,7 @@ function shouldRemoveField(field: Field, index: number, row: LogRowModel) {
return true;
}
// field that has empty value (we want to keep 0 or empty string)
if (field.values.get(row.rowIndex) == null) {
if (field.values[row.rowIndex] == null) {
return true;
}
return false;

View File

@ -223,8 +223,8 @@ export const mergeLogsVolumeDataFrames = (dataFrames: DataFrame[]): { dataFrames
};
for (let pointIndex = 0; pointIndex < length; pointIndex++) {
const time: number = timeField.values.get(pointIndex);
const value: number = valueField.values.get(pointIndex);
const time: number = timeField.values[pointIndex];
const value: number = valueField.values[pointIndex];
aggregated[level] ??= {};
aggregated[level][time] = (aggregated[level][time] || 0) + value;

View File

@ -3,7 +3,7 @@ import { defaultsDeep } from 'lodash';
import React from 'react';
import { Provider } from 'react-redux';
import { ArrayVector, FieldType, getDefaultTimeRange, LoadingState } from '@grafana/data';
import { FieldType, getDefaultTimeRange, LoadingState } from '@grafana/data';
import { PanelDataErrorViewProps } from '@grafana/runtime';
import { configureStore } from 'app/store/configureStore';
@ -28,7 +28,7 @@ describe('PanelDataErrorView', () => {
name: 'time',
type: FieldType.time,
config: {},
values: new ArrayVector([]),
values: [],
},
],
length: 0,
@ -39,7 +39,7 @@ describe('PanelDataErrorView', () => {
name: 'value',
type: FieldType.number,
config: {},
values: new ArrayVector([]),
values: [],
},
],
length: 0,

View File

@ -108,7 +108,7 @@ describe('getFieldLinksSupplier', () => {
view: new DataFrameView(data),
rowIndex,
colIndex,
display: field.display!(field.values.get(rowIndex)),
display: field.display!(field.values[rowIndex]),
hasLinks: true,
};

View File

@ -85,10 +85,10 @@ export const getFieldLinksSupplier = (value: FieldDisplay): LinkModelSupplier<Fi
const { timeField } = getTimeField(dataFrame);
scopedVars['__value'] = {
value: {
raw: field.values.get(value.rowIndex),
raw: field.values[value.rowIndex],
numeric: value.display.numeric,
text: formattedValueToString(value.display),
time: timeField ? timeField.values.get(value.rowIndex) : undefined,
time: timeField ? timeField.values[value.rowIndex] : undefined,
},
text: 'Value',
};

View File

@ -11,11 +11,11 @@ export class ResponseParser implements ResponseParserType {
if (textField && valueField) {
for (let i = 0; i < textField.values.length; i++) {
values.push({ text: '' + textField.values.get(i), value: '' + valueField.values.get(i) });
values.push({ text: '' + textField.values[i], value: '' + valueField.values[i] });
}
} else {
for (const field of frame.fields) {
for (const value of field.values.toArray()) {
for (const value of field.values) {
values.push({ text: value });
}
}

View File

@ -2,7 +2,7 @@ import { render, screen, act } from '@testing-library/react';
import userEvent from '@testing-library/user-event';
import React from 'react';
import { ArrayVector, DataFrame, DataFrameView, FieldType } from '@grafana/data';
import { DataFrame, DataFrameView, FieldType } from '@grafana/data';
import { DashboardQueryResult, getGrafanaSearcher, QueryResponse } from '../../service';
import { DashboardSearchItemType, DashboardViewItem } from '../../types';
@ -29,12 +29,12 @@ describe('FolderSection', () => {
describe('when there are no results', () => {
const emptySearchData: DataFrame = {
fields: [
{ name: 'kind', type: FieldType.string, config: {}, values: new ArrayVector([]) },
{ name: 'name', type: FieldType.string, config: {}, values: new ArrayVector([]) },
{ name: 'uid', type: FieldType.string, config: {}, values: new ArrayVector([]) },
{ name: 'url', type: FieldType.string, config: {}, values: new ArrayVector([]) },
{ name: 'tags', type: FieldType.other, config: {}, values: new ArrayVector([]) },
{ name: 'location', type: FieldType.string, config: {}, values: new ArrayVector([]) },
{ name: 'kind', type: FieldType.string, config: {}, values: [] },
{ name: 'name', type: FieldType.string, config: {}, values: [] },
{ name: 'uid', type: FieldType.string, config: {}, values: [] },
{ name: 'url', type: FieldType.string, config: {}, values: [] },
{ name: 'tags', type: FieldType.other, config: {}, values: [] },
{ name: 'location', type: FieldType.string, config: {}, values: [] },
],
length: 0,
};
@ -95,12 +95,12 @@ describe('FolderSection', () => {
describe('when there are results', () => {
const searchData: DataFrame = {
fields: [
{ name: 'kind', type: FieldType.string, config: {}, values: new ArrayVector([DashboardSearchItemType.DashDB]) },
{ name: 'name', type: FieldType.string, config: {}, values: new ArrayVector(['My dashboard 1']) },
{ name: 'uid', type: FieldType.string, config: {}, values: new ArrayVector(['my-dashboard-1']) },
{ name: 'url', type: FieldType.string, config: {}, values: new ArrayVector(['/my-dashboard-1']) },
{ name: 'tags', type: FieldType.other, config: {}, values: new ArrayVector([['foo', 'bar']]) },
{ name: 'location', type: FieldType.string, config: {}, values: new ArrayVector(['my-folder-1']) },
{ name: 'kind', type: FieldType.string, config: {}, values: [DashboardSearchItemType.DashDB] },
{ name: 'name', type: FieldType.string, config: {}, values: ['My dashboard 1'] },
{ name: 'uid', type: FieldType.string, config: {}, values: ['my-dashboard-1'] },
{ name: 'url', type: FieldType.string, config: {}, values: ['/my-dashboard-1'] },
{ name: 'tags', type: FieldType.other, config: {}, values: [['foo', 'bar']] },
{ name: 'location', type: FieldType.string, config: {}, values: ['my-folder-1'] },
],
meta: {
custom: {

View File

@ -1,7 +1,7 @@
import { render, screen, act } from '@testing-library/react';
import React from 'react';
import { ArrayVector, DataFrame, DataFrameView, FieldType } from '@grafana/data';
import { DataFrame, DataFrameView, FieldType } from '@grafana/data';
import { selectors } from '@grafana/e2e-selectors';
import { ContextSrv, setContextSrv } from '../../../../core/services/context_srv';
@ -30,11 +30,11 @@ describe('RootFolderView', () => {
name: 'kind',
type: FieldType.string,
config: {},
values: new ArrayVector([DashboardSearchItemType.DashFolder]),
values: [DashboardSearchItemType.DashFolder],
},
{ name: 'name', type: FieldType.string, config: {}, values: new ArrayVector(['My folder 1']) },
{ name: 'uid', type: FieldType.string, config: {}, values: new ArrayVector(['my-folder-1']) },
{ name: 'url', type: FieldType.string, config: {}, values: new ArrayVector(['/my-folder-1']) },
{ name: 'name', type: FieldType.string, config: {}, values: ['My folder 1'] },
{ name: 'uid', type: FieldType.string, config: {}, values: ['my-folder-1'] },
{ name: 'url', type: FieldType.string, config: {}, values: ['/my-folder-1'] },
],
length: 1,
};

View File

@ -2,7 +2,7 @@ import { render, screen } from '@testing-library/react';
import React from 'react';
import { Subject } from 'rxjs';
import { ArrayVector, DataFrame, DataFrameView, FieldType } from '@grafana/data';
import { DataFrame, DataFrameView, FieldType } from '@grafana/data';
import { DashboardQueryResult, getGrafanaSearcher, QueryResponse } from '../../service';
import { DashboardSearchItemType } from '../../types';
@ -19,14 +19,14 @@ describe('SearchResultsCards', () => {
describe('when there is data', () => {
const searchData: DataFrame = {
fields: [
{ name: 'kind', type: FieldType.string, config: {}, values: new ArrayVector([DashboardSearchItemType.DashDB]) },
{ name: 'uid', type: FieldType.string, config: {}, values: new ArrayVector(['my-dashboard-1']) },
{ name: 'name', type: FieldType.string, config: {}, values: new ArrayVector(['My dashboard 1']) },
{ name: 'panel_type', type: FieldType.string, config: {}, values: new ArrayVector(['']) },
{ name: 'url', type: FieldType.string, config: {}, values: new ArrayVector(['/my-dashboard-1']) },
{ name: 'tags', type: FieldType.other, config: {}, values: new ArrayVector([['foo', 'bar']]) },
{ name: 'ds_uid', type: FieldType.other, config: {}, values: new ArrayVector(['']) },
{ name: 'location', type: FieldType.string, config: {}, values: new ArrayVector(['folder0/my-dashboard-1']) },
{ name: 'kind', type: FieldType.string, config: {}, values: [DashboardSearchItemType.DashDB] },
{ name: 'uid', type: FieldType.string, config: {}, values: ['my-dashboard-1'] },
{ name: 'name', type: FieldType.string, config: {}, values: ['My dashboard 1'] },
{ name: 'panel_type', type: FieldType.string, config: {}, values: [''] },
{ name: 'url', type: FieldType.string, config: {}, values: ['/my-dashboard-1'] },
{ name: 'tags', type: FieldType.other, config: {}, values: [['foo', 'bar']] },
{ name: 'ds_uid', type: FieldType.other, config: {}, values: [''] },
{ name: 'location', type: FieldType.string, config: {}, values: ['folder0/my-dashboard-1'] },
],
meta: {
custom: {
@ -90,12 +90,12 @@ describe('SearchResultsCards', () => {
describe('when there is no data', () => {
const emptySearchData: DataFrame = {
fields: [
{ name: 'kind', type: FieldType.string, config: {}, values: new ArrayVector([]) },
{ name: 'name', type: FieldType.string, config: {}, values: new ArrayVector([]) },
{ name: 'uid', type: FieldType.string, config: {}, values: new ArrayVector([]) },
{ name: 'url', type: FieldType.string, config: {}, values: new ArrayVector([]) },
{ name: 'tags', type: FieldType.other, config: {}, values: new ArrayVector([]) },
{ name: 'location', type: FieldType.string, config: {}, values: new ArrayVector([]) },
{ name: 'kind', type: FieldType.string, config: {}, values: [] },
{ name: 'name', type: FieldType.string, config: {}, values: [] },
{ name: 'uid', type: FieldType.string, config: {}, values: [] },
{ name: 'url', type: FieldType.string, config: {}, values: [] },
{ name: 'tags', type: FieldType.other, config: {}, values: [] },
{ name: 'location', type: FieldType.string, config: {}, values: [] },
],
length: 0,
};

View File

@ -2,15 +2,7 @@ import { render, screen } from '@testing-library/react';
import React from 'react';
import { Subject } from 'rxjs';
import {
applyFieldOverrides,
ArrayVector,
createTheme,
DataFrame,
DataFrameView,
FieldType,
toDataFrame,
} from '@grafana/data';
import { applyFieldOverrides, createTheme, DataFrame, DataFrameView, FieldType, toDataFrame } from '@grafana/data';
import { DashboardQueryResult, getGrafanaSearcher, QueryResponse } from '../../service';
import { DashboardSearchItemType } from '../../types';
@ -121,12 +113,12 @@ describe('SearchResultsTable', () => {
describe('when there is no data', () => {
const emptySearchData: DataFrame = {
fields: [
{ name: 'kind', type: FieldType.string, config: {}, values: new ArrayVector([]) },
{ name: 'name', type: FieldType.string, config: {}, values: new ArrayVector([]) },
{ name: 'uid', type: FieldType.string, config: {}, values: new ArrayVector([]) },
{ name: 'url', type: FieldType.string, config: {}, values: new ArrayVector([]) },
{ name: 'tags', type: FieldType.other, config: {}, values: new ArrayVector([]) },
{ name: 'location', type: FieldType.string, config: {}, values: new ArrayVector([]) },
{ name: 'kind', type: FieldType.string, config: {}, values: [] },
{ name: 'name', type: FieldType.string, config: {}, values: [] },
{ name: 'uid', type: FieldType.string, config: {}, values: [] },
{ name: 'url', type: FieldType.string, config: {}, values: [] },
{ name: 'tags', type: FieldType.other, config: {}, values: [] },
{ name: 'location', type: FieldType.string, config: {}, values: [] },
],
length: 0,
};

View File

@ -108,7 +108,7 @@ export const SearchResultsTable = React.memo(
const row = rows[rowIndex];
prepareRow(row);
const url = response.view.fields.url?.values.get(rowIndex);
const url = response.view.fields.url?.values[rowIndex];
let className = styles.rowContainer;
if (rowIndex === highlightIndex.y) {
className += ' ' + styles.selectedRow;

View File

@ -5,7 +5,7 @@ import { Provider } from 'react-redux';
import configureMockStore from 'redux-mock-store';
import { Observable } from 'rxjs';
import { ArrayVector, DataFrame, DataFrameView, FieldType } from '@grafana/data';
import { DataFrame, DataFrameView, FieldType } from '@grafana/data';
import { config } from '@grafana/runtime';
import { DashboardQueryResult, getGrafanaSearcher, QueryResponse } from '../../service';
@ -50,11 +50,11 @@ describe('SearchView', () => {
name: 'kind',
type: FieldType.string,
config: {},
values: new ArrayVector([DashboardSearchItemType.DashFolder]),
values: [DashboardSearchItemType.DashFolder],
},
{ name: 'name', type: FieldType.string, config: {}, values: new ArrayVector(['My folder 1']) },
{ name: 'uid', type: FieldType.string, config: {}, values: new ArrayVector(['my-folder-1']) },
{ name: 'url', type: FieldType.string, config: {}, values: new ArrayVector(['/my-folder-1']) },
{ name: 'name', type: FieldType.string, config: {}, values: ['My folder 1'] },
{ name: 'uid', type: FieldType.string, config: {}, values: ['my-folder-1'] },
{ name: 'url', type: FieldType.string, config: {}, values: ['/my-folder-1'] },
],
length: 1,
};

View File

@ -92,8 +92,8 @@ export const generateColumns = (
);
},
Cell: (p) => {
const uid = uidField.values.get(p.row.index);
const kind = kindField ? kindField.values.get(p.row.index) : 'dashboard'; // HACK for now
const uid = uidField.values[p.row.index];
const kind = kindField ? kindField.values[p.row.index] : 'dashboard'; // HACK for now
const selected = selection(kind, uid);
const hasUID = uid != null; // Panels don't have UID! Likely should not be shown on pages with manage options
return (
@ -120,7 +120,7 @@ export const generateColumns = (
columns.push({
Cell: (p) => {
let classNames = cx(styles.nameCellStyle);
let name = access.name.values.get(p.row.index);
let name = access.name.values[p.row.index];
if (!name?.length) {
const loading = p.row.index >= response.view.dataFrame.length;
name = loading ? 'Loading...' : 'Missing title'; // normal for panels
@ -166,7 +166,7 @@ export const generateColumns = (
availableWidth -= width;
columns.push({
Cell: (p) => {
const parts = (access.location?.values.get(p.row.index) ?? '').split('/');
const parts = (access.location?.values[p.row.index] ?? '').split('/');
return (
<div {...p.cellProps} className={cx(styles.locationCellStyle)}>
{parts.map((p) => {
@ -226,8 +226,8 @@ export const generateColumns = (
new ShowModalReactEvent({
component: ExplainScorePopup,
props: {
name: access.name.values.get(row),
explain: access.explain.values.get(row),
name: access.name.values[row],
explain: access.explain.values[row],
frame: response.view.dataFrame,
row: row,
},
@ -255,7 +255,7 @@ export const generateColumns = (
function hasValue(f: Field): boolean {
for (let i = 0; i < f.values.length; i++) {
if (f.values.get(i) != null) {
if (f.values[i] != null) {
return true;
}
}
@ -276,7 +276,7 @@ function makeDataSourceColumn(
field,
Header: t('search.results-table.datasource-header', 'Data source'),
Cell: (p) => {
const dslist = field.values.get(p.row.index);
const dslist = field.values[p.row.index];
if (!dslist?.length) {
return null;
}
@ -325,7 +325,7 @@ function makeTypeColumn(
Header: t('search.results-table.type-header', 'Type'),
Cell: (p) => {
const i = p.row.index;
const kind = kindField?.values.get(i) ?? 'dashboard';
const kind = kindField?.values[i] ?? 'dashboard';
let icon: IconName = 'apps';
let txt = 'Dashboard';
if (kind) {
@ -342,7 +342,7 @@ function makeTypeColumn(
case 'panel':
icon = `${PluginIconName.panel}`;
const type = typeField.values.get(i);
const type = typeField.values[i];
if (type) {
txt = type;
const info = config.panels[txt];
@ -384,7 +384,7 @@ function makeTagsColumn(
): TableColumn {
return {
Cell: (p) => {
const tags = field.values.get(p.row.index);
const tags = field.values[p.row.index];
return tags ? (
<div {...p.cellProps}>
<TagList className={tagListClass} tags={tags} onClick={onTagSelected} />
@ -409,8 +409,8 @@ function getDisplayValue({
index: number;
getDisplay: DisplayProcessor;
}) {
const value = sortField.values.get(index);
if (['folder', 'panel'].includes(kind.values.get(index)) && value === 0) {
const value = sortField.values[index];
if (['folder', 'panel'].includes(kind.values[index]) && value === 0) {
return '-';
}
return formattedValueToString(getDisplay(value));

View File

@ -25,7 +25,7 @@ describe('FrontendSearcher', () => {
};
const results = await frontendSearcher.search(query);
expect(results.view.fields.name.values.toArray()).toMatchInlineSnapshot(`
expect(results.view.fields.name.values).toMatchInlineSnapshot(`
[
"foo cat",
"bar dog",
@ -43,7 +43,7 @@ describe('FrontendSearcher', () => {
};
const results = await frontendSearcher.search(query);
expect(results.view.fields.name.values.toArray()).toMatchInlineSnapshot(`
expect(results.view.fields.name.values).toMatchInlineSnapshot(`
[
"bar dog",
"cow baz",
@ -60,7 +60,7 @@ describe('FrontendSearcher', () => {
};
const results = await frontendSearcher.search(query);
expect(results.view.fields.name.values.toArray()).toMatchInlineSnapshot(`
expect(results.view.fields.name.values).toMatchInlineSnapshot(`
[
"bar dog",
]

View File

@ -1,6 +1,6 @@
import uFuzzy from '@leeoniya/ufuzzy';
import { DataFrameView, SelectableValue, ArrayVector } from '@grafana/data';
import { DataFrameView, SelectableValue } from '@grafana/data';
import { TermCount } from 'app/core/components/TagFilter/TagFilter';
import { DashboardQueryResult, GrafanaSearcher, QueryResponse, SearchQuery } from '.';
@ -94,12 +94,12 @@ class FullResultCache {
});
constructor(private full: DataFrameView<DashboardQueryResult>) {
this.names = this.full.fields.name.values.toArray();
this.names = this.full.fields.name.values;
// Copy with empty values
this.empty = new DataFrameView<DashboardQueryResult>({
...this.full.dataFrame, // copy folder metadata
fields: this.full.dataFrame.fields.map((v) => ({ ...v, values: new ArrayVector([]) })),
fields: this.full.dataFrame.fields.map((v) => ({ ...v, values: [] })),
length: 0, // for now
});
}
@ -119,7 +119,7 @@ class FullResultCache {
let [idxs, info, order] = this.ufuzzy.search(haystack, query, true);
for (let c = 0; c < allFields.length; c++) {
let src = allFields[c].values.toArray();
let src = allFields[c].values;
let dst = values[c];
// <= 1000 matches (ranked)
@ -140,7 +140,7 @@ class FullResultCache {
// mutates the search object
this.empty.dataFrame.fields.forEach((f, idx) => {
f.values = new ArrayVector(values[idx]); // or just set it?
f.values = values[idx]; // or just set it?
});
this.empty.dataFrame.length = this.empty.dataFrame.fields[0].values.length;

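`FullResultCache` above now writes filtered plain arrays straight back onto `f.values`. Below is a sketch of that filtering pattern with illustrative names (not the uFuzzy-backed implementation): matched row indices are copied into fresh arrays per field, and the frame length is updated to match.

```ts
// Local stand-ins for illustration only.
interface Field {
  name: string;
  values: unknown[];
}

interface DataFrame {
  fields: Field[];
  length: number;
}

function filterFrameRows(frame: DataFrame, matchedRows: number[]): DataFrame {
  const fields = frame.fields.map((field) => ({
    ...field,
    // Plain arrays can be built and assigned directly — no ArrayVector wrapper.
    values: matchedRows.map((row) => field.values[row]),
  }));
  return { fields, length: matchedRows.length };
}

const frame: DataFrame = {
  fields: [
    { name: 'name', values: ['foo cat', 'bar dog', 'cow baz'] },
    { name: 'uid', values: ['a', 'b', 'c'] },
  ],
  length: 3,
};

console.log(filterFrameRows(frame, [1, 2]).fields[0].values); // ['bar dog', 'cow baz']
```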
View File

@ -1,4 +1,4 @@
import { ArrayVector, DataFrame, DataFrameView, FieldType, getDisplayProcessor, SelectableValue } from '@grafana/data';
import { DataFrame, DataFrameView, FieldType, getDisplayProcessor, SelectableValue } from '@grafana/data';
import { config } from '@grafana/runtime';
import { TermCount } from 'app/core/components/TagFilter/TagFilter';
import { backendSrv } from 'app/core/services/backend_srv';
@ -182,12 +182,12 @@ export class SQLSearcher implements GrafanaSearcher {
const data: DataFrame = {
fields: [
{ name: 'kind', type: FieldType.string, config: {}, values: new ArrayVector(kind) },
{ name: 'name', type: FieldType.string, config: {}, values: new ArrayVector(name) },
{ name: 'uid', type: FieldType.string, config: {}, values: new ArrayVector(uid) },
{ name: 'url', type: FieldType.string, config: {}, values: new ArrayVector(url) },
{ name: 'tags', type: FieldType.other, config: {}, values: new ArrayVector(tags) },
{ name: 'location', type: FieldType.string, config: {}, values: new ArrayVector(location) },
{ name: 'kind', type: FieldType.string, config: {}, values: kind },
{ name: 'name', type: FieldType.string, config: {}, values: name },
{ name: 'uid', type: FieldType.string, config: {}, values: uid },
{ name: 'url', type: FieldType.string, config: {}, values: url },
{ name: 'tags', type: FieldType.other, config: {}, values: tags },
{ name: 'location', type: FieldType.string, config: {}, values: location },
],
length: name.length,
meta: {
@ -206,7 +206,7 @@ export class SQLSearcher implements GrafanaSearcher {
name: sortMetaName, // Used in display
type: FieldType.number,
config: {},
values: new ArrayVector(sortBy),
values: sortBy,
});
}

View File

@ -21,7 +21,7 @@ export function StorageFolderPage(props: Props) {
const renderListing = () => {
if (listing.value) {
const names = listing.value.fields[0].values.toArray();
const names = listing.value.fields[0].values;
return names.map((item: string) => {
let name = item;
const isFolder = name.indexOf('.') < 0;

View File

@ -68,7 +68,7 @@ export default function StoragePage(props: Props) {
frame.fields[0] = {
...name,
getLinks: (cfg: ValueLinkConfig) => {
const n = name.values.get(cfg.valueRowIndex ?? 0);
const n = name.values[cfg.valueRowIndex ?? 0];
const p = path + '/' + n;
return [
{
@ -93,7 +93,7 @@ export default function StoragePage(props: Props) {
if (listing.value) {
const length = listing.value.length;
if (length === 1) {
const first = listing.value.fields[0].values.get(0) as string;
const first = listing.value.fields[0].values[0] as string;
isFolder = !path.endsWith(first);
} else {
// TODO: handle files/folders which do not exist
@ -104,12 +104,7 @@ export default function StoragePage(props: Props) {
}, [path, listing]);
const fileNames = useMemo(() => {
return (
listing.value?.fields
?.find((f) => f.name === 'name')
?.values?.toArray()
?.filter((v) => typeof v === 'string') ?? []
);
return listing.value?.fields?.find((f) => f.name === 'name')?.values.filter((v) => typeof v === 'string') ?? [];
}, [listing]);
const renderView = () => {

View File

@ -54,10 +54,10 @@ function getValueForValueMacro(match: string, fieldPath?: string, scopedVars?: S
if (fieldPath === 'time') {
const timeField = frame.fields.find((f) => f.type === FieldType.time);
return timeField ? timeField.values.get(rowIndex) : undefined;
return timeField ? timeField.values[rowIndex] : undefined;
}
const value = field.values.get(rowIndex);
const value = field.values[rowIndex];
if (fieldPath === 'raw') {
return value;
}

View File

@ -53,7 +53,7 @@ export function getFieldConfigFromFrame(
continue;
}
const configValue = field.values.get(rowIndex);
const configValue = field.values[rowIndex];
if (configValue === null || configValue === undefined) {
continue;

View File

@ -72,10 +72,10 @@ export function toMetricFindValues(): OperatorFunction<PanelData, MetricFindValu
for (const frame of frames) {
for (let index = 0; index < frame.length; index++) {
const expandable = expandableIndex !== -1 ? frame.fields[expandableIndex].values.get(index) : undefined;
const string = frame.fields[stringIndex].values.get(index);
const text = textIndex !== -1 ? frame.fields[textIndex].values.get(index) : null;
const value = valueIndex !== -1 ? frame.fields[valueIndex].values.get(index) : null;
const expandable = expandableIndex !== -1 ? frame.fields[expandableIndex].values[index] : undefined;
const string = frame.fields[stringIndex].values[index];
const text = textIndex !== -1 ? frame.fields[textIndex].values[index] : null;
const value = valueIndex !== -1 ? frame.fields[valueIndex].values[index] : null;
if (valueIndex === -1 && textIndex === -1) {
metrics.push({ text: string, value: string, expandable });

View File

@ -4,7 +4,6 @@ import {
DataFrame,
dataFrameToJSON,
MutableDataFrame,
ArrayVector,
DataSourceInstanceSettings,
DataSourceJsonData,
DataSourceRef,
@ -35,15 +34,15 @@ export function setupForLogs() {
fields: [
{
name: '@message',
values: new ArrayVector(['something']),
values: ['something'],
},
{
name: '@timestamp',
values: new ArrayVector([1]),
values: [1],
},
{
name: '@xrayTraceId',
values: new ArrayVector(['1-613f0d6b-3e7cb34375b60662359611bd']),
values: ['1-613f0d6b-3e7cb34375b60662359611bd'],
},
],
meta: { custom: { Status: CloudWatchLogsQueryStatus.Complete } },

View File

@ -128,7 +128,7 @@ export class CloudWatchLogsQueryRunner extends CloudWatchRequest {
// This queries for the results
this.logsQuery(
frames.map((dataFrame) => ({
queryId: dataFrame.fields[0].values.get(0),
queryId: dataFrame.fields[0].values[0],
region: dataFrame.meta?.custom?.['Region'] ?? 'default',
refId: dataFrame.refId!,
statsGroups: logQueries.find((target) => target.refId === dataFrame.refId)?.statsGroups,
@ -350,8 +350,8 @@ export class CloudWatchLogsQueryRunner extends CloudWatchRequest {
limit,
startFromHead: direction !== LogRowContextQueryDirection.Backward,
region: query?.region,
logGroupName: parseLogGroupName(logField!.values.get(row.rowIndex)),
logStreamName: logStreamField!.values.get(row.rowIndex),
logGroupName: parseLogGroupName(logField!.values[row.rowIndex]),
logStreamName: logStreamField!.values[row.rowIndex],
};
if (direction === LogRowContextQueryDirection.Backward) {

View File

@ -120,8 +120,8 @@ describe('runWithRetry', () => {
// dataframe fields
expect(values.length).toBe(1);
expect(values[0].frames.length).toBe(2);
expect(values[0].frames[0].fields[0].values.get(0)).toBe('A');
expect(values[0].frames[1].fields[0].values.get(0)).toBe('B');
expect(values[0].frames[0].fields[0].values[0]).toBe('A');
expect(values[0].frames[1].fields[0].values[0]).toBe('B');
});
it('sends data and also error if only one query gets limit error', async () => {
@ -145,7 +145,7 @@ describe('runWithRetry', () => {
expect(queryFunc).nthCalledWith(1, targets);
expect(values.length).toBe(1);
expect(values[0].frames.length).toBe(1);
expect(values[0].frames[0].fields[0].values.get(0)).toBe('A');
expect(values[0].frames[0].fields[0].values[0]).toBe('A');
expect(values[0].error).toEqual({ message: 'Some queries timed out: LimitExceededException' });
});
@ -190,8 +190,8 @@ describe('runWithRetry', () => {
expect(queryFunc).nthCalledWith(3, [targetC]);
expect(values.length).toBe(1);
expect(values[0].frames.length).toBe(2);
expect(values[0].frames[0].fields[0].values.get(0)).toBe('A');
expect(values[0].frames[1].fields[0].values.get(0)).toBe('B');
expect(values[0].frames[0].fields[0].values[0]).toBe('A');
expect(values[0].frames[1].fields[0].values[0]).toBe('B');
expect(values[0].error).toEqual({ message: 'Some queries timed out: LimitExceededException' });
});
});

View File

@ -315,8 +315,8 @@ describe('ElasticResponse', () => {
const frame = result.data[0];
expect(frame.name).toBe('Count');
expect(frame.length).toBe(2);
expect(getTimeField(frame).values.get(0)).toBe(1000);
expect(getValueField(frame).values.get(0)).toBe(10);
expect(getTimeField(frame).values[0]).toBe(1000);
expect(getValueField(frame).values[0]).toBe(10);
});
});
@ -367,11 +367,11 @@ describe('ElasticResponse', () => {
const frame1 = result.data[0];
const frame2 = result.data[1];
expect(frame1.length).toBe(2);
expect(getValueField(frame1).values.get(0)).toBe(10);
expect(getTimeField(frame1).values.get(0)).toBe(1000);
expect(getValueField(frame1).values[0]).toBe(10);
expect(getTimeField(frame1).values[0]).toBe(1000);
expect(frame2.name).toBe('Average value');
expect(getValueField(frame2).values.toArray()).toEqual([88, 99]);
expect(getValueField(frame2).values).toEqual([88, 99]);
});
});
@ -546,9 +546,9 @@ describe('ElasticResponse', () => {
expect(result.data[0].length).toBe(2);
expect(result.data[0].name).toBe('p75 @value');
expect(result.data[1].name).toBe('p90 @value');
expect(getValueField(result.data[0]).values.get(0)).toBe(3.3);
expect(getTimeField(result.data[0]).values.get(0)).toBe(1000);
expect(getValueField(result.data[1]).values.get(1)).toBe(4.5);
expect(getValueField(result.data[0]).values[0]).toBe(3.3);
expect(getTimeField(result.data[0]).values[0]).toBe(1000);
expect(getValueField(result.data[1]).values[1]).toBe(4.5);
});
});
@ -629,8 +629,8 @@ describe('ElasticResponse', () => {
expect(result.data[0].name).toBe('server1 Max @value');
expect(result.data[1].name).toBe('server1 Std Dev Upper @value');
expect(getValueField(result.data[0]).values.get(0)).toBe(10.2);
expect(getValueField(result.data[1]).values.get(0)).toBe(3);
expect(getValueField(result.data[0]).values[0]).toBe(10.2);
expect(getValueField(result.data[1]).values[0]).toBe(3);
});
});
@ -688,20 +688,20 @@ describe('ElasticResponse', () => {
const firstSeries = result.data[0];
expect(firstSeries.name).toBe('Top Metrics @value');
expect(firstSeries.length).toBe(2);
expect(getTimeField(firstSeries).values.toArray()).toEqual([
expect(getTimeField(firstSeries).values).toEqual([
new Date('2021-01-01T00:00:00.000Z').valueOf(),
new Date('2021-01-01T00:00:10.000Z').valueOf(),
]);
expect(getValueField(firstSeries).values.toArray()).toEqual([1, 1]);
expect(getValueField(firstSeries).values).toEqual([1, 1]);
const secondSeries = result.data[1];
expect(secondSeries.name).toBe('Top Metrics @anotherValue');
expect(secondSeries.length).toBe(2);
expect(getTimeField(secondSeries).values.toArray()).toEqual([
expect(getTimeField(secondSeries).values).toEqual([
new Date('2021-01-01T00:00:00.000Z').valueOf(),
new Date('2021-01-01T00:00:10.000Z').valueOf(),
]);
expect(getValueField(secondSeries).values.toArray()).toEqual([2, 2]);
expect(getValueField(secondSeries).values).toEqual([2, 2]);
});
});
@ -1044,9 +1044,9 @@ describe('ElasticResponse', () => {
expect(field2.name).toBe('p75 value');
expect(field3.name).toBe('p90 value');
expect(field1.values.toArray()).toEqual(['id1', 'id2']);
expect(field2.values.toArray()).toEqual([3.3, 2.3]);
expect(field3.values.toArray()).toEqual([5.5, 4.5]);
expect(field1.values).toEqual(['id1', 'id2']);
expect(field2.values).toEqual([3.3, 2.3]);
expect(field3.values).toEqual([5.5, 4.5]);
});
});
@ -1088,9 +1088,9 @@ describe('ElasticResponse', () => {
it('should include field in metric name', () => {
expect(result.data[0].length).toBe(1);
expect(result.data[0].fields.length).toBe(3);
expect(result.data[0].fields[0].values.toArray()).toEqual(['server-1']);
expect(result.data[0].fields[1].values.toArray()).toEqual([1000]);
expect(result.data[0].fields[2].values.toArray()).toEqual([3000]);
expect(result.data[0].fields[0].values).toEqual(['server-1']);
expect(result.data[0].fields[1].values).toEqual([1000]);
expect(result.data[0].fields[2].values).toEqual([3000]);
});
});
@ -1139,7 +1139,7 @@ describe('ElasticResponse', () => {
expect(fields.length).toBe(1);
const field = fields[0];
expect(field.type === FieldType.other);
const values = field.values.toArray();
const values = field.values;
expect(values.length).toBe(2);
expect(values[0].sourceProp).toBe('asd');
expect(values[0].fieldProp).toBe('field');
@ -1206,12 +1206,12 @@ describe('ElasticResponse', () => {
expect(result.data[0].name).toBe('Sum @value');
expect(result.data[1].name).toBe('Max @value');
expect(result.data[2].name).toBe('Sum @value * Max @value');
expect(getValueField(result.data[0]).values.get(0)).toBe(2);
expect(getValueField(result.data[1]).values.get(0)).toBe(3);
expect(getValueField(result.data[2]).values.get(0)).toBe(6);
expect(getValueField(result.data[0]).values.get(1)).toBe(3);
expect(getValueField(result.data[1]).values.get(1)).toBe(4);
expect(getValueField(result.data[2]).values.get(1)).toBe(12);
expect(getValueField(result.data[0]).values[0]).toBe(2);
expect(getValueField(result.data[1]).values[0]).toBe(3);
expect(getValueField(result.data[2]).values[0]).toBe(6);
expect(getValueField(result.data[0]).values[1]).toBe(3);
expect(getValueField(result.data[1]).values[1]).toBe(4);
expect(getValueField(result.data[2]).values[1]).toBe(12);
});
});
@ -1286,11 +1286,11 @@ describe('ElasticResponse', () => {
expect(frame.length).toBe(2);
const { fields } = frame;
expect(fields.length).toBe(5);
expect(fields[0].values.toArray()).toEqual([1000, 2000]);
expect(fields[1].values.toArray()).toEqual([2, 3]);
expect(fields[2].values.toArray()).toEqual([3, 4]);
expect(fields[3].values.toArray()).toEqual([6, 12]);
expect(fields[4].values.toArray()).toEqual([24, 48]);
expect(fields[0].values).toEqual([1000, 2000]);
expect(fields[1].values).toEqual([2, 3]);
expect(fields[2].values).toEqual([3, 4]);
expect(fields[3].values).toEqual([6, 12]);
expect(fields[4].values).toEqual([24, 48]);
});
});
@ -1338,7 +1338,7 @@ describe('ElasticResponse', () => {
it('should have time field values in DateTime format', () => {
const timeField = result.data[0].fields.find((field) => field.name === '@timestamp');
expect(timeField).toBeDefined();
expect(timeField?.values.get(0)).toBe(1546300800000);
expect(timeField?.values[0]).toBe(1546300800000);
});
});
@ -1467,14 +1467,14 @@ describe('ElasticResponse', () => {
const result = new ElasticResponse(targets, response).getLogs(undefined, 'level');
const fieldCache = new FieldCache(result.data[0]);
const field = fieldCache.getFieldByName('level');
expect(field?.values.toArray()).toEqual(['debug', 'error']);
expect(field?.values).toEqual(['debug', 'error']);
});
it('should re map levels field to new field', () => {
const result = new ElasticResponse(targets, response).getLogs(undefined, 'fields.lvl');
const fieldCache = new FieldCache(result.data[0]);
const field = fieldCache.getFieldByName('level');
expect(field?.values.toArray()).toEqual(['debug', 'info']);
expect(field?.values).toEqual(['debug', 'info']);
});
it('should correctly guess field types', () => {

View File

@ -604,7 +604,7 @@ export class ElasticResponse {
for (let frame of dataFrame) {
for (let field of frame.fields) {
if (field.type === FieldType.time && typeof field.values.get(0) !== 'number') {
if (field.type === FieldType.time && typeof field.values[0] !== 'number') {
field.values = convertFieldType(field, { destinationType: FieldType.time }).values;
}
}

View File

@ -528,7 +528,7 @@ export class ElasticDatasource
);
} else {
const sortField = row.dataFrame.fields.find((f) => f.name === 'sort');
const searchAfter = sortField?.values.get(row.rowIndex) || [row.timeEpochMs];
const searchAfter = sortField?.values[row.rowIndex] || [row.timeEpochMs];
const sort = options?.direction === LogRowContextQueryDirection.Forward ? 'asc' : 'desc';
const header =
@ -1125,7 +1125,7 @@ export class ElasticDatasource
// Sorting of results in the context query
sortDirection: direction === LogRowContextQueryDirection.Backward ? 'desc' : 'asc',
// Used to get the next log lines before/after the current log line using sort field of selected log line
searchAfter: row.dataFrame.fields.find((f) => f.name === 'sort')?.values.get(row.rowIndex) ?? [row.timeEpochMs],
searchAfter: row.dataFrame.fields.find((f) => f.name === 'sort')?.values[row.rowIndex] ?? [row.timeEpochMs],
},
};

View File

@ -134,7 +134,7 @@ export class UnthemedQueryEditor extends PureComponent<Props, State> {
next: (rsp) => {
if (rsp.data.length) {
const names = (rsp.data[0] as DataFrame).fields[0];
const folders = names.values.toArray().map((v) => ({
const folders = names.values.map((v) => ({
value: v,
label: v,
}));

View File

@ -178,7 +178,7 @@ describe('graphiteDatasource', () => {
it('should convert to millisecond resolution', async () => {
await expect(response).toEmitValuesWith((values: any) => {
const results = values[0];
expect(results.data[0].fields[1].values.get(0)).toBe(10);
expect(results.data[0].fields[1].values[0]).toBe(10);
});
});
});

View File

@ -376,8 +376,8 @@ export class GraphiteDatasource
const target = result.data[i];
for (let y = 0; y < target.length; y++) {
const time = target.fields[0].values.get(y);
const value = target.fields[1].values.get(y);
const time = target.fields[0].values[y];
const value = target.fields[1].values[y];
if (!value) {
continue;

View File

@ -4,7 +4,6 @@ import { catchError, map } from 'rxjs/operators';
import {
AnnotationEvent,
ArrayVector,
DataFrame,
DataQueryError,
DataQueryRequest,
@ -89,7 +88,7 @@ function timeSeriesToDataFrame(timeSeries: TimeSeries): DataFrame {
name: TIME_SERIES_TIME_FIELD_NAME,
type: FieldType.time,
config: {},
values: new ArrayVector<number>(times),
values: times,
};
const valueField = {
@ -98,7 +97,7 @@ function timeSeriesToDataFrame(timeSeries: TimeSeries): DataFrame {
config: {
displayNameFromDS: timeSeries.title,
},
values: new ArrayVector<unknown>(values),
values: values,
labels: timeSeries.tags,
};

View File

@ -234,12 +234,12 @@ function getTableCols(dfs: DataFrame[], table: TableModel, target: InfluxQuery):
}
function getTableRows(dfs: DataFrame[], table: TableModel, labels: string[]): TableModel {
const values = dfs[0].fields[0].values.toArray();
const values = dfs[0].fields[0].values;
for (let i = 0; i < values.length; i++) {
const time = values[i];
const metrics = dfs.map((df: DataFrame) => {
return df.fields[1] ? df.fields[1].values.toArray()[i] : null;
return df.fields[1] ? df.fields[1].values[i] : null;
});
if (metrics.indexOf(null) < 0) {
table.rows.push([time, ...labels, ...metrics]);

View File

@ -125,7 +125,7 @@ export class LogContextProvider {
if (tsField === undefined) {
throw new Error('loki: data frame missing time-field, should never happen');
}
const tsValue = tsField.values.get(row.rowIndex);
const tsValue = tsField.values[row.rowIndex];
const timestamp = toUtc(tsValue);
const range =

View File

@ -1,6 +1,6 @@
import { cloneDeep } from 'lodash';
import { ArrayVector, DataFrame, DataQueryResponse, Field, FieldType } from '@grafana/data';
import { DataFrame, DataQueryResponse, Field, FieldType } from '@grafana/data';
import { transformBackendResult } from './backendResultTransformer';
@ -31,34 +31,34 @@ const inputFrame: DataFrame = {
name: 'Time',
type: FieldType.time,
config: {},
values: new ArrayVector([1645030244810, 1645030247027]),
values: [1645030244810, 1645030247027],
},
{
name: 'Line',
type: FieldType.string,
config: {},
values: new ArrayVector(['line1', 'line2']),
values: ['line1', 'line2'],
},
{
name: 'labels',
type: FieldType.other,
config: {},
values: new ArrayVector([
values: [
{ level: 'info', code: '41🌙' },
{ level: 'error', code: '41🌙' },
]),
],
},
{
name: 'tsNs',
type: FieldType.string,
config: {},
values: new ArrayVector(['1645030244810757120', '1645030247027735040']),
values: ['1645030244810757120', '1645030247027735040'],
},
{
name: 'id',
type: FieldType.string,
config: {},
values: new ArrayVector(['id1', 'id2']),
values: ['id1', 'id2'],
},
],
length: 5,
@ -132,13 +132,13 @@ describe('loki backendResultTransformer', () => {
{
name: 'time',
config: {},
values: new ArrayVector([1]),
values: [1],
type: FieldType.time,
},
{
name: 'line',
config: {},
values: new ArrayVector(['line1']),
values: ['line1'],
type: FieldType.string,
},
],
@ -167,10 +167,10 @@ describe('loki backendResultTransformer', () => {
name: 'labels',
type: FieldType.string,
config: {},
values: new ArrayVector([
values: [
{ level: 'info', code: '41🌙', __error__: 'LogfmtParserErr' },
{ level: 'error', code: '41🌙' },
]),
],
};
const response: DataQueryResponse = { data: [clonedFrame] };

View File

@ -2,7 +2,7 @@ import { css } from '@emotion/css';
import cx from 'classnames';
import React, { ReactNode, useState } from 'react';
import { ArrayVector, Field, FieldType, LinkModel } from '@grafana/data';
import { Field, FieldType, LinkModel } from '@grafana/data';
import { LegacyForms } from '@grafana/ui';
import { getFieldLinksForExplore } from '../../../../features/explore/utils/links';
@ -102,7 +102,7 @@ function makeDebugFields(derivedFields: DerivedFieldConfig[], debugText: string)
field: {
name: '',
type: FieldType.string,
values: new ArrayVector([value]),
values: [value],
config: {
links: [{ title: '', url: field.url }],
},

View File

@ -5,7 +5,6 @@ import { getQueryOptions } from 'test/helpers/getQueryOptions';
import {
AbstractLabelOperator,
AnnotationQueryRequest,
ArrayVector,
CoreApp,
DataFrame,
dataFrameToJSON,
@ -57,19 +56,19 @@ const testFrame: DataFrame = {
name: 'Time',
type: FieldType.time,
config: {},
values: new ArrayVector([1, 2]),
values: [1, 2],
},
{
name: 'Line',
type: FieldType.string,
config: {},
values: new ArrayVector(['line1', 'line2']),
values: ['line1', 'line2'],
},
{
name: 'labels',
type: FieldType.other,
config: {},
values: new ArrayVector([
values: [
{
label: 'value',
label2: 'value ',
@ -79,19 +78,19 @@ const testFrame: DataFrame = {
label2: 'value2',
label3: ' ',
},
]),
],
},
{
name: 'tsNs',
type: FieldType.string,
config: {},
values: new ArrayVector(['1000000', '2000000']),
values: ['1000000', '2000000'],
},
{
name: 'id',
type: FieldType.string,
config: {},
values: new ArrayVector(['id1', 'id2']),
values: ['id1', 'id2'],
},
],
length: 2,
@ -400,19 +399,19 @@ describe('LokiDatasource', () => {
name: 'Time',
type: FieldType.time,
config: {},
values: new ArrayVector([1, 2]),
values: [1, 2],
},
{
name: 'Line',
type: FieldType.string,
config: {},
values: new ArrayVector(['hello', 'hello 2']),
values: ['hello', 'hello 2'],
},
{
name: 'labels',
type: FieldType.other,
config: {},
values: new ArrayVector([
values: [
{
label: 'value',
label2: 'value ',
@ -422,19 +421,19 @@ describe('LokiDatasource', () => {
label2: 'value2',
label3: ' ',
},
]),
],
},
{
name: 'tsNs',
type: FieldType.string,
config: {},
values: new ArrayVector(['1000000', '2000000']),
values: ['1000000', '2000000'],
},
{
name: 'id',
type: FieldType.string,
config: {},
values: new ArrayVector(['id1', 'id2']),
values: ['id1', 'id2'],
},
],
length: 2,
@ -457,37 +456,37 @@ describe('LokiDatasource', () => {
name: 'Time',
type: FieldType.time,
config: {},
values: new ArrayVector([1]),
values: [1],
},
{
name: 'Line',
type: FieldType.string,
config: {},
values: new ArrayVector(['hello']),
values: ['hello'],
},
{
name: 'labels',
type: FieldType.other,
config: {},
values: new ArrayVector([
values: [
{
label: 'value',
label2: 'value2',
label3: 'value3',
},
]),
],
},
{
name: 'tsNs',
type: FieldType.string,
config: {},
values: new ArrayVector(['1000000']),
values: ['1000000'],
},
{
name: 'id',
type: FieldType.string,
config: {},
values: new ArrayVector(['id1']),
values: ['id1'],
},
],
length: 1,

View File

@ -39,14 +39,14 @@ describe('getDerivedFields', () => {
]);
expect(newFields.length).toBe(2);
const trace1 = newFields.find((f) => f.name === 'trace1');
expect(trace1!.values.toArray()).toEqual([null, '1234', null]);
expect(trace1!.values).toEqual([null, '1234', null]);
expect(trace1!.config.links![0]).toEqual({
url: 'http://localhost/${__value.raw}',
title: '',
});
const trace2 = newFields.find((f) => f.name === 'trace2');
expect(trace2!.values.toArray()).toEqual([null, null, 'foo']);
expect(trace2!.values).toEqual([null, null, 'foo']);
expect(trace2!.config.links!.length).toBe(2);
expect(trace2!.config.links![0]).toEqual({
title: '',

View File

@ -1,6 +1,6 @@
import { groupBy } from 'lodash';
import { FieldType, DataFrame, ArrayVector, DataLink, Field } from '@grafana/data';
import { FieldType, DataFrame, DataLink, Field } from '@grafana/data';
import { getDataSourceSrv } from '@grafana/runtime';
import { DerivedFieldConfig } from './types';
@ -22,7 +22,7 @@ export function getDerivedFields(dataFrame: DataFrame, derivedFieldConfigs: Deri
throw new Error('invalid logs-dataframe, string-field missing');
}
lineField.values.toArray().forEach((line) => {
lineField.values.forEach((line) => {
for (const field of newFields) {
const logMatch = line.match(derivedFieldsGrouped[field.name][0].matcherRegex);
field.values.add(logMatch && logMatch[1]);
@ -35,7 +35,7 @@ export function getDerivedFields(dataFrame: DataFrame, derivedFieldConfigs: Deri
/**
* Transform derivedField config into dataframe field with config that contains link.
*/
function fieldFromDerivedFieldConfig(derivedFieldConfigs: DerivedFieldConfig[]): Field<any, ArrayVector> {
function fieldFromDerivedFieldConfig(derivedFieldConfigs: DerivedFieldConfig[]): Field {
const dataSourceSrv = getDataSourceSrv();
const dataLinks = derivedFieldConfigs.reduce<DataLink[]>((acc, derivedFieldConfig) => {
@ -72,6 +72,6 @@ function fieldFromDerivedFieldConfig(derivedFieldConfigs: DerivedFieldConfig[]):
links: dataLinks,
},
// We are adding values later on
values: new ArrayVector<string>([]),
values: [],
};
}

View File

@ -1,4 +1,4 @@
import { ArrayVector, DataFrame, FieldType } from '@grafana/data';
import { DataFrame, FieldType } from '@grafana/data';
import { makeTableFrames } from './makeTableFrames';
@ -13,7 +13,7 @@ const frame1: DataFrame = {
name: 'Time',
type: FieldType.time,
config: {},
values: new ArrayVector([1645029699311]),
values: [1645029699311],
},
{
name: 'Value',
@ -26,7 +26,7 @@ const frame1: DataFrame = {
config: {
displayNameFromDS: '{level="error", location="moon", protocol="http"}',
},
values: new ArrayVector([23]),
values: [23],
},
],
length: 1,
@ -43,7 +43,7 @@ const frame2: DataFrame = {
name: 'Time',
type: FieldType.time,
config: {},
values: new ArrayVector([1645029699311]),
values: [1645029699311],
},
{
name: 'Value',
@ -56,7 +56,7 @@ const frame2: DataFrame = {
config: {
displayNameFromDS: '{level="info", location="moon", protocol="http"}',
},
values: new ArrayVector([45]),
values: [45],
},
],
length: 1,
@ -73,7 +73,7 @@ const frame3: DataFrame = {
name: 'Time',
type: FieldType.time,
config: {},
values: new ArrayVector([1645029699311]),
values: [1645029699311],
},
{
name: 'Value',
@ -86,7 +86,7 @@ const frame3: DataFrame = {
config: {
displayNameFromDS: '{level="error", location="moon", protocol="http"}',
},
values: new ArrayVector([72]),
values: [72],
},
],
length: 1,
@ -95,11 +95,11 @@ const frame3: DataFrame = {
const outputSingle = [
{
fields: [
{ config: {}, name: 'Time', type: 'time', values: new ArrayVector([1645029699311]) },
{ config: { filterable: true }, name: 'level', type: 'string', values: new ArrayVector(['error']) },
{ config: { filterable: true }, name: 'location', type: 'string', values: new ArrayVector(['moon']) },
{ config: { filterable: true }, name: 'protocol', type: 'string', values: new ArrayVector(['http']) },
{ config: {}, name: 'Value #A', type: 'number', values: new ArrayVector([23]) },
{ config: {}, name: 'Time', type: 'time', values: [1645029699311] },
{ config: { filterable: true }, name: 'level', type: 'string', values: ['error'] },
{ config: { filterable: true }, name: 'location', type: 'string', values: ['moon'] },
{ config: { filterable: true }, name: 'protocol', type: 'string', values: ['http'] },
{ config: {}, name: 'Value #A', type: 'number', values: [23] },
],
length: 1,
meta: { preferredVisualisationType: 'table' },
@ -110,11 +110,11 @@ const outputSingle = [
const outputMulti = [
{
fields: [
{ config: {}, name: 'Time', type: 'time', values: new ArrayVector([1645029699311, 1645029699311]) },
{ config: { filterable: true }, name: 'level', type: 'string', values: new ArrayVector(['error', 'info']) },
{ config: { filterable: true }, name: 'location', type: 'string', values: new ArrayVector(['moon', 'moon']) },
{ config: { filterable: true }, name: 'protocol', type: 'string', values: new ArrayVector(['http', 'http']) },
{ config: {}, name: 'Value #A', type: 'number', values: new ArrayVector([23, 45]) },
{ config: {}, name: 'Time', type: 'time', values: [1645029699311, 1645029699311] },
{ config: { filterable: true }, name: 'level', type: 'string', values: ['error', 'info'] },
{ config: { filterable: true }, name: 'location', type: 'string', values: ['moon', 'moon'] },
{ config: { filterable: true }, name: 'protocol', type: 'string', values: ['http', 'http'] },
{ config: {}, name: 'Value #A', type: 'number', values: [23, 45] },
],
length: 2,
meta: { preferredVisualisationType: 'table' },
@ -122,11 +122,11 @@ const outputMulti = [
},
{
fields: [
{ config: {}, name: 'Time', type: 'time', values: new ArrayVector([1645029699311]) },
{ config: { filterable: true }, name: 'level', type: 'string', values: new ArrayVector(['error']) },
{ config: { filterable: true }, name: 'location', type: 'string', values: new ArrayVector(['moon']) },
{ config: { filterable: true }, name: 'protocol', type: 'string', values: new ArrayVector(['http']) },
{ config: {}, name: 'Value #B', type: 'number', values: new ArrayVector([72]) },
{ config: {}, name: 'Time', type: 'time', values: [1645029699311] },
{ config: { filterable: true }, name: 'level', type: 'string', values: ['error'] },
{ config: { filterable: true }, name: 'location', type: 'string', values: ['moon'] },
{ config: { filterable: true }, name: 'protocol', type: 'string', values: ['http'] },
{ config: {}, name: 'Value #B', type: 'number', values: [72] },
],
length: 1,
meta: { preferredVisualisationType: 'table' },

View File

@ -1,6 +1,6 @@
import { groupBy } from 'lodash';
import { DataFrame, Field, FieldType, ArrayVector } from '@grafana/data';
import { DataFrame, Field, FieldType } from '@grafana/data';
export function makeTableFrames(instantMetricFrames: DataFrame[]): DataFrame[] {
// first we remove frames that have no refId
@ -12,15 +12,15 @@ export function makeTableFrames(instantMetricFrames: DataFrame[]): DataFrame[] {
return Object.entries(framesByRefId).map(([refId, frames]) => makeTableFrame(frames, refId));
}
type NumberField = Field<number, ArrayVector<number>>;
type StringField = Field<string, ArrayVector<string>>;
type NumberField = Field<number, number[]>;
type StringField = Field<string, string[]>;
function makeTableFrame(instantMetricFrames: DataFrame[], refId: string): DataFrame {
const tableTimeField: NumberField = { name: 'Time', config: {}, values: new ArrayVector(), type: FieldType.time };
const tableTimeField: NumberField = { name: 'Time', config: {}, values: [], type: FieldType.time };
const tableValueField: NumberField = {
name: `Value #${refId}`,
config: {},
values: new ArrayVector(),
values: [],
type: FieldType.number,
};
@ -34,7 +34,7 @@ function makeTableFrame(instantMetricFrames: DataFrame[], refId: string): DataFr
const labelFields: StringField[] = sortedLabelNames.map((labelName) => ({
name: labelName,
config: { filterable: true },
values: new ArrayVector(),
values: [],
type: FieldType.string,
}));
@ -45,15 +45,15 @@ function makeTableFrame(instantMetricFrames: DataFrame[], refId: string): DataFr
return;
}
const timeArray = timeField.values.toArray();
const valueArray = valueField.values.toArray();
const timeArray = timeField.values;
const valueArray = valueField.values;
for (let x of timeArray) {
tableTimeField.values.add(x);
tableTimeField.values.push(x);
}
for (let x of valueArray) {
tableValueField.values.add(x);
tableValueField.values.push(x);
}
const labels = valueField.labels ?? {};
@ -62,7 +62,7 @@ function makeTableFrame(instantMetricFrames: DataFrame[], refId: string): DataFr
const text = labels[f.name] ?? '';
// we insert the labels as many times as we have values
for (let i = 0; i < valueArray.length; i++) {
f.values.add(text);
f.values.push(text);
}
}
});
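
A minimal sketch of the append pattern above, reusing the Field<number, number[]> typing from this file's diff; buildColumn is a hypothetical helper, not part of the commit.

import { Field, FieldType } from '@grafana/data';

type NumberColumn = Field<number, number[]>;

// Hypothetical helper: with plain-array values, rows are appended with push()
// where the Vector-based code previously called add().
function buildColumn(name: string, rows: number[]): NumberColumn {
  const column: NumberColumn = { name, config: {}, type: FieldType.number, values: [] };
  for (const row of rows) {
    column.values.push(row);
  }
  return column;
}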

View File

@ -1,5 +1,4 @@
import {
ArrayVector,
DataFrame,
DataFrameType,
DataSourceInstanceSettings,
@ -117,38 +116,38 @@ export function getMockFrames() {
name: 'Time',
type: FieldType.time,
config: {},
values: new ArrayVector([3, 4]),
values: [3, 4],
},
{
name: 'Line',
type: FieldType.string,
config: {},
values: new ArrayVector(['line1', 'line2']),
values: ['line1', 'line2'],
},
{
name: 'labels',
type: FieldType.other,
config: {},
values: new ArrayVector([
values: [
{
label: 'value',
},
{
otherLabel: 'other value',
},
]),
],
},
{
name: 'tsNs',
type: FieldType.string,
config: {},
values: new ArrayVector(['3000000', '4000000']),
values: ['3000000', '4000000'],
},
{
name: 'id',
type: FieldType.string,
config: {},
values: new ArrayVector(['id1', 'id2']),
values: ['id1', 'id2'],
},
],
meta: {
@ -170,35 +169,35 @@ export function getMockFrames() {
name: 'Time',
type: FieldType.time,
config: {},
values: new ArrayVector([1, 2]),
values: [1, 2],
},
{
name: 'Line',
type: FieldType.string,
config: {},
values: new ArrayVector(['line3', 'line4']),
values: ['line3', 'line4'],
},
{
name: 'labels',
type: FieldType.other,
config: {},
values: new ArrayVector([
values: [
{
otherLabel: 'other value',
},
]),
],
},
{
name: 'tsNs',
type: FieldType.string,
config: {},
values: new ArrayVector(['1000000', '2000000']),
values: ['1000000', '2000000'],
},
{
name: 'id',
type: FieldType.string,
config: {},
values: new ArrayVector(['id3', 'id4']),
values: ['id3', 'id4'],
},
],
meta: {
@ -220,13 +219,13 @@ export function getMockFrames() {
name: 'Time',
type: FieldType.time,
config: {},
values: new ArrayVector([3000000, 4000000]),
values: [3000000, 4000000],
},
{
name: 'Value',
type: FieldType.number,
config: {},
values: new ArrayVector([5, 4]),
values: [5, 4],
labels: {
level: 'debug',
},
@ -249,13 +248,13 @@ export function getMockFrames() {
name: 'Time',
type: FieldType.time,
config: {},
values: new ArrayVector([1000000, 2000000]),
values: [1000000, 2000000],
},
{
name: 'Value',
type: FieldType.number,
config: {},
values: new ArrayVector([6, 7]),
values: [6, 7],
labels: {
level: 'debug',
},
@ -279,13 +278,13 @@ export function getMockFrames() {
name: 'Time',
type: FieldType.time,
config: {},
values: new ArrayVector([3000000, 4000000]),
values: [3000000, 4000000],
},
{
name: 'Value',
type: FieldType.number,
config: {},
values: new ArrayVector([6, 7]),
values: [6, 7],
labels: {
level: 'error',
},

View File

@ -1,4 +1,4 @@
import { ArrayVector, DataFrame, FieldType } from '@grafana/data';
import { DataFrame, FieldType } from '@grafana/data';
import { getQueryHints } from './queryHints';
@ -12,7 +12,7 @@ describe('getQueryHints', () => {
name: 'Line',
type: FieldType.string,
config: {},
values: new ArrayVector(['{"foo": "bar", "bar": "baz"}', '{"foo": "bar", "bar": "baz"}']),
values: ['{"foo": "bar", "bar": "baz"}', '{"foo": "bar", "bar": "baz"}'],
},
],
};
@ -39,7 +39,7 @@ describe('getQueryHints', () => {
name: 'Line',
type: FieldType.string,
config: {},
values: new ArrayVector(['foo="bar" bar="baz"', 'foo="bar" bar="baz"']),
values: ['foo="bar" bar="baz"', 'foo="bar" bar="baz"'],
},
],
};
@ -66,7 +66,7 @@ describe('getQueryHints', () => {
name: 'Line',
type: FieldType.string,
config: {},
values: new ArrayVector(['{"foo": "bar", "bar": "baz"}', 'foo="bar" bar="baz"']),
values: ['{"foo": "bar", "bar": "baz"}', 'foo="bar" bar="baz"'],
},
],
};
@ -99,7 +99,7 @@ describe('getQueryHints', () => {
name: 'Line',
type: FieldType.string,
config: {},
values: new ArrayVector(['{"_entry": "bar", "bar": "baz"}']),
values: ['{"_entry": "bar", "bar": "baz"}'],
},
],
};
@ -139,13 +139,13 @@ describe('getQueryHints', () => {
name: 'Line',
type: FieldType.string,
config: {},
values: new ArrayVector(['{"foo": "bar", "bar": "baz"}', 'foo="bar" bar="baz"']),
values: ['{"foo": "bar", "bar": "baz"}', 'foo="bar" bar="baz"'],
},
{
name: 'labels',
type: FieldType.other,
config: {},
values: new ArrayVector([labelVariable, { job: 'baz', foo: 'bar' }]),
values: [labelVariable, { job: 'baz', foo: 'bar' }],
},
],
};
@ -172,7 +172,7 @@ describe('getQueryHints', () => {
name: 'Line',
type: FieldType.string,
config: {},
values: new ArrayVector(['{"foo": "bar", "bar": "baz"}', 'foo="bar" bar="baz"']),
values: ['{"foo": "bar", "bar": "baz"}', 'foo="bar" bar="baz"'],
},
],
};
@ -199,7 +199,7 @@ describe('getQueryHints', () => {
name: 'Line',
type: FieldType.string,
config: {},
values: new ArrayVector(['{"foo": "bar", "bar": "baz"}', 'foo="bar" bar="baz"']),
values: ['{"foo": "bar", "bar": "baz"}', 'foo="bar" bar="baz"'],
},
],
};
@ -226,7 +226,7 @@ describe('getQueryHints', () => {
name: 'labels',
type: FieldType.other,
config: {},
values: new ArrayVector([{ __error__: 'some error', job: 'a' }]),
values: [{ __error__: 'some error', job: 'a' }],
},
],
};

View File

@ -1,6 +1,6 @@
import { cloneDeep } from 'lodash';
import { ArrayVector, DataQueryResponse, QueryResultMetaStat, DataFrame, FieldType } from '@grafana/data';
import { DataQueryResponse, QueryResultMetaStat, DataFrame, FieldType } from '@grafana/data';
import { getMockFrames } from './mocks';
import {
@ -21,19 +21,19 @@ const frame: DataFrame = {
name: 'Time',
config: {},
type: FieldType.time,
values: new ArrayVector([1]),
values: [1],
},
{
name: 'labels',
config: {},
type: FieldType.other,
values: new ArrayVector([{ level: 'info' }]),
values: [{ level: 'info' }],
},
{
name: 'Line',
config: {},
type: FieldType.string,
values: new ArrayVector(['line1']),
values: ['line1'],
},
],
};
@ -41,7 +41,7 @@ const frame: DataFrame = {
describe('dataFrameHasParsingError', () => {
it('handles frame with parsing error', () => {
const input = cloneDeep(frame);
input.fields[1].values = new ArrayVector([{ level: 'info', __error__: 'error' }]);
input.fields[1].values = [{ level: 'info', __error__: 'error' }];
expect(dataFrameHasLokiError(input)).toBe(true);
});
it('handles frame without parsing error', () => {
@ -53,12 +53,12 @@ describe('dataFrameHasParsingError', () => {
describe('dataFrameHasLevelLabel', () => {
it('returns true if level label is present', () => {
const input = cloneDeep(frame);
input.fields[1].values = new ArrayVector([{ level: 'info' }]);
input.fields[1].values = [{ level: 'info' }];
expect(dataFrameHasLevelLabel(input)).toBe(true);
});
it('returns false if level label is not present', () => {
const input = cloneDeep(frame);
input.fields[1].values = new ArrayVector([{ foo: 'bar' }]);
input.fields[1].values = [{ foo: 'bar' }];
expect(dataFrameHasLevelLabel(input)).toBe(false);
});
});
@ -66,17 +66,17 @@ describe('dataFrameHasLevelLabel', () => {
describe('extractLevelLikeLabelFromDataFrame', () => {
it('returns label if lvl label is present', () => {
const input = cloneDeep(frame);
input.fields[1].values = new ArrayVector([{ lvl: 'info' }]);
input.fields[1].values = [{ lvl: 'info' }];
expect(extractLevelLikeLabelFromDataFrame(input)).toBe('lvl');
});
it('returns label if level-like label is present', () => {
const input = cloneDeep(frame);
input.fields[1].values = new ArrayVector([{ error_level: 'info' }]);
input.fields[1].values = [{ error_level: 'info' }];
expect(extractLevelLikeLabelFromDataFrame(input)).toBe('error_level');
});
it('returns undefined if no level-like label is present', () => {
const input = cloneDeep(frame);
input.fields[1].values = new ArrayVector([{ foo: 'info' }]);
input.fields[1].values = [{ foo: 'info' }];
expect(extractLevelLikeLabelFromDataFrame(input)).toBe(null);
});
});
@ -88,12 +88,12 @@ describe('extractLogParserFromDataFrame', () => {
});
it('identifies JSON', () => {
const input = cloneDeep(frame);
input.fields[2].values = new ArrayVector(['{"a":"b"}']);
input.fields[2].values = ['{"a":"b"}'];
expect(extractLogParserFromDataFrame(input)).toEqual({ hasJSON: true, hasLogfmt: false, hasPack: false });
});
it('identifies logfmt', () => {
const input = cloneDeep(frame);
input.fields[2].values = new ArrayVector(['a=b']);
input.fields[2].values = ['a=b'];
expect(extractLogParserFromDataFrame(input)).toEqual({ hasJSON: false, hasLogfmt: true, hasPack: false });
});
});
@ -101,7 +101,7 @@ describe('extractLogParserFromDataFrame', () => {
describe('extractLabelKeysFromDataFrame', () => {
it('returns empty by default', () => {
const input = cloneDeep(frame);
input.fields[1].values = new ArrayVector([]);
input.fields[1].values = [];
expect(extractLabelKeysFromDataFrame(input)).toEqual([]);
});
it('extracts label keys', () => {
@ -113,12 +113,12 @@ describe('extractLabelKeysFromDataFrame', () => {
describe('extractUnwrapLabelKeysFromDataFrame', () => {
it('returns empty by default', () => {
const input = cloneDeep(frame);
input.fields[1].values = new ArrayVector([]);
input.fields[1].values = [];
expect(extractUnwrapLabelKeysFromDataFrame(input)).toEqual([]);
});
it('extracts possible unwrap label keys', () => {
const input = cloneDeep(frame);
input.fields[1].values = new ArrayVector([{ number: 13 }]);
input.fields[1].values = [{ number: 13 }];
expect(extractUnwrapLabelKeysFromDataFrame(input)).toEqual(['number']);
});
});
@ -152,19 +152,19 @@ describe('combineResponses', () => {
config: {},
name: 'Time',
type: 'time',
values: new ArrayVector([1, 2, 3, 4]),
values: [1, 2, 3, 4],
},
{
config: {},
name: 'Line',
type: 'string',
values: new ArrayVector(['line3', 'line4', 'line1', 'line2']),
values: ['line3', 'line4', 'line1', 'line2'],
},
{
config: {},
name: 'labels',
type: 'other',
values: new ArrayVector([
values: [
{
otherLabel: 'other value',
},
@ -174,19 +174,19 @@ describe('combineResponses', () => {
{
otherLabel: 'other value',
},
]),
],
},
{
config: {},
name: 'tsNs',
type: 'string',
values: new ArrayVector(['1000000', '2000000', '3000000', '4000000']),
values: ['1000000', '2000000', '3000000', '4000000'],
},
{
config: {},
name: 'id',
type: 'string',
values: new ArrayVector(['id3', 'id4', 'id1', 'id2']),
values: ['id3', 'id4', 'id1', 'id2'],
},
],
length: 4,
@ -224,13 +224,13 @@ describe('combineResponses', () => {
config: {},
name: 'Time',
type: 'time',
values: new ArrayVector([1000000, 2000000, 3000000, 4000000]),
values: [1000000, 2000000, 3000000, 4000000],
},
{
config: {},
name: 'Value',
type: 'number',
values: new ArrayVector([6, 7, 5, 4]),
values: [6, 7, 5, 4],
labels: {
level: 'debug',
},
@ -269,13 +269,13 @@ describe('combineResponses', () => {
config: {},
name: 'Time',
type: 'time',
values: new ArrayVector([1000000, 2000000, 3000000, 4000000]),
values: [1000000, 2000000, 3000000, 4000000],
},
{
config: {},
name: 'Value',
type: 'number',
values: new ArrayVector([6, 7, 5, 4]),
values: [6, 7, 5, 4],
labels: {
level: 'debug',
},

View File

@ -1,5 +1,4 @@
import {
ArrayVector,
DataFrame,
DataFrameType,
DataQueryResponse,
@ -16,12 +15,12 @@ import { isBytesString } from './languageUtils';
import { isLogLineJSON, isLogLineLogfmt, isLogLinePacked } from './lineParser';
export function dataFrameHasLokiError(frame: DataFrame): boolean {
const labelSets: Labels[] = frame.fields.find((f) => f.name === 'labels')?.values.toArray() ?? [];
const labelSets: Labels[] = frame.fields.find((f) => f.name === 'labels')?.values ?? [];
return labelSets.some((labels) => labels.__error__ !== undefined);
}
export function dataFrameHasLevelLabel(frame: DataFrame): boolean {
const labelSets: Labels[] = frame.fields.find((f) => f.name === 'labels')?.values.toArray() ?? [];
const labelSets: Labels[] = frame.fields.find((f) => f.name === 'labels')?.values ?? [];
return labelSets.some((labels) => labels.level !== undefined);
}
@ -35,7 +34,7 @@ export function extractLogParserFromDataFrame(frame: DataFrame): {
return { hasJSON: false, hasLogfmt: false, hasPack: false };
}
const logLines: string[] = lineField.values.toArray();
const logLines: string[] = lineField.values;
let hasJSON = false;
let hasLogfmt = false;
@ -57,7 +56,7 @@ export function extractLogParserFromDataFrame(frame: DataFrame): {
export function extractLabelKeysFromDataFrame(frame: DataFrame): string[] {
const labelsArray: Array<{ [key: string]: string }> | undefined =
frame?.fields?.find((field) => field.name === 'labels')?.values.toArray() ?? [];
frame?.fields?.find((field) => field.name === 'labels')?.values ?? [];
if (!labelsArray?.length) {
return [];
@ -68,7 +67,7 @@ export function extractLabelKeysFromDataFrame(frame: DataFrame): string[] {
export function extractUnwrapLabelKeysFromDataFrame(frame: DataFrame): string[] {
const labelsArray: Array<{ [key: string]: string }> | undefined =
frame?.fields?.find((field) => field.name === 'labels')?.values.toArray() ?? [];
frame?.fields?.find((field) => field.name === 'labels')?.values ?? [];
if (!labelsArray?.length) {
return [];
@ -94,7 +93,7 @@ export function extractHasErrorLabelFromDataFrame(frame: DataFrame): boolean {
return false;
}
const labels: Array<{ [key: string]: string }> = labelField.values.toArray();
const labels: Array<{ [key: string]: string }> = labelField.values;
return labels.some((label) => label['__error__']);
}
@ -106,7 +105,7 @@ export function extractLevelLikeLabelFromDataFrame(frame: DataFrame): string | n
// Depending on the number of labels, this can be a pretty heavy operation.
// Let's just look at the first 2 lines. If needed, we can introduce more later.
const labelsArray: Array<{ [key: string]: string }> = labelField.values.toArray().slice(0, 2);
const labelsArray: Array<{ [key: string]: string }> = labelField.values.slice(0, 2);
let levelLikeLabel: string | null = null;
// Find first level-like label
@ -203,9 +202,7 @@ export function combineResponses(currentResult: DataQueryResponse | null, newRes
function combineFrames(dest: DataFrame, source: DataFrame) {
const totalFields = dest.fields.length;
for (let i = 0; i < totalFields; i++) {
dest.fields[i].values = new ArrayVector(
[].concat.apply(source.fields[i].values.toArray(), dest.fields[i].values.toArray())
);
dest.fields[i].values = [].concat.apply(source.fields[i].values, dest.fields[i].values);
}
dest.length += source.length;
dest.meta = {
@ -251,9 +248,9 @@ export function cloneQueryResponse(response: DataQueryResponse): DataQueryRespon
function cloneDataFrame(frame: DataQueryResponseData): DataQueryResponseData {
return {
...frame,
fields: frame.fields.map((field: Field<unknown, ArrayVector>) => ({
fields: frame.fields.map((field: Field) => ({
...field,
values: new ArrayVector(field.values.toArray()),
values: field.values,
})),
};
}
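
A small worked example (not part of the commit, values borrowed from the combineResponses test expectations above) of what the concat.apply form in combineFrames produces with plain arrays: the source frame's values are prepended ahead of the destination's.

// [].concat.apply(sourceValues, destValues) evaluates to sourceValues.concat(...destValues),
// i.e. the source values followed by the existing destination values.
const destValues = ['line1', 'line2'];
const sourceValues = ['line3', 'line4'];

const combined = sourceValues.concat(...destValues);
// combined is ['line3', 'line4', 'line1', 'line2'], matching the merged test frame above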

View File

@ -1,4 +1,4 @@
import { ArrayVector, DataFrame, FieldType } from '@grafana/data';
import { DataFrame, FieldType } from '@grafana/data';
import { sortDataFrameByTime, SortDirection } from './sortDataFrame';
@ -9,19 +9,19 @@ const inputFrame: DataFrame = {
name: 'time',
type: FieldType.time,
config: {},
values: new ArrayVector([1005, 1001, 1004, 1002, 1003]),
values: [1005, 1001, 1004, 1002, 1003],
},
{
name: 'value',
type: FieldType.string,
config: {},
values: new ArrayVector(['line5', 'line1', 'line4', 'line2', 'line3']),
values: ['line5', 'line1', 'line4', 'line2', 'line3'],
},
{
name: 'tsNs',
type: FieldType.time,
config: {},
values: new ArrayVector([`1005000000`, `1001000000`, `1004000000`, `1002000000`, `1003000000`]),
values: [`1005000000`, `1001000000`, `1004000000`, `1002000000`, `1003000000`],
},
],
length: 5,
@ -31,23 +31,23 @@ describe('loki sortDataFrame', () => {
it('sorts a dataframe ascending', () => {
const sortedFrame = sortDataFrameByTime(inputFrame, SortDirection.Ascending);
expect(sortedFrame.length).toBe(5);
const timeValues = sortedFrame.fields[0].values.toArray();
const lineValues = sortedFrame.fields[1].values.toArray();
const tsNsValues = sortedFrame.fields[2].values.toArray();
const timeValues = sortedFrame.fields[0].values;
const lineValues = sortedFrame.fields[1].values;
const tsNsValues = sortedFrame.fields[2].values;
expect(timeValues).toStrictEqual([1001, 1002, 1003, 1004, 1005]);
expect(lineValues).toStrictEqual(['line1', 'line2', 'line3', 'line4', 'line5']);
expect(tsNsValues).toStrictEqual([`1001000000`, `1002000000`, `1003000000`, `1004000000`, `1005000000`]);
expect(timeValues).toEqual([1001, 1002, 1003, 1004, 1005]);
expect(lineValues).toEqual(['line1', 'line2', 'line3', 'line4', 'line5']);
expect(tsNsValues).toEqual([`1001000000`, `1002000000`, `1003000000`, `1004000000`, `1005000000`]);
});
it('sorts a dataframe descending', () => {
const sortedFrame = sortDataFrameByTime(inputFrame, SortDirection.Descending);
expect(sortedFrame.length).toBe(5);
const timeValues = sortedFrame.fields[0].values.toArray();
const lineValues = sortedFrame.fields[1].values.toArray();
const tsNsValues = sortedFrame.fields[2].values.toArray();
const timeValues = sortedFrame.fields[0].values;
const lineValues = sortedFrame.fields[1].values;
const tsNsValues = sortedFrame.fields[2].values;
expect(timeValues).toStrictEqual([1005, 1004, 1003, 1002, 1001]);
expect(lineValues).toStrictEqual(['line5', 'line4', 'line3', 'line2', 'line1']);
expect(tsNsValues).toStrictEqual([`1005000000`, `1004000000`, `1003000000`, `1002000000`, `1001000000`]);
expect(timeValues).toEqual([1005, 1004, 1003, 1002, 1001]);
expect(lineValues).toEqual(['line5', 'line4', 'line3', 'line2', 'line1']);
expect(tsNsValues).toEqual([`1005000000`, `1004000000`, `1003000000`, `1002000000`, `1001000000`]);
});
});

View File

@ -16,7 +16,7 @@ export enum SortDirection {
// - the first row will become the second
// - the second row will become the third
function makeIndex(field: Field<string>, dir: SortDirection): number[] {
const fieldValues: string[] = field.values.toArray();
const fieldValues: string[] = field.values;
// we first build an array which is [0,1,2,3....]
const index = Array(fieldValues.length);
@ -65,7 +65,7 @@ export function sortDataFrameByTime(frame: DataFrame, dir: SortDirection): DataF
...rest,
fields: fields.map((field) => ({
...field,
values: new SortedVector(field.values, index),
values: new SortedVector(field.values, index).toArray(),
})),
};
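
For reference, a minimal sketch (an equivalent formulation, not the commit's code) of what materializing the SortedVector to an array amounts to: applying the precomputed permutation from makeIndex to each field's plain values.

// index[n] holds the original position of the value that should land at position n
// after sorting, so applying it is a plain map over the values array.
function applyIndex<T>(values: T[], index: number[]): T[] {
  return index.map((originalPosition) => values[originalPosition]);
}

// e.g. applyIndex([1005, 1001, 1004], [1, 2, 0]) returns [1001, 1004, 1005]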

View File

@ -23,13 +23,13 @@ export class MssqlDatasource extends SqlDatasource {
async fetchDatasets(): Promise<string[]> {
const datasets = await this.runSql<{ name: string[] }>(showDatabases(), { refId: 'datasets' });
return datasets.fields.name?.values.toArray().flat() ?? [];
return datasets.fields.name?.values.flat() ?? [];
}
async fetchTables(dataset?: string): Promise<string[]> {
// We get back the table name with the schema as well, e.g. dbo.table
const tables = await this.runSql<{ schemaAndName: string[] }>(getSchemaAndName(dataset), { refId: 'tables' });
return tables.fields.schemaAndName?.values.toArray().flat() ?? [];
return tables.fields.schemaAndName?.values.flat() ?? [];
}
async fetchFields(query: SQLQuery): Promise<SQLSelectableValue[]> {
@ -42,8 +42,8 @@ export class MssqlDatasource extends SqlDatasource {
});
const result: SQLSelectableValue[] = [];
for (let i = 0; i < schema.length; i++) {
const column = schema.fields.column.values.get(i);
const type = schema.fields.type.values.get(i);
const column = schema.fields.column.values[i];
const type = schema.fields.type.values[i];
result.push({ label: column, value: column, type, icon: getIcon(type), raqbFieldType: getRAQBType(type) });
}
return result;

View File

@ -24,7 +24,7 @@ export class PostgresDatasource extends SqlDatasource {
async getVersion(): Promise<string> {
const value = await this.runSql<{ version: number }>(getVersion());
const results = value.fields.version?.values.toArray();
const results = value.fields.version?.values;
if (!results) {
return '';
@ -35,7 +35,7 @@ export class PostgresDatasource extends SqlDatasource {
async getTimescaleDBVersion(): Promise<string | undefined> {
const value = await this.runSql<{ extversion: string }>(getTimescaleDBVersion());
const results = value.fields.extversion?.values.toArray();
const results = value.fields.extversion?.values;
if (!results) {
return undefined;
@ -46,7 +46,7 @@ export class PostgresDatasource extends SqlDatasource {
async fetchTables(): Promise<string[]> {
const tables = await this.runSql<{ table: string[] }>(showTables(), { refId: 'tables' });
return tables.fields.table?.values.toArray().flat() ?? [];
return tables.fields.table?.values.flat() ?? [];
}
getSqlLanguageDefinition(db: DB): LanguageDefinition {
@ -70,8 +70,8 @@ export class PostgresDatasource extends SqlDatasource {
const schema = await this.runSql<{ column: string; type: string }>(getSchema(query.table), { refId: 'columns' });
const result: SQLSelectableValue[] = [];
for (let i = 0; i < schema.length; i++) {
const column = schema.fields.column.values.get(i);
const type = schema.fields.type.values.get(i);
const column = schema.fields.column.values[i];
const type = schema.fields.type.values[i];
result.push({ label: column, value: column, type, ...getFieldConfig(type) });
}
return result;

View File

@ -396,9 +396,9 @@ describe('PrometheusDatasource', () => {
ds.performTimeSeriesQuery = jest.fn().mockReturnValue(of(responseMock));
await expect(ds.query(query)).toEmitValuesWith((result) => {
const results = result[0].data;
expect(results[0].fields[1].values.toArray()).toEqual([10, 10]);
expect(results[0].fields[2].values.toArray()).toEqual([10, 0]);
expect(results[0].fields[3].values.toArray()).toEqual([5, 0]);
expect(results[0].fields[1].values).toEqual([10, 10]);
expect(results[0].fields[2].values).toEqual([10, 0]);
expect(results[0].fields[3].values).toEqual([5, 0]);
});
});
@ -1018,27 +1018,27 @@ describe('PrometheusDatasource2', () => {
});
it('should fill null until first datapoint in response', () => {
expect(results.data[0].fields[0].values.get(0)).toBe(start * 1000);
expect(results.data[0].fields[1].values.get(0)).toBe(null);
expect(results.data[0].fields[0].values.get(1)).toBe((start + step * 1) * 1000);
expect(results.data[0].fields[1].values.get(1)).toBe(3846);
expect(results.data[0].fields[0].values[0]).toBe(start * 1000);
expect(results.data[0].fields[1].values[0]).toBe(null);
expect(results.data[0].fields[0].values[1]).toBe((start + step * 1) * 1000);
expect(results.data[0].fields[1].values[1]).toBe(3846);
});
it('should fill null after last datapoint in response', () => {
const length = (end - start) / step + 1;
expect(results.data[0].fields[0].values.get(length - 2)).toBe((end - step * 1) * 1000);
expect(results.data[0].fields[1].values.get(length - 2)).toBe(3848);
expect(results.data[0].fields[0].values.get(length - 1)).toBe(end * 1000);
expect(results.data[0].fields[1].values.get(length - 1)).toBe(null);
expect(results.data[0].fields[0].values[length - 2]).toBe((end - step * 1) * 1000);
expect(results.data[0].fields[1].values[length - 2]).toBe(3848);
expect(results.data[0].fields[0].values[length - 1]).toBe(end * 1000);
expect(results.data[0].fields[1].values[length - 1]).toBe(null);
});
it('should fill null at gap between series', () => {
expect(results.data[0].fields[0].values.get(2)).toBe((start + step * 2) * 1000);
expect(results.data[0].fields[1].values.get(2)).toBe(null);
expect(results.data[1].fields[0].values.get(1)).toBe((start + step * 1) * 1000);
expect(results.data[1].fields[1].values.get(1)).toBe(null);
expect(results.data[1].fields[0].values.get(3)).toBe((start + step * 3) * 1000);
expect(results.data[1].fields[1].values.get(3)).toBe(null);
expect(results.data[0].fields[0].values[2]).toBe((start + step * 2) * 1000);
expect(results.data[0].fields[1].values[2]).toBe(null);
expect(results.data[1].fields[0].values[1]).toBe((start + step * 1) * 1000);
expect(results.data[1].fields[1].values[1]).toBe(null);
expect(results.data[1].fields[0].values[3]).toBe((start + step * 3) * 1000);
expect(results.data[1].fields[1].values[3]).toBe(null);
});
});

View File

@ -866,10 +866,10 @@ export class PrometheusDatasource
const timeValueTuple: Array<[number, number]> = [];
let idx = 0;
valueField.values.toArray().forEach((value: string) => {
valueField.values.forEach((value: string) => {
let timeStampValue: number;
let valueValue: number;
const time = timeField.values.get(idx);
const time = timeField.values[idx];
// If we want to use value as a time, we use value as timeStampValue and valueValue will be 1
if (options.annotation.useValueForTime) {

View File

@ -268,7 +268,7 @@ describe('QueryCache', function () {
// All of the new values should be the ones that were stored; this is overkill
secondFrames.forEach((frame, frameIdx) => {
frame.fields.forEach((field, fieldIdx) => {
secondFrames[frameIdx].fields[fieldIdx].values.toArray().forEach((value) => {
secondFrames[frameIdx].fields[fieldIdx].values.forEach((value) => {
expect(secondStoredFrames[frameIdx].fields[fieldIdx].values).toContain(value);
});
});
@ -394,12 +394,8 @@ describe('QueryCache', function () {
// Since the step is 15s, and the request was 30 seconds later, we should have 2 extra frames, but we should evict the first two, so we should get the same length
expect(firstMergedLength).toEqual(secondMergedLength);
expect(firstQueryResult[0].fields[0].values.toArray()[2]).toEqual(
secondQueryResult[0].fields[0].values.toArray()[0]
);
expect(firstQueryResult[0].fields[0].values.toArray()[0] + 30000).toEqual(
secondQueryResult[0].fields[0].values.toArray()[0]
);
expect(firstQueryResult[0].fields[0].values[2]).toEqual(secondQueryResult[0].fields[0].values[0]);
expect(firstQueryResult[0].fields[0].values[0] + 30000).toEqual(secondQueryResult[0].fields[0].values[0]);
cache.set(targetIdentity, `'1=1'|${interval}|${JSON.stringify(thirdRange.raw)}`);
@ -418,7 +414,7 @@ describe('QueryCache', function () {
);
const cachedAfterThird = storage.cache.get(targetIdentity);
const storageLengthAfterThirdQuery = cachedAfterThird?.frames[0].fields[0].values.toArray().length;
const storageLengthAfterThirdQuery = cachedAfterThird?.frames[0].fields[0].values.length;
expect(storageLengthAfterThirdQuery).toEqual(20);
});

View File

@ -1,5 +1,4 @@
import {
ArrayVector,
DataFrame,
DataQueryRequest,
dateTime,
@ -360,16 +359,15 @@ export class QueryCache {
// amend & re-cache
// eslint-disable-next-line @typescript-eslint/consistent-type-assertions
let prevTable: Table = cachedFrame.fields.map((field) => field.values.toArray()) as Table;
let prevTable: Table = cachedFrame.fields.map((field) => field.values) as Table;
// eslint-disable-next-line @typescript-eslint/consistent-type-assertions
let nextTable: Table = respFrame.fields.map((field) => field.values.toArray()) as Table;
let nextTable: Table = respFrame.fields.map((field) => field.values) as Table;
let amendedTable = amendTable(prevTable, nextTable);
if (amendedTable) {
for (let i = 0; i < amendedTable.length; i++) {
cachedFrame.fields[i].values = new ArrayVector(amendedTable[i]);
cachedFrame.fields[i].values = amendedTable[i];
}
cachedFrame.length = cachedFrame.fields[0].values.length;
}
}
@ -380,13 +378,13 @@ export class QueryCache {
cachedFrames.forEach((frame) => {
// eslint-disable-next-line @typescript-eslint/consistent-type-assertions
let table: Table = frame.fields.map((field) => field.values.toArray()) as Table;
let table: Table = frame.fields.map((field) => field.values) as Table;
let trimmed = trimTable(table, newFrom, newTo);
if (trimmed[0].length > 0) {
for (let i = 0; i < trimmed.length; i++) {
frame.fields[i].values = new ArrayVector(trimmed[i]);
frame.fields[i].values = trimmed[i];
}
nonEmptyCachedFrames.push(frame);
}
@ -409,7 +407,7 @@ export class QueryCache {
config: {
...field.config, // prevents mutatative exemplars links (re)enrichment
},
values: new ArrayVector(field.values.toArray().slice()),
values: field.values.slice(),
})),
}));
}
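
A minimal sketch (names illustrative, not from the commit) of the copy-on-read step above: slicing each plain values array hands callers frames detached from the cache, so later cache amendments cannot mutate results that were already returned.

import { DataFrame } from '@grafana/data';

// Return frames whose value arrays and field configs are independent copies of
// the cached ones; the frame and field objects themselves are shallow-copied.
function snapshotFrames(cachedFrames: DataFrame[]): DataFrame[] {
  return cachedFrames.map((frame) => ({
    ...frame,
    fields: frame.fields.map((field) => ({
      ...field,
      config: { ...field.config }, // prevents mutating shared field config
      values: field.values.slice(), // shallow copy of the plain values array
    })),
  }));
}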

View File

@ -1,20 +1,18 @@
import { clone } from 'lodash';
import { ArrayVector } from '@grafana/data/src';
/**
*
* @param length - Number of values to add
* @param start - First timestamp (ms)
* @param step - step duration (ms)
*/
export const getMockTimeFrameArray = (length: number, start: number, step: number): ArrayVector => {
let timeValues = [];
export const getMockTimeFrameArray = (length: number, start: number, step: number) => {
let timeValues: number[] = [];
for (let i = 0; i < length; i++) {
timeValues.push(start + i * step);
}
return new ArrayVector(timeValues);
return timeValues;
};
/**
@ -22,8 +20,8 @@ export const getMockTimeFrameArray = (length: number, start: number, step: numbe
* @param values
* @param high
*/
export const getMockValueFrameArray = (length: number, values = 0): ArrayVector => {
return new ArrayVector(Array(length).fill(values));
export const getMockValueFrameArray = (length: number, values = 0): number[] => {
return Array(length).fill(values);
};
const timeFrameWithMissingValuesInMiddle = getMockTimeFrameArray(721, 1675262550000, 30000);
@ -31,9 +29,9 @@ const timeFrameWithMissingValuesAtStart = getMockTimeFrameArray(721, 16752625500
const timeFrameWithMissingValuesAtEnd = getMockTimeFrameArray(721, 1675262550000, 30000);
// Deleting some out of the middle
timeFrameWithMissingValuesInMiddle.toArray().splice(360, 721 - 684);
timeFrameWithMissingValuesAtStart.toArray().splice(0, 721 - 684);
timeFrameWithMissingValuesAtEnd.toArray().splice(721 - 684, 721 - 684);
timeFrameWithMissingValuesInMiddle.splice(360, 721 - 684);
timeFrameWithMissingValuesAtStart.splice(0, 721 - 684);
timeFrameWithMissingValuesAtEnd.splice(721 - 684, 721 - 684);
const mockLabels = {
__name__: 'cortex_request_duration_seconds_bucket',

View File

@ -366,9 +366,9 @@ describe('Prometheus Result Transformer', () => {
const series = transformV2(response, options, {});
expect(series.data[0].fields.length).toEqual(4);
expect(series.data[0].fields[1].values.toArray()).toEqual([10, 10, 0]);
expect(series.data[0].fields[2].values.toArray()).toEqual([10, 0, 30]);
expect(series.data[0].fields[3].values.toArray()).toEqual([10, 0, 10]);
expect(series.data[0].fields[1].values).toEqual([10, 10, 0]);
expect(series.data[0].fields[2].values).toEqual([10, 0, 30]);
expect(series.data[0].fields[3].values).toEqual([10, 0, 10]);
expect(series.data[0].fields[1].name).toEqual('1');
expect(series.data[0].fields[2].name).toEqual('2');
expect(series.data[0].fields[3].name).toEqual('+Inf');
@ -467,9 +467,9 @@ describe('Prometheus Result Transformer', () => {
const series = transformV2(response, options, {});
expect(series.data[0].fields.length).toEqual(4);
expect(series.data[0].fields[1].values.toArray()).toEqual([10, 10, 0]);
expect(series.data[0].fields[2].values.toArray()).toEqual([10, 0, 30]);
expect(series.data[0].fields[3].values.toArray()).toEqual([10, 0, 10]);
expect(series.data[0].fields[1].values).toEqual([10, 10, 0]);
expect(series.data[0].fields[2].values).toEqual([10, 0, 30]);
expect(series.data[0].fields[3].values).toEqual([10, 0, 10]);
});
it('results with heatmap format and multiple histograms should be grouped and de-accumulated by non-le labels', () => {
@ -600,17 +600,17 @@ describe('Prometheus Result Transformer', () => {
const series = transformV2(response, options, {});
expect(series.data[0].fields.length).toEqual(4);
expect(series.data[0].fields[1].values.toArray()).toEqual([10, 10, 0]);
expect(series.data[0].fields[2].values.toArray()).toEqual([10, 0, 30]);
expect(series.data[0].fields[3].values.toArray()).toEqual([10, 0, 10]);
expect(series.data[0].fields[1].values).toEqual([10, 10, 0]);
expect(series.data[0].fields[2].values).toEqual([10, 0, 30]);
expect(series.data[0].fields[3].values).toEqual([10, 0, 10]);
expect(series.data[1].fields[1].values.toArray()).toEqual([0, 10, 10]);
expect(series.data[1].fields[2].values.toArray()).toEqual([20, 0, 30]);
expect(series.data[1].fields[3].values.toArray()).toEqual([10, 0, 20]);
expect(series.data[1].fields[1].values).toEqual([0, 10, 10]);
expect(series.data[1].fields[2].values).toEqual([20, 0, 30]);
expect(series.data[1].fields[3].values).toEqual([10, 0, 20]);
expect(series.data[2].fields[1].values.toArray()).toEqual([30, 30, 60]);
expect(series.data[2].fields[2].values.toArray()).toEqual([0, 10, 0]);
expect(series.data[2].fields[3].values.toArray()).toEqual([10, 0, 0]);
expect(series.data[2].fields[1].values).toEqual([30, 30, 60]);
expect(series.data[2].fields[2].values).toEqual([0, 10, 0]);
expect(series.data[2].fields[3].values).toEqual([10, 0, 0]);
});
it('Retains exemplar frames when data returned is a heatmap', () => {
@ -667,14 +667,7 @@ describe('Prometheus Result Transformer', () => {
const series = transformV2(response, options, {});
expect(series.data[0].fields.length).toEqual(2);
expect(series.data.length).toEqual(2);
expect(series.data[1].fields[2].values.toArray()).toEqual([
'hello',
'doctor',
'name',
'continue',
'yesterday',
'tomorrow',
]);
expect(series.data[1].fields[2].values).toEqual(['hello', 'doctor', 'name', 'continue', 'yesterday', 'tomorrow']);
expect(series.data[1].fields.length).toEqual(3);
});
@ -765,9 +758,9 @@ describe('Prometheus Result Transformer', () => {
expect(tableDf.fields.length).toBe(4);
expect(tableDf.fields[0].name).toBe('Time');
expect(tableDf.fields[1].name).toBe('label1');
expect(tableDf.fields[1].values.get(0)).toBe('value1');
expect(tableDf.fields[1].values[0]).toBe('value1');
expect(tableDf.fields[2].name).toBe('label2');
expect(tableDf.fields[2].values.get(0)).toBe('value2');
expect(tableDf.fields[2].values[0]).toBe('value2');
expect(tableDf.fields[3].name).toBe('Value');
});
@ -789,9 +782,9 @@ describe('Prometheus Result Transformer', () => {
expect(tableDf.fields.length).toBe(4);
expect(tableDf.fields[0].name).toBe('Time');
expect(tableDf.fields[1].name).toBe('label1');
expect(tableDf.fields[1].values.get(0)).toBe('value1');
expect(tableDf.fields[1].values[0]).toBe('value1');
expect(tableDf.fields[2].name).toBe('label2');
expect(tableDf.fields[2].values.get(0)).toBe('value2');
expect(tableDf.fields[2].values[0]).toBe('value2');
expect(tableDf.fields[3].name).toBe('Value');
});
@ -824,16 +817,16 @@ describe('Prometheus Result Transformer', () => {
expect(transformedTableDataFrames[0].fields.length).toBe(4);
expect(transformedTableDataFrames[0].fields[0].name).toBe('Time');
expect(transformedTableDataFrames[0].fields[1].name).toBe('label1');
expect(transformedTableDataFrames[0].fields[1].values.get(0)).toBe(value1);
expect(transformedTableDataFrames[0].fields[1].values[0]).toBe(value1);
expect(transformedTableDataFrames[0].fields[2].name).toBe('label2');
expect(transformedTableDataFrames[0].fields[2].values.get(0)).toBe(value2);
expect(transformedTableDataFrames[0].fields[2].values[0]).toBe(value2);
expect(transformedTableDataFrames[0].fields[3].name).toBe('Value #A');
// Expect the invalid/empty results not to throw an error and to return empty arrays
expect(transformedTableDataFrames[1].fields[1].labels).toBe(undefined);
expect(transformedTableDataFrames[1].fields[1].name).toBe('Value #B');
expect(transformedTableDataFrames[1].fields[1].values.toArray()).toEqual([]);
expect(transformedTableDataFrames[1].fields[0].values.toArray()).toEqual([]);
expect(transformedTableDataFrames[1].fields[1].values).toEqual([]);
expect(transformedTableDataFrames[1].fields[0].values).toEqual([]);
});
});
@ -901,22 +894,20 @@ describe('Prometheus Result Transformer', () => {
format: 'table',
},
});
expect(result[0].fields[0].values.toArray()).toEqual([
1443454528000, 1443454530000, 1443454529000, 1443454531000,
]);
expect(result[0].fields[0].values).toEqual([1443454528000, 1443454530000, 1443454529000, 1443454531000]);
expect(result[0].fields[0].name).toBe('Time');
expect(result[0].fields[0].type).toBe(FieldType.time);
expect(result[0].fields[1].values.toArray()).toEqual(['test', 'test', 'test2', 'test2']);
expect(result[0].fields[1].values).toEqual(['test', 'test', 'test2', 'test2']);
expect(result[0].fields[1].name).toBe('__name__');
expect(result[0].fields[1].config.filterable).toBe(true);
expect(result[0].fields[1].type).toBe(FieldType.string);
expect(result[0].fields[2].values.toArray()).toEqual(['', '', 'localhost:8080', 'localhost:8080']);
expect(result[0].fields[2].values).toEqual(['', '', 'localhost:8080', 'localhost:8080']);
expect(result[0].fields[2].name).toBe('instance');
expect(result[0].fields[2].type).toBe(FieldType.string);
expect(result[0].fields[3].values.toArray()).toEqual(['testjob', 'testjob', 'otherjob', 'otherjob']);
expect(result[0].fields[3].values).toEqual(['testjob', 'testjob', 'otherjob', 'otherjob']);
expect(result[0].fields[3].name).toBe('job');
expect(result[0].fields[3].type).toBe(FieldType.string);
expect(result[0].fields[4].values.toArray()).toEqual([3846, 3848, 3847, 3849]);
expect(result[0].fields[4].values).toEqual([3846, 3848, 3847, 3849]);
expect(result[0].fields[4].name).toEqual('Value');
expect(result[0].fields[4].type).toBe(FieldType.number);
expect(result[0].refId).toBe('A');
@ -952,13 +943,13 @@ describe('Prometheus Result Transformer', () => {
it('should return data frame', () => {
const result = transform({ data: response } as any, { ...options, target: { format: 'table' } });
expect(result[0].fields[0].values.toArray()).toEqual([1443454528000]);
expect(result[0].fields[0].values).toEqual([1443454528000]);
expect(result[0].fields[0].name).toBe('Time');
expect(result[0].fields[1].values.toArray()).toEqual(['test']);
expect(result[0].fields[1].values).toEqual(['test']);
expect(result[0].fields[1].name).toBe('__name__');
expect(result[0].fields[2].values.toArray()).toEqual(['testjob']);
expect(result[0].fields[2].values).toEqual(['testjob']);
expect(result[0].fields[2].name).toBe('job');
expect(result[0].fields[3].values.toArray()).toEqual([3846]);
expect(result[0].fields[3].values).toEqual([3846]);
expect(result[0].fields[3].name).toEqual('Value');
});
@ -976,7 +967,7 @@ describe('Prometheus Result Transformer', () => {
},
};
const result = transform({ data: response } as any, { ...options, target: { format: 'table' } });
expect(result[0].fields[1].values.toArray()).toEqual([102]);
expect(result[0].fields[1].values).toEqual([102]);
expect(result[0].fields[1].type).toEqual(FieldType.number);
});
});
@ -1046,10 +1037,10 @@ describe('Prometheus Result Transformer', () => {
]);
const result = transform({ data: response } as any, { query: options, target: options } as any);
expect(result[0].fields[0].values.toArray()).toEqual([1445000010000, 1445000020000, 1445000030000]);
expect(result[0].fields[1].values.toArray()).toEqual([10, 10, 0]);
expect(result[0].fields[2].values.toArray()).toEqual([10, 0, 30]);
expect(result[0].fields[3].values.toArray()).toEqual([10, 0, 10]);
expect(result[0].fields[0].values).toEqual([1445000010000, 1445000020000, 1445000030000]);
expect(result[0].fields[1].values).toEqual([10, 10, 0]);
expect(result[0].fields[2].values).toEqual([10, 0, 30]);
expect(result[0].fields[3].values).toEqual([10, 0, 10]);
});
it('should handle missing datapoints', () => {
@ -1078,9 +1069,9 @@ describe('Prometheus Result Transformer', () => {
},
]);
const result = transform({ data: response } as any, { query: options, target: options } as any);
expect(result[0].fields[1].values.toArray()).toEqual([1, 2]);
expect(result[0].fields[2].values.toArray()).toEqual([1, 3, 1]);
expect(result[0].fields[3].values.toArray()).toEqual([1, 2]);
expect(result[0].fields[1].values).toEqual([1, 2]);
expect(result[0].fields[2].values).toEqual([1, 3, 1]);
expect(result[0].fields[3].values).toEqual([1, 2]);
});
});
@ -1137,8 +1128,8 @@ describe('Prometheus Result Transformer', () => {
end: 2,
},
});
expect(result[0].fields[0].values.toArray()).toEqual([0, 1000, 2000]);
expect(result[0].fields[1].values.toArray()).toEqual([10, 10, 0]);
expect(result[0].fields[0].values).toEqual([0, 1000, 2000]);
expect(result[0].fields[1].values).toEqual([10, 10, 0]);
expect(result[0].name).toBe('test{job="testjob"}');
});
@ -1148,8 +1139,8 @@ describe('Prometheus Result Transformer', () => {
query: { step: 1, start: 0, end: 2 },
});
expect(result[0].fields[0].values.toArray()).toEqual([0, 1000, 2000]);
expect(result[0].fields[1].values.toArray()).toEqual([null, 10, 0]);
expect(result[0].fields[0].values).toEqual([0, 1000, 2000]);
expect(result[0].fields[1].values).toEqual([null, 10, 0]);
});
it('should use __name__ label as series name', () => {
@ -1242,8 +1233,8 @@ describe('Prometheus Result Transformer', () => {
};
const result = transform({ data: response } as any, { ...options, query: { step: 2, start: 0, end: 8 } });
expect(result[0].fields[0].values.toArray()).toEqual([0, 2000, 4000, 6000, 8000]);
expect(result[0].fields[1].values.toArray()).toEqual([null, null, 10, null, 10]);
expect(result[0].fields[0].values).toEqual([0, 2000, 4000, 6000, 8000]);
expect(result[0].fields[1].values).toEqual([null, null, 10, null, 10]);
});
});
@ -1262,7 +1253,7 @@ describe('Prometheus Result Transformer', () => {
...options,
target: { format: 'table' },
});
expect(result[0].fields[1].values.toArray()).toEqual([Number.POSITIVE_INFINITY]);
expect(result[0].fields[1].values).toEqual([Number.POSITIVE_INFINITY]);
});
});
@ -1290,7 +1281,7 @@ describe('Prometheus Result Transformer', () => {
...options,
target: { format: 'table' },
});
expect(result[0].fields[3].values.toArray()).toEqual([Number.POSITIVE_INFINITY, Number.NEGATIVE_INFINITY]);
expect(result[0].fields[3].values).toEqual([Number.POSITIVE_INFINITY, Number.NEGATIVE_INFINITY]);
});
});
});
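Every assertion change in this test file follows the same shape: field.values is now a plain array, so tests compare it with toEqual and index it with [] instead of going through .toArray() or .get(). A self-contained Jest sketch of that pattern; the frame contents are illustrative and not taken from the suite above:

import { FieldType, toDataFrame } from '@grafana/data';

describe('plain-array field values', () => {
  it('can be asserted on directly', () => {
    const frame = toDataFrame({
      fields: [
        { name: 'Time', type: FieldType.time, values: [1445000010000, 1445000020000, 1445000030000] },
        { name: 'Value', type: FieldType.number, values: [10, 10, 0] },
      ],
    });

    // Before: expect(frame.fields[1].values.toArray()).toEqual([10, 10, 0]);
    expect(frame.fields[1].values).toEqual([10, 10, 0]);
    // Before: frame.fields[1].values.get(0)
    expect(frame.fields[1].values[0]).toBe(10);
  });
});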

View File

@ -3,7 +3,6 @@ import { flatten, forOwn, groupBy, partition } from 'lodash';
import {
ArrayDataFrame,
ArrayVector,
CoreApp,
DataFrame,
DataFrameType,
@ -226,7 +225,7 @@ export function transformDFToTable(dfs: DataFrame[]): DataFrame[] {
name: label,
config: { filterable: true },
type: numberField ? FieldType.number : FieldType.string,
values: new ArrayVector(),
values: [],
});
}
});
@ -234,10 +233,10 @@ export function transformDFToTable(dfs: DataFrame[]): DataFrame[] {
// Fill valueField, timeField and labelFields with values
dataFramesByRefId[refId].forEach((df) => {
const timeFields = df.fields[0]?.values ?? new ArrayVector();
const dataFields = df.fields[1]?.values ?? new ArrayVector();
timeFields.toArray().forEach((value) => timeField.values.add(value));
dataFields.toArray().forEach((value) => {
const timeFields = df.fields[0]?.values ?? [];
const dataFields = df.fields[1]?.values ?? [];
timeFields.forEach((value) => timeField.values.add(value));
dataFields.forEach((value) => {
valueField.values.add(parseSampleValue(value));
const labelsForField = df.fields[1].labels ?? {};
labelFields.forEach((field) => field.values.add(getLabelValue(labelsForField, field.name)));
@ -515,12 +514,13 @@ function transformMetricDataToTable(md: MatrixOrVectorResult[], options: Transfo
// Labels have string field type, otherwise table tries to figure out the type which can result in unexpected results
// Only "le" label has a number field type
const numberField = label === HISTOGRAM_QUANTILE_LABEL_NAME;
return {
const field: Field = {
name: label,
config: { filterable: true },
type: numberField ? FieldType.number : FieldType.string,
values: new ArrayVector(),
values: [],
};
return field;
});
const valueField = getValueField({ data: [], valueName: valueText });
@ -528,12 +528,12 @@ function transformMetricDataToTable(md: MatrixOrVectorResult[], options: Transfo
if (isMatrixData(d)) {
d.values.forEach((val) => {
timeField.values.add(val[0] * 1000);
metricFields.forEach((metricField) => metricField.values.add(getLabelValue(d.metric, metricField.name)));
metricFields.forEach((metricField) => metricField.values.push(getLabelValue(d.metric, metricField.name)));
valueField.values.add(parseSampleValue(val[1]));
});
} else {
timeField.values.add(d.value[0] * 1000);
metricFields.forEach((metricField) => metricField.values.add(getLabelValue(d.metric, metricField.name)));
metricFields.forEach((metricField) => metricField.values.push(getLabelValue(d.metric, metricField.name)));
valueField.values.add(parseSampleValue(d.value[1]));
}
});
@ -561,7 +561,7 @@ function getTimeField(data: PromValue[], isMs = false): MutableField {
name: TIME_SERIES_TIME_FIELD_NAME,
type: FieldType.time,
config: {},
values: new ArrayVector<number>(data.map((val) => (isMs ? val[0] : val[0] * 1000))),
values: data.map((val) => (isMs ? val[0] : val[0] * 1000)),
};
}
@ -588,7 +588,7 @@ function getValueField({
displayNameFromDS,
},
labels,
values: new ArrayVector<number | null>(data.map((val) => (parseValue ? parseSampleValue(val[1]) : val[1]))),
values: data.map((val) => (parseValue ? parseSampleValue(val[1]) : val[1])),
};
}
@ -660,8 +660,8 @@ function transformToHistogramOverTime(seriesList: DataFrame[]) {
}
for (let j = 0; j < topSeries.values.length; j++) {
const bottomPoint = bottomSeries.values.get(j) || [0];
topSeries.values.toArray()[j] -= bottomPoint;
const bottomPoint = bottomSeries.values[j] || [0];
topSeries.values[j] -= bottomPoint;
}
}
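The transformer changes above follow one construction pattern: label and value columns start as plain empty arrays, appends go through Array.prototype.push where the column is a plain array, and reads use plain indexing. A minimal sketch of that pattern with an illustrative label name, not the transformer's real label handling:

import { Field, FieldType } from '@grafana/data';

const labelField: Field = {
  name: 'job', // illustrative label name
  type: FieldType.string,
  config: { filterable: true },
  values: [], // was: new ArrayVector()
};

labelField.values.push('testjob'); // was: labelField.values.add('testjob')
const firstJob = labelField.values[0]; // was: labelField.values.get(0)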

View File

@ -2,7 +2,6 @@ import { lastValueFrom, Observable, of } from 'rxjs';
import { createFetchResponse } from 'test/helpers/createFetchResponse';
import {
ArrayVector,
DataFrame,
dataFrameToJSON,
DataSourceInstanceSettings,
@ -144,7 +143,7 @@ describe('Tempo data source', () => {
expect(
(response.data[0] as DataFrame).fields.map((f) => ({
name: f.name,
values: f.values.toArray(),
values: f.values,
}))
).toMatchObject([
{ name: 'traceID', values: ['04450900759028499335'] },
@ -162,7 +161,7 @@ describe('Tempo data source', () => {
expect(
(response.data[1] as DataFrame).fields.map((f) => ({
name: f.name,
values: f.values.toArray(),
values: f.values,
}))
).toMatchObject([
{ name: 'id', values: ['4322526419282105830'] },
@ -176,7 +175,7 @@ describe('Tempo data source', () => {
expect(
(response.data[2] as DataFrame).fields.map((f) => ({
name: f.name,
values: f.values.toArray(),
values: f.values,
}))
).toMatchObject([
{ name: 'id', values: [] },
@ -196,7 +195,7 @@ describe('Tempo data source', () => {
const field = response.data[0].fields[0];
expect(field.name).toBe('traceID');
expect(field.type).toBe(FieldType.string);
expect(field.values.get(0)).toBe('60ba2abb44f13eae');
expect(field.values[0]).toBe('60ba2abb44f13eae');
expect(field.values.length).toBe(6);
});
@ -457,14 +456,14 @@ describe('Tempo service graph view', () => {
// Service Graph view
expect(response.data[0].fields[0].name).toBe('Name');
expect(response.data[0].fields[0].values.toArray().length).toBe(2);
expect(response.data[0].fields[0].values.toArray()[0]).toBe('HTTP Client');
expect(response.data[0].fields[0].values.toArray()[1]).toBe('HTTP GET - root');
expect(response.data[0].fields[0].values.length).toBe(2);
expect(response.data[0].fields[0].values[0]).toBe('HTTP Client');
expect(response.data[0].fields[0].values[1]).toBe('HTTP GET - root');
expect(response.data[0].fields[1].name).toBe('Rate');
expect(response.data[0].fields[1].values.toArray().length).toBe(2);
expect(response.data[0].fields[1].values.toArray()[0]).toBe(12.75164671814457);
expect(response.data[0].fields[1].values.toArray()[1]).toBe(12.121331111401608);
expect(response.data[0].fields[1].values.length).toBe(2);
expect(response.data[0].fields[1].values[0]).toBe(12.75164671814457);
expect(response.data[0].fields[1].values[1]).toBe(12.121331111401608);
expect(response.data[0].fields[1].config.decimals).toBe(2);
expect(response.data[0].fields[1].config.links[0].title).toBe('Rate');
expect(response.data[0].fields[1].config.links[0].internal.query.expr).toBe(
@ -474,9 +473,9 @@ describe('Tempo service graph view', () => {
expect(response.data[0].fields[1].config.links[0].internal.query.exemplar).toBe(true);
expect(response.data[0].fields[1].config.links[0].internal.query.instant).toBe(false);
expect(response.data[0].fields[2].values.toArray().length).toBe(2);
expect(response.data[0].fields[2].values.toArray()[0]).toBe(12.75164671814457);
expect(response.data[0].fields[2].values.toArray()[1]).toBe(12.121331111401608);
expect(response.data[0].fields[2].values.length).toBe(2);
expect(response.data[0].fields[2].values[0]).toBe(12.75164671814457);
expect(response.data[0].fields[2].values[1]).toBe(12.121331111401608);
expect(response.data[0].fields[2].config.color.mode).toBe('continuous-BlPu');
expect(response.data[0].fields[2].config.custom.cellOptions.mode).toBe(BarGaugeDisplayMode.Lcd);
expect(response.data[0].fields[2].config.custom.cellOptions.type).toBe(TableCellDisplayMode.Gauge);
@ -678,7 +677,7 @@ describe('Tempo service graph view', () => {
filterable: true,
},
type: 'string',
values: new ArrayVector(['HTTP Client', 'HTTP GET', 'HTTP GET - root', 'HTTP POST', 'HTTP POST - post']),
values: ['HTTP Client', 'HTTP GET', 'HTTP GET - root', 'HTTP POST', 'HTTP POST - post'],
},
],
},

View File

@ -597,7 +597,7 @@ function errorAndDurationQuery(
let serviceGraphViewMetrics = [];
let errorRateBySpanName = '';
let durationsBySpanName: string[] = [];
const spanNames = rateResponse.data[0][0]?.fields[1]?.values.toArray() ?? [];
const spanNames = rateResponse.data[0][0]?.fields[1]?.values ?? [];
if (spanNames.length > 0) {
errorRateBySpanName = buildExpr(errorRateMetric, 'span_name=~"' + spanNames.join('|') + '"', request);
@ -797,8 +797,8 @@ function getServiceGraphView(
}
if (errorRate.length > 0 && errorRate[0].fields?.length > 2) {
const errorRateNames = errorRate[0].fields[1]?.values.toArray() ?? [];
const errorRateValues = errorRate[0].fields[2]?.values.toArray() ?? [];
const errorRateNames = errorRate[0].fields[1]?.values ?? [];
const errorRateValues = errorRate[0].fields[2]?.values ?? [];
let errorRateObj: any = {};
errorRateNames.map((name: string, index: number) => {
errorRateObj[name] = { value: errorRateValues[index] };
@ -848,7 +848,7 @@ function getServiceGraphView(
duration.map((d) => {
const delimiter = d.refId?.includes('span_name=~"') ? 'span_name=~"' : 'span_name="';
const name = d.refId?.split(delimiter)[1].split('"}')[0];
durationObj[name] = { value: d.fields[1].values.toArray()[0] };
durationObj[name] = { value: d.fields[1].values[0] };
});
df.fields.push({
@ -918,7 +918,7 @@ export function getRateAlignedValues(
rateResp: DataQueryResponseData[],
objToAlign: { [x: string]: { value: string } }
) {
const rateNames = rateResp[0]?.fields[1]?.values.toArray() ?? [];
const rateNames = rateResp[0]?.fields[1]?.values ?? [];
let values: string[] = [];
for (let i = 0; i < rateNames.length; i++) {

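The Tempo service-graph reads above now fall back to a plain empty array (?? []) and index the result directly instead of calling .toArray() first. A small sketch of that lookup shape, with assumed field positions rather than the datasource's real response layout:

import { DataFrame } from '@grafana/data';

// Pair a name column with a value column into a lookup object (field positions assumed).
function toValueByName(frame: DataFrame | undefined): Record<string, number> {
  const names = frame?.fields[1]?.values ?? [];
  const values = frame?.fields[2]?.values ?? [];
  const byName: Record<string, number> = {};
  names.forEach((name: string, i: number) => {
    byName[name] = values[i];
  });
  return byName;
}

The same ?? [] fallback keeps the helper safe when a response frame or field is missing entirely, which is what the error-rate and duration branches above rely on.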
View File

@ -85,7 +85,7 @@ export function createTableFrame(
const match = (line as string).match(traceRegex);
if (match) {
const traceId = match[1];
const time = timeField ? timeField.values.get(i) : null;
const time = timeField ? timeField.values[i] : null;
tableFrame.fields[0].values.add(time);
tableFrame.fields[1].values.add(traceId);
tableFrame.fields[2].values.add(line);

View File

@ -1,11 +1,4 @@
import {
ArrayVector,
FieldColorModeId,
FieldDTO,
FieldType,
MutableDataFrame,
NodeGraphDataFrameFieldNames,
} from '@grafana/data';
import { FieldColorModeId, FieldDTO, FieldType, MutableDataFrame, NodeGraphDataFrameFieldNames } from '@grafana/data';
import { nodes, edges } from './testData/serviceMapResponse';
@ -51,9 +44,9 @@ export function generateRandomNodes(count = 10) {
nodes[sourceIndex].edges.push(nodes[targetIndex].id);
}
const nodeFields: Record<string, Omit<FieldDTO, 'name'> & { values: ArrayVector }> = {
const nodeFields: Record<string, Omit<FieldDTO, 'name'> & { values: any[] }> = {
[NodeGraphDataFrameFieldNames.id]: {
values: new ArrayVector(),
values: [],
type: FieldType.string,
config: {
links: [
@ -70,35 +63,35 @@ export function generateRandomNodes(count = 10) {
},
},
[NodeGraphDataFrameFieldNames.title]: {
values: new ArrayVector(),
values: [],
type: FieldType.string,
},
[NodeGraphDataFrameFieldNames.subTitle]: {
values: new ArrayVector(),
values: [],
type: FieldType.string,
},
[NodeGraphDataFrameFieldNames.mainStat]: {
values: new ArrayVector(),
values: [],
type: FieldType.number,
config: { displayName: 'Transactions per second' },
},
[NodeGraphDataFrameFieldNames.secondaryStat]: {
values: new ArrayVector(),
values: [],
type: FieldType.number,
config: { displayName: 'Average duration' },
},
[NodeGraphDataFrameFieldNames.arc + 'success']: {
values: new ArrayVector(),
values: [],
type: FieldType.number,
config: { color: { fixedColor: 'green', mode: FieldColorModeId.Fixed }, displayName: 'Success' },
},
[NodeGraphDataFrameFieldNames.arc + 'errors']: {
values: new ArrayVector(),
values: [],
type: FieldType.number,
config: { color: { fixedColor: 'red', mode: FieldColorModeId.Fixed }, displayName: 'Errors' },
},
[NodeGraphDataFrameFieldNames.icon]: {
values: new ArrayVector(),
values: [],
type: FieldType.string,
},
};
@ -115,10 +108,10 @@ export function generateRandomNodes(count = 10) {
const edgesFrame = new MutableDataFrame({
name: 'edges',
fields: [
{ name: NodeGraphDataFrameFieldNames.id, values: new ArrayVector(), type: FieldType.string },
{ name: NodeGraphDataFrameFieldNames.source, values: new ArrayVector(), type: FieldType.string },
{ name: NodeGraphDataFrameFieldNames.target, values: new ArrayVector(), type: FieldType.string },
{ name: NodeGraphDataFrameFieldNames.mainStat, values: new ArrayVector(), type: FieldType.number },
{ name: NodeGraphDataFrameFieldNames.id, values: [], type: FieldType.string },
{ name: NodeGraphDataFrameFieldNames.source, values: [], type: FieldType.string },
{ name: NodeGraphDataFrameFieldNames.target, values: [], type: FieldType.string },
{ name: NodeGraphDataFrameFieldNames.mainStat, values: [], type: FieldType.number },
],
meta: { preferredVisualisationType: 'nodeGraph' },
});
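The node-graph test data above drops ArrayVector from both the import list and the field-map type, describing each column as Omit<FieldDTO, 'name'> & { values: any[] } backed by a plain array. A compressed sketch of that shape with a reduced set of field names; the real helper declares more columns and per-field config, and the assembly step shown here is an assumption:

import { FieldDTO, FieldType, MutableDataFrame, NodeGraphDataFrameFieldNames } from '@grafana/data';

const nodeFields: Record<string, Omit<FieldDTO, 'name'> & { values: any[] }> = {
  [NodeGraphDataFrameFieldNames.id]: { values: [], type: FieldType.string },
  [NodeGraphDataFrameFieldNames.title]: { values: [], type: FieldType.string },
  [NodeGraphDataFrameFieldNames.mainStat]: { values: [], type: FieldType.number },
};

// Assumed assembly step: give each entry its name and hand the list to MutableDataFrame.
const nodesFrame = new MutableDataFrame({
  name: 'nodes',
  fields: Object.keys(nodeFields).map((key) => ({ ...nodeFields[key], name: key })),
  meta: { preferredVisualisationType: 'nodeGraph' },
});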

View File

@ -220,7 +220,7 @@ export const BarChartPanel = ({ data, options, fieldConfig, width, height, timeZ
};
const rawValue = (seriesIdx: number, valueIdx: number) => {
return frame0Ref.current!.fields[seriesIdx].values.get(valueIdx);
return frame0Ref.current!.fields[seriesIdx].values[valueIdx];
};
// Color by value
@ -233,7 +233,7 @@ export const BarChartPanel = ({ data, options, fieldConfig, width, height, timeZ
const disp = colorByField.display!;
fillOpacity = (colorByField.config.custom.fillOpacity ?? 100) / 100;
// gradientMode? ignore?
getColor = (seriesIdx: number, valueIdx: number) => disp(colorByFieldRef.current?.values.get(valueIdx)).color!;
getColor = (seriesIdx: number, valueIdx: number) => disp(colorByFieldRef.current?.values[valueIdx]).color!;
} else {
const hasPerBarColor = frame0Ref.current!.fields.some((f) => {
const fromThresholds =
@ -261,7 +261,7 @@ export const BarChartPanel = ({ data, options, fieldConfig, width, height, timeZ
getColor = (seriesIdx: number, valueIdx: number) => {
let field = frame0Ref.current!.fields[seriesIdx];
return field.display!(field.values.get(valueIdx)).color!;
return field.display!(field.values[valueIdx]).color!;
};
}
}
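The bar chart panel above keeps its accessor callbacks but reads cell values with plain indexing rather than .get(). A minimal standalone sketch of such an accessor pair over a DataFrame, independent of the panel's option wiring and uPlot config:

import { DataFrame } from '@grafana/data';

const makeAccessors = (frame: DataFrame) => ({
  // Raw cell lookup by series (field) index and row index.
  rawValue: (seriesIdx: number, valueIdx: number) => frame.fields[seriesIdx].values[valueIdx],
  // Color via the field's display processor, when one has been attached.
  getColor: (seriesIdx: number, valueIdx: number) => {
    const field = frame.fields[seriesIdx];
    return field.display ? field.display(field.values[valueIdx]).color : undefined;
  },
});

The hover, legend, and geomap changes further down in this diff read rows the same way, so one direct-indexing pattern covers all of them.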

View File

@ -109,7 +109,7 @@ describe('BarChart utils', () => {
valueSize: 10,
},
fullHighlight: false,
rawValue: (seriesIdx: number, valueIdx: number) => frame.fields[seriesIdx].values.get(valueIdx),
rawValue: (seriesIdx: number, valueIdx: number) => frame.fields[seriesIdx].values[valueIdx],
};
it.each([VizOrientation.Auto, VizOrientation.Horizontal, VizOrientation.Vertical])('orientation', (v) => {

View File

@ -97,7 +97,7 @@ function buildTableDataFrame(
url: '',
onClick: (e: DataLinkClickEvent) => {
const field: Field = e.origin.field;
const value = field.values.get(e.origin.rowIndex);
const value = field.values[e.origin.rowIndex];
onSymbolClick(value);
},
},

View File

@ -73,7 +73,7 @@ export const generateLabel = (feature: FeatureLike, idx: number): string | React
if (!first) {
first = k;
}
props[k] = f.values.get(rowIndex);
props[k] = f.values[rowIndex];
}
}
}

View File

@ -45,7 +45,7 @@ export const DataHoverView = ({ data, rowIndex, columnIndex, sortOrder, mode, he
const linkLookup = new Set<string>();
for (const f of orderedVisibleFields) {
const v = f.values.get(rowIndex);
const v = f.values[rowIndex];
const disp = f.display ? f.display(v) : { text: `${v}`, numeric: +v };
if (f.getLinks) {
f.getLinks({ calculatedValue: disp, valueRowIndex: rowIndex }).forEach((link) => {

View File

@ -43,7 +43,7 @@ export function MarkersLegend(props: MarkersLegendProps) {
}
const rowIndex = props.rowIndex as number; // eslint-disable-line
return colorField.values.get(rowIndex);
return colorField.values[rowIndex];
}, [hoverEvent, colorField]);
if (!styleConfig) {

Some files were not shown because too many files have changed in this diff.