Chore: more type fixes (#62952)
* fix some more any/type assertions
* more any/type assertion fixes
* implement review comments
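The whole change follows one pattern: replace loose any types and trailing as-assertions with precise generics and Record types. A minimal TypeScript sketch of the two recurring fixes (illustrative only — the DataLink shape below is a stand-in for this example, not the real @grafana/data interface):

// Sketch, not Grafana source. (1) Type the reduce accumulator through the
// generic parameter instead of asserting the type of the seed value.
type DataLink = { title: string; url: string };

function collectLinks(configs: Array<{ url?: string }>): DataLink[] {
  // Before: configs.reduce((acc, c) => { ... }, [] as DataLink[]) — acc is inferred as any.
  return configs.reduce<DataLink[]>((acc, config) => {
    if (config.url) {
      acc.push({ title: 'derived link', url: config.url });
    }
    return acc;
  }, []);
}

// (2) { string?: number } declares an optional property literally named "string";
// Record<string, number> expresses the intended string-keyed map.
const usedUids: Record<string, number> = {};
usedUids['ts_labels_line'] = 1;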
@@ -38,7 +38,7 @@ export function getDerivedFields(dataFrame: DataFrame, derivedFieldConfigs: Deri
 function fieldFromDerivedFieldConfig(derivedFieldConfigs: DerivedFieldConfig[]): Field<any, ArrayVector> {
   const dataSourceSrv = getDataSourceSrv();
 
-  const dataLinks = derivedFieldConfigs.reduce((acc, derivedFieldConfig) => {
+  const dataLinks = derivedFieldConfigs.reduce<DataLink[]>((acc, derivedFieldConfig) => {
     // Having field.datasourceUid means it is an internal link.
     if (derivedFieldConfig.datasourceUid) {
       const dsSettings = dataSourceSrv.getInstanceSettings(derivedFieldConfig.datasourceUid);
@@ -63,7 +63,7 @@ function fieldFromDerivedFieldConfig(derivedFieldConfigs: DerivedFieldConfig[]):
       });
     }
     return acc;
-  }, [] as DataLink[]);
+  }, []);
 
   return {
     name: derivedFieldConfigs[0].name,
@@ -27,7 +27,7 @@ export function appendResponseToBufferedData(response: LokiTailResponse, data: M
 
   // We are comparing used ids only within the received stream. This could be a problem if the same line + labels + nanosecond timestamp came in 2 separate batches.
   // As this is very unlikely, and the result would only affect live-tailing css animation we have decided to not compare all received uids from data param as this would slow down processing.
-  const usedUids: { string?: number } = {};
+  const usedUids: Record<string, number> = {};
 
   for (const stream of streams) {
     // Find unique labels
@@ -45,7 +45,13 @@ export function appendResponseToBufferedData(response: LokiTailResponse, data: M
   }
 }
 
-function createUid(ts: string, labelsString: string, line: string, usedUids: any, refId?: string): string {
+function createUid(
+  ts: string,
+  labelsString: string,
+  line: string,
+  usedUids: Record<string, number>,
+  refId?: string
+): string {
   // Generate id as hashed nanosecond timestamp, labels and line (this does not have to be unique)
   let id = uuidv5(`${ts}_${labelsString}_${line}`, UUID_NAMESPACE);
@@ -45,7 +45,7 @@ export const LokiQueryBuilder = React.memo<Props>(({ datasource, query, onChange
     return [...datasource.getVariables(), ...options].map((value) => ({ label: value, value }));
   };
 
-  const onGetLabelNames = async (forLabel: Partial<QueryBuilderLabelFilter>): Promise<any> => {
+  const onGetLabelNames = async (forLabel: Partial<QueryBuilderLabelFilter>): Promise<string[]> => {
     const labelsToConsider = query.labels.filter((x) => x !== forLabel);
 
     if (labelsToConsider.length === 0) {
@@ -71,9 +71,11 @@ export function LokiQueryBuilderContainer(props: Props) {
   );
 }
 
+const initialState: State = { expr: '' };
+
 const stateSlice = createSlice({
   name: 'loki-builder-container',
-  initialState: { expr: '' } as State,
+  initialState,
   reducers: {
     visualQueryChange: (state, action: PayloadAction<{ visQuery: LokiVisualQuery; expr: string }>) => {
       state.expr = action.payload.expr;
@@ -241,7 +241,7 @@ function getLineFilter(expr: string, node: SyntaxNode): { operation?: QueryBuild
     },
   };
 }
-const mapFilter: any = {
+const mapFilter: Record<string, LokiOperationId> = {
   '|=': LokiOperationId.LineContains,
   '!=': LokiOperationId.LineContainsNot,
   '|~': LokiOperationId.LineMatchesRegex,
@@ -469,13 +469,13 @@ function handleVectorAggregation(expr: string, node: SyntaxNode, context: Contex
   return op;
 }
 
-const operatorToOpName = binaryScalarDefs.reduce((acc, def) => {
+const operatorToOpName = binaryScalarDefs.reduce<Record<string, { id: string; comparison?: boolean }>>((acc, def) => {
   acc[def.sign] = {
     id: def.id,
     comparison: def.comparison,
   };
   return acc;
-}, {} as Record<string, { id: string; comparison?: boolean }>);
+}, {});
 
 /**
  * Right now binary expressions can be represented in 2 way in visual query. As additional operation in case it is
@@ -20,11 +20,12 @@ export function getDefaultEditorMode(expr: string) {
     return QueryEditorMode.Code;
   }
 
-  const value = store.get(queryEditorModeDefaultLocalStorageKey) as QueryEditorMode;
+  const value: string | undefined = store.get(queryEditorModeDefaultLocalStorageKey);
   switch (value) {
-    case QueryEditorMode.Builder:
-    case QueryEditorMode.Code:
-      return value;
+    case 'code':
+      return QueryEditorMode.Code;
+
+    case 'builder':
     default:
       return QueryEditorMode.Builder;
   }
@@ -2,7 +2,7 @@ import { lastValueFrom } from 'rxjs';
 import { getQueryOptions } from 'test/helpers/getQueryOptions';
 import { DatasourceSrvMock, MockObservableDataSourceApi } from 'test/mocks/datasource_srv';
 
-import { LoadingState } from '@grafana/data';
+import { DataQueryRequest, DataSourceInstanceSettings, LoadingState } from '@grafana/data';
 
 import { MIXED_DATASOURCE_NAME } from './MixedDataSource';
 import { MixedDatasource } from './module';
@@ -30,7 +30,7 @@ jest.mock('@grafana/runtime', () => ({
 describe('MixedDatasource', () => {
   describe('with no errors', () => {
     it('direct query should return results', async () => {
-      const ds = new MixedDatasource({} as any);
+      const ds = new MixedDatasource({} as DataSourceInstanceSettings);
       const requestMixed = getQueryOptions({
         targets: [
           { refId: 'QA', datasource: { uid: 'A' } }, // 1
@@ -52,7 +52,7 @@ describe('MixedDatasource', () => {
 
   describe('with errors', () => {
     it('direct query should return results', async () => {
-      const ds = new MixedDatasource({} as any);
+      const ds = new MixedDatasource({} as DataSourceInstanceSettings);
       const requestMixed = getQueryOptions({
         targets: [
           { refId: 'QA', datasource: { uid: 'A' } }, // 1
@@ -84,14 +84,14 @@ describe('MixedDatasource', () => {
     });
 
     it('should return both query results from the same data source', async () => {
-      const ds = new MixedDatasource({} as any);
-      const request: any = {
+      const ds = new MixedDatasource({} as DataSourceInstanceSettings);
+      const request = {
         targets: [
           { refId: 'A', datasource: { uid: 'Loki' } },
           { refId: 'B', datasource: { uid: 'Loki' } },
           { refId: 'C', datasource: { uid: 'A' } },
         ],
-      };
+      } as DataQueryRequest;
 
       await expect(ds.query(request)).toEmitValuesWith((results) => {
         expect(results).toHaveLength(3);
@@ -104,14 +104,14 @@ describe('MixedDatasource', () => {
     });
 
     it('should not return the error for the second time', async () => {
-      const ds = new MixedDatasource({} as any);
-      const request: any = {
+      const ds = new MixedDatasource({} as DataSourceInstanceSettings);
+      const request = {
         targets: [
           { refId: 'A', datasource: 'Loki' },
           { refId: 'DD', datasource: 'D' },
           { refId: 'C', datasource: 'A' },
         ],
-      };
+      } as unknown as DataQueryRequest;
 
       await lastValueFrom(ds.query(request));
 
@@ -121,7 +121,7 @@ describe('MixedDatasource', () => {
             { refId: 'QA', datasource: { uid: 'A' } },
             { refId: 'QB', datasource: { uid: 'B' } },
           ],
-        } as any)
+        } as DataQueryRequest)
       ).toEmitValuesWith((results) => {
         expect(results).toHaveLength(2);
         expect(results[0].key).toBe('mixed-0-');
@@ -131,12 +131,12 @@ describe('MixedDatasource', () => {
     });
 
     it('should filter out MixedDataSource queries', async () => {
-      const ds = new MixedDatasource({} as any);
+      const ds = new MixedDatasource({} as DataSourceInstanceSettings);
 
       await expect(
         ds.query({
           targets: [{ refId: 'A', datasource: { uid: MIXED_DATASOURCE_NAME, id: 'datasource' } }],
-        } as any)
+        } as unknown as DataQueryRequest)
       ).toEmitValuesWith((results) => {
         expect(results).toHaveLength(1);
         expect(results[0].data).toHaveLength(0);
@@ -31,7 +31,7 @@ export class MixedDatasource extends DataSourceApi<DataQuery> {
     });
 
     if (!queries.length) {
-      return of({ data: [] } as DataQueryResponse); // nothing
+      return of({ data: [] }); // nothing
     }
 
     // Build groups of queries to run in parallel
@@ -49,7 +49,7 @@ export class MixedDatasource extends DataSourceApi<DataQuery> {
 
     // Missing UIDs?
     if (!mixed.length) {
-      return of({ data: [] } as DataQueryResponse); // nothing
+      return of({ data: [] }); // nothing
     }
 
     return this.batchQueries(mixed, request);
@@ -70,7 +70,7 @@ export class MixedDatasource extends DataSourceApi<DataQuery> {
             data: response.data || [],
             state: LoadingState.Loading,
             key: `mixed-${i}-${response.key || ''}`,
-          } as DataQueryResponse;
+          };
         };
       }),
       toArray(),
       catchError((err) => {
@@ -1,15 +1,18 @@
 import { render, screen } from '@testing-library/react';
 import React from 'react';
 
+import { PrometheusDatasource } from '../datasource';
+import { PromQuery } from '../types';
+
 import { PromExploreExtraFieldProps, PromExploreExtraField, testIds } from './PromExploreExtraField';
 
 const setup = (propOverrides?: PromExploreExtraFieldProps) => {
-  const query = { exemplar: false };
-  const datasource = {};
+  const query = { exemplar: false } as PromQuery;
+  const datasource = {} as PrometheusDatasource;
   const onChange = jest.fn();
   const onRunQuery = jest.fn();
 
-  const props: any = {
+  const props: PromExploreExtraFieldProps = {
     onChange,
     onRunQuery,
     query,
@@ -75,7 +75,11 @@ describe('PromLink', () => {
       render(
         <div>
           <PromLink datasource={getDataSource()} panelData={getPanelData()} query={{} as PromQuery} />
-          <PromLink datasource={getDataSource({ directUrl: 'prom2' })} panelData={getPanelData()} query={{} as any} />
+          <PromLink
+            datasource={getDataSource({ directUrl: 'prom2' })}
+            panelData={getPanelData()}
+            query={{} as PromQuery}
+          />
         </div>
       );
       const promLinkButtons = screen.getAllByText('Prometheus');
@@ -8,13 +8,14 @@ import { PrometheusDatasource } from '../datasource';
 
 import { PromQueryEditorByApp } from './PromQueryEditorByApp';
 import { testIds as alertingTestIds } from './PromQueryEditorForAlerting';
+import { Props } from './monaco-query-field/MonacoQueryFieldProps';
 
 // the monaco-based editor uses lazy-loading and that does not work
 // well with this test, and we do not need the monaco-related
 // functionality in this test anyway, so we mock it out.
 jest.mock('./monaco-query-field/MonacoQueryFieldLazy', () => {
-  const fakeQueryField = (props: any) => {
-    return <input onBlur={props.onBlur} data-testid={'dummy-code-input'} type={'text'} />;
+  const fakeQueryField = (props: Props) => {
+    return <input onBlur={(e) => props.onBlur(e.currentTarget.value)} data-testid={'dummy-code-input'} type={'text'} />;
   };
   return {
     MonacoQueryFieldLazy: fakeQueryField,
@@ -9,13 +9,14 @@ import { PrometheusDatasource } from '../datasource';
 import PromQlLanguageProvider from '../language_provider';
 
 import PromQueryField from './PromQueryField';
+import { Props } from './monaco-query-field/MonacoQueryFieldProps';
 
 // the monaco-based editor uses lazy-loading and that does not work
 // well with this test, and we do not need the monaco-related
 // functionality in this test anyway, so we mock it out.
 jest.mock('./monaco-query-field/MonacoQueryFieldLazy', () => {
-  const fakeQueryField = (props: any) => {
-    return <input onBlur={props.onBlur} data-testid={'dummy-code-input'} type={'text'} />;
+  const fakeQueryField = (props: Props) => {
+    return <input onBlur={(e) => props.onBlur(e.currentTarget.value)} data-testid={'dummy-code-input'} type={'text'} />;
   };
   return {
     MonacoQueryFieldLazy: fakeQueryField,
@@ -154,7 +155,7 @@ describe('PromQueryField', () => {
 function makeLanguageProvider(options: { metrics: string[][] }) {
   const metricsStack = [...options.metrics];
   return {
-    histogramMetrics: [] as any,
+    histogramMetrics: [],
     metrics: [],
     metricsMetadata: {},
     lookupsDisabled: false,
@@ -191,7 +191,7 @@ const getStyles = stylesFactory((theme: GrafanaTheme2) => ({
 export class UnthemedPrometheusMetricsBrowser extends React.Component<BrowserProps, BrowserState> {
   valueListsRef = React.createRef<HTMLDivElement>();
   state: BrowserState = {
-    labels: [] as SelectableLabel[],
+    labels: [],
     labelSearchTerm: '',
     metricSearchTerm: '',
     status: 'Ready',
@@ -7,11 +7,11 @@ export enum AzureCloud {
   None = '',
 }
 
-export const KnownAzureClouds = [
+export const KnownAzureClouds: Array<SelectableValue<AzureCloud>> = [
   { value: AzureCloud.Public, label: 'Azure' },
   { value: AzureCloud.China, label: 'Azure China' },
   { value: AzureCloud.USGovernment, label: 'Azure US Government' },
-] as SelectableValue[];
+];
 
 export type AzureAuthType = 'msi' | 'clientsecret';
 
@@ -2,8 +2,11 @@ import { cloneDeep } from 'lodash';
 import { lastValueFrom, of, throwError } from 'rxjs';
 
 import {
+  AnnotationEvent,
+  AnnotationQueryRequest,
   CoreApp,
   DataQueryRequest,
+  DataQueryResponse,
   DataQueryResponseData,
   DataSourceInstanceSettings,
   dateTime,
@@ -25,32 +28,35 @@ import {
   prometheusRegularEscape,
   prometheusSpecialRegexEscape,
 } from './datasource';
-import { PromOptions, PromQuery } from './types';
+import PromQlLanguageProvider from './language_provider';
+import { PromOptions, PromQuery, PromQueryRequest } from './types';
 
 const fetchMock = jest.fn().mockReturnValue(of(createDefaultPromResponse()));
 
 jest.mock('./metric_find_query');
 jest.mock('@grafana/runtime', () => ({
   // @ts-ignore
   ...jest.requireActual('@grafana/runtime'),
   getBackendSrv: () => ({
     fetch: fetchMock,
   }),
 }));
 
+const getAdhocFiltersMock = jest.fn().mockImplementation(() => []);
+const replaceMock = jest.fn().mockImplementation((a: string, ...rest: unknown[]) => a);
+
 const templateSrvStub = {
-  getAdhocFilters: jest.fn(() => [] as any[]),
-  replace: jest.fn((a: string, ...rest: any) => a),
-};
+  getAdhocFilters: getAdhocFiltersMock,
+  replace: replaceMock,
+} as unknown as TemplateSrv;
 
 const timeSrvStub = {
-  timeRange(): any {
+  timeRange() {
     return {
       from: dateTime(1531468681),
       to: dateTime(1531489712),
     };
   },
-};
+} as unknown as TimeSrv;
 
 beforeEach(() => {
   jest.clearAllMocks();
@@ -67,11 +73,11 @@ describe('PrometheusDatasource', () => {
     password: 'mupp',
     jsonData: {
       customQueryParameters: '',
-    } as any,
+    },
   } as unknown as DataSourceInstanceSettings<PromOptions>;
 
   beforeEach(() => {
-    ds = new PrometheusDatasource(instanceSettings, templateSrvStub as any, timeSrvStub as any);
+    ds = new PrometheusDatasource(instanceSettings, templateSrvStub, timeSrvStub);
   });
 
   describe('Query', () => {
@@ -113,10 +119,10 @@ describe('PrometheusDatasource', () => {
         access: 'direct',
         jsonData: {
           customQueryParameters: '',
-        } as any,
+        },
       } as unknown as DataSourceInstanceSettings<PromOptions>;
       const range = { from: time({ seconds: 63 }), to: time({ seconds: 183 }) };
-      const directDs = new PrometheusDatasource(instanceSettings, templateSrvStub as any, timeSrvStub as any);
+      const directDs = new PrometheusDatasource(instanceSettings, templateSrvStub, timeSrvStub);
 
       await expect(
         lastValueFrom(directDs.query(createDataRequest([{}, {}], { app: CoreApp.Dashboard })))
@@ -166,7 +172,7 @@ describe('PrometheusDatasource', () => {
     it('should still perform a GET request with the DS HTTP method set to POST and not POST-friendly endpoint', () => {
       const postSettings = cloneDeep(instanceSettings);
       postSettings.jsonData.httpMethod = 'POST';
-      const promDs = new PrometheusDatasource(postSettings, templateSrvStub as any, timeSrvStub as any);
+      const promDs = new PrometheusDatasource(postSettings, templateSrvStub, timeSrvStub);
       promDs.metadataRequest('/foo');
       expect(fetchMock.mock.calls.length).toBe(1);
       expect(fetchMock.mock.calls[0][0].method).toBe('GET');
@@ -174,7 +180,7 @@ describe('PrometheusDatasource', () => {
     it('should try to perform a POST request with the DS HTTP method set to POST and POST-friendly endpoint', () => {
       const postSettings = cloneDeep(instanceSettings);
       postSettings.jsonData.httpMethod = 'POST';
-      const promDs = new PrometheusDatasource(postSettings, templateSrvStub as any, timeSrvStub as any);
+      const promDs = new PrometheusDatasource(postSettings, templateSrvStub, timeSrvStub);
       promDs.metadataRequest('api/v1/series', { bar: 'baz baz', foo: 'foo' });
       expect(fetchMock.mock.calls.length).toBe(1);
       expect(fetchMock.mock.calls[0][0].method).toBe('POST');
@@ -190,14 +196,14 @@ describe('PrometheusDatasource', () => {
         range: { from: time({ seconds: 63 }), to: time({ seconds: 183 }) },
         targets: [target],
         interval: '60s',
-      } as any;
+      } as DataQueryRequest<PromQuery>;
     }
 
     describe('with GET http method', () => {
       const promDs = new PrometheusDatasource(
-        { ...instanceSettings, jsonData: { customQueryParameters: 'customQuery=123', httpMethod: 'GET' } as any },
-        templateSrvStub as any,
-        timeSrvStub as any
+        { ...instanceSettings, jsonData: { customQueryParameters: 'customQuery=123', httpMethod: 'GET' } },
+        templateSrvStub,
+        timeSrvStub
       );
 
       it('added to metadata request', () => {
@@ -230,9 +236,9 @@ describe('PrometheusDatasource', () => {
 
     describe('with POST http method', () => {
       const promDs = new PrometheusDatasource(
-        { ...instanceSettings, jsonData: { customQueryParameters: 'customQuery=123', httpMethod: 'POST' } as any },
-        templateSrvStub as any,
-        timeSrvStub as any
+        { ...instanceSettings, jsonData: { customQueryParameters: 'customQuery=123', httpMethod: 'POST' } },
+        templateSrvStub,
+        timeSrvStub
      );
 
       it('added to metadata request with non-POST endpoint', () => {
@@ -278,20 +284,19 @@ describe('PrometheusDatasource', () => {
 
   describe('When using adhoc filters', () => {
     const DEFAULT_QUERY_EXPRESSION = 'metric{job="foo"} - metric';
-    const target = { expr: DEFAULT_QUERY_EXPRESSION };
-    const originalAdhocFiltersMock = templateSrvStub.getAdhocFilters();
+    const target: PromQuery = { expr: DEFAULT_QUERY_EXPRESSION, refId: 'A' };
 
     afterAll(() => {
-      templateSrvStub.getAdhocFilters.mockReturnValue(originalAdhocFiltersMock);
+      getAdhocFiltersMock.mockImplementation(() => []);
     });
 
     it('should not modify expression with no filters', () => {
-      const result = ds.createQuery(target as any, { interval: '15s' } as any, 0, 0);
+      const result = ds.createQuery(target, { interval: '15s' } as DataQueryRequest<PromQuery>, 0, 0);
       expect(result).toMatchObject({ expr: DEFAULT_QUERY_EXPRESSION });
     });
 
     it('should add filters to expression', () => {
-      templateSrvStub.getAdhocFilters.mockReturnValue([
+      getAdhocFiltersMock.mockReturnValue([
        {
          key: 'k1',
          operator: '=',
@@ -303,12 +308,12 @@ describe('PrometheusDatasource', () => {
           value: 'v2',
         },
       ]);
-      const result = ds.createQuery(target as any, { interval: '15s' } as any, 0, 0);
+      const result = ds.createQuery(target, { interval: '15s' } as DataQueryRequest<PromQuery>, 0, 0);
       expect(result).toMatchObject({ expr: 'metric{job="foo", k1="v1", k2!="v2"} - metric{k1="v1", k2!="v2"}' });
     });
 
     it('should add escaping if needed to regex filter expressions', () => {
-      templateSrvStub.getAdhocFilters.mockReturnValue([
+      getAdhocFiltersMock.mockReturnValue([
        {
          key: 'k1',
          operator: '=~',
@@ -320,7 +325,7 @@ describe('PrometheusDatasource', () => {
           value: `v'.*`,
         },
       ]);
-      const result = ds.createQuery(target as any, { interval: '15s' } as any, 0, 0);
+      const result = ds.createQuery(target, { interval: '15s' } as DataQueryRequest<PromQuery>, 0, 0);
       expect(result).toMatchObject({
         expr: `metric{job="foo", k1=~"v.*", k2=~"v\\\\'.*"} - metric{k1=~"v.*", k2=~"v\\\\'.*"}`,
       });
@@ -328,13 +333,13 @@ describe('PrometheusDatasource', () => {
   });
 
   describe('When converting prometheus histogram to heatmap format', () => {
-    let query: any;
+    let query: DataQueryRequest<PromQuery>;
     beforeEach(() => {
       query = {
         range: { from: dateTime(1443454528000), to: dateTime(1443454528000) },
         targets: [{ expr: 'test{job="testjob"}', format: 'heatmap', legendFormat: '{{le}}' }],
         interval: '1s',
-      };
+      } as DataQueryRequest<PromQuery>;
     });
 
     it('should convert cumulative histogram to ordinary', async () => {
@@ -619,7 +624,7 @@ describe('PrometheusDatasource', () => {
         refId: 'A',
       };
       const interval = '10m';
-      templateSrvStub.replace.mockReturnValue(interval);
+      replaceMock.mockReturnValue(interval);
 
       const queries = ds.interpolateVariablesInQueries([query], { Interval: { text: interval, value: interval } });
       expect(templateSrvStub.replace).toBeCalledTimes(2);
@@ -640,11 +645,9 @@ describe('PrometheusDatasource', () => {
   });
 
   describe('applyTemplateVariables', () => {
-    const originalAdhocFiltersMock = templateSrvStub.getAdhocFilters();
-    const originalReplaceMock = jest.fn((a: string, ...rest: any) => a);
     afterAll(() => {
-      templateSrvStub.getAdhocFilters.mockReturnValue(originalAdhocFiltersMock);
-      templateSrvStub.replace = originalReplaceMock;
+      getAdhocFiltersMock.mockImplementation(() => []);
+      replaceMock.mockImplementation((a: string, ...rest: unknown[]) => a);
     });
 
     it('should call replace function for legendFormat', () => {
@@ -654,7 +657,7 @@ describe('PrometheusDatasource', () => {
         refId: 'A',
       };
       const legend = 'baz';
-      templateSrvStub.replace.mockReturnValue(legend);
+      replaceMock.mockReturnValue(legend);
 
       const interpolatedQuery = ds.applyTemplateVariables(query, { legend: { text: legend, value: legend } });
       expect(interpolatedQuery.legendFormat).toBe(legend);
@@ -667,7 +670,7 @@ describe('PrometheusDatasource', () => {
         refId: 'A',
       };
       const step = '5s';
-      templateSrvStub.replace.mockReturnValue(step);
+      replaceMock.mockReturnValue(step);
 
       const interpolatedQuery = ds.applyTemplateVariables(query, { step: { text: step, value: step } });
       expect(interpolatedQuery.interval).toBe(step);
@@ -679,15 +682,15 @@ describe('PrometheusDatasource', () => {
         refId: 'A',
       };
       const job = 'bar';
-      templateSrvStub.replace.mockReturnValue(job);
+      replaceMock.mockReturnValue(job);
 
       const interpolatedQuery = ds.applyTemplateVariables(query, { job: { text: job, value: job } });
       expect(interpolatedQuery.expr).toBe(job);
     });
 
     it('should add ad-hoc filters to expr', () => {
-      templateSrvStub.replace = jest.fn((a: string) => a);
-      templateSrvStub.getAdhocFilters.mockReturnValue([
+      replaceMock.mockImplementation((a: string) => a);
+      getAdhocFiltersMock.mockReturnValue([
        {
          key: 'k1',
          operator: '=',
@@ -713,30 +716,25 @@ describe('PrometheusDatasource', () => {
   describe('metricFindQuery', () => {
     beforeEach(() => {
       const query = 'query_result(topk(5,rate(http_request_duration_microseconds_count[$__interval])))';
-      templateSrvStub.replace = jest.fn();
       ds.metricFindQuery(query);
     });
 
-    afterAll(() => {
-      templateSrvStub.replace = jest.fn((a: string) => a);
-    });
-
     it('should call templateSrv.replace with scopedVars', () => {
-      expect(templateSrvStub.replace.mock.calls[0][1]).toBeDefined();
+      expect(replaceMock.mock.calls[0][1]).toBeDefined();
     });
 
     it('should have the correct range and range_ms', () => {
-      const range = templateSrvStub.replace.mock.calls[0][1].__range;
-      const rangeMs = templateSrvStub.replace.mock.calls[0][1].__range_ms;
-      const rangeS = templateSrvStub.replace.mock.calls[0][1].__range_s;
+      const range = replaceMock.mock.calls[0][1].__range;
+      const rangeMs = replaceMock.mock.calls[0][1].__range_ms;
+      const rangeS = replaceMock.mock.calls[0][1].__range_s;
       expect(range).toEqual({ text: '21s', value: '21s' });
       expect(rangeMs).toEqual({ text: 21031, value: 21031 });
       expect(rangeS).toEqual({ text: 21, value: 21 });
     });
 
     it('should pass the default interval value', () => {
-      const interval = templateSrvStub.replace.mock.calls[0][1].__interval;
-      const intervalMs = templateSrvStub.replace.mock.calls[0][1].__interval_ms;
+      const interval = replaceMock.mock.calls[0][1].__interval;
+      const intervalMs = replaceMock.mock.calls[0][1].__interval_ms;
       expect(interval).toEqual({ text: '15s', value: '15s' });
       expect(intervalMs).toEqual({ text: 15000, value: 15000 });
     });
@@ -762,17 +760,17 @@ describe('PrometheusDatasource2', () => {
 
   let ds: PrometheusDatasource;
   beforeEach(() => {
-    ds = new PrometheusDatasource(instanceSettings, templateSrvStub as any, timeSrvStub as any);
+    ds = new PrometheusDatasource(instanceSettings, templateSrvStub, timeSrvStub);
   });
 
   describe('When querying prometheus with one target using query editor target spec', () => {
     describe('and query syntax is valid', () => {
-      let results: any;
+      let results: DataQueryResponse;
       const query = {
         range: { from: time({ seconds: 63 }), to: time({ seconds: 183 }) },
         targets: [{ expr: 'test{job="testjob"}', format: 'time_series' }],
         interval: '60s',
-      };
+      } as DataQueryRequest<PromQuery>;
 
       // Interval alignment with step
       const urlExpected = `proxied/api/v1/query_range?query=${encodeURIComponent(
@@ -795,7 +793,7 @@ describe('PrometheusDatasource2', () => {
           },
         };
         fetchMock.mockImplementation(() => of(response));
-        ds.query(query as any).subscribe((data: any) => {
+        ds.query(query).subscribe((data) => {
          results = data;
        });
      });
@@ -819,7 +817,7 @@ describe('PrometheusDatasource2', () => {
         range: { from: time({ seconds: 63 }), to: time({ seconds: 183 }) },
         targets: [{ expr: 'tes;;t{job="testjob"}', format: 'time_series' }],
         interval: '60s',
-      };
+      } as DataQueryRequest<PromQuery>;
 
       const errMessage = 'parse error at char 25: could not parse remaining input';
       const response = {
@@ -832,7 +830,7 @@ describe('PrometheusDatasource2', () => {
 
       it('should generate an error', () => {
         fetchMock.mockImplementation(() => throwError(response));
-        ds.query(query as any).subscribe((e: any) => {
+        ds.query(query).subscribe((e: any) => {
          results = e.message;
          expect(results).toBe(`"${errMessage}"`);
        });
@@ -841,7 +839,7 @@ describe('PrometheusDatasource2', () => {
   });
 
   describe('When querying prometheus with one target which returns multiple series', () => {
-    let results: any;
+    let results: DataQueryResponse;
     const start = 60;
     const end = 360;
     const step = 60;
@@ -850,7 +848,7 @@ describe('PrometheusDatasource2', () => {
       range: { from: time({ seconds: start }), to: time({ seconds: end }) },
       targets: [{ expr: 'test{job="testjob"}', format: 'time_series' }],
       interval: '60s',
-    };
+    } as DataQueryRequest<PromQuery>;
 
     beforeEach(async () => {
       const response = {
@@ -878,7 +876,7 @@ describe('PrometheusDatasource2', () => {
 
       fetchMock.mockImplementation(() => of(response));
 
-      ds.query(query as any).subscribe((data: any) => {
+      ds.query(query).subscribe((data) => {
        results = data;
      });
    });
@@ -915,7 +913,7 @@ describe('PrometheusDatasource2', () => {
   });
 
   describe('When querying prometheus with one target and instant = true', () => {
-    let results: any;
+    let results: DataQueryResponse;
     const urlExpected = `/api/datasources/uid/ABCDEF/resources/api/v1/query?query=${encodeURIComponent(
       'test{job="testjob"}'
     )}&time=123`;
@@ -923,7 +921,7 @@ describe('PrometheusDatasource2', () => {
       range: { from: time({ seconds: 63 }), to: time({ seconds: 123 }) },
       targets: [{ expr: 'test{job="testjob"}', format: 'time_series', instant: true }],
       interval: '60s',
-    };
+    } as DataQueryRequest<PromQuery>;
 
     beforeEach(async () => {
       const response = {
@@ -942,7 +940,7 @@ describe('PrometheusDatasource2', () => {
       };
 
       fetchMock.mockImplementation(() => of(response));
-      ds.query(query as any).subscribe((data: any) => {
+      ds.query(query).subscribe((data) => {
        results = data;
      });
    });
@@ -962,8 +960,8 @@ describe('PrometheusDatasource2', () => {
   });
 
   describe('annotationQuery', () => {
-    let results: any;
-    const options: any = {
+    let results: AnnotationEvent[];
+    const options = {
       annotation: {
         expr: 'ALERTS{alertstate="firing"}',
         tagKeys: 'job',
@@ -974,7 +972,7 @@ describe('PrometheusDatasource2', () => {
         from: time({ seconds: 63 }),
         to: time({ seconds: 123 }),
       },
-    };
+    } as unknown as AnnotationQueryRequest<PromQuery>;
 
     const response = createAnnotationResponse();
 
@@ -982,7 +980,7 @@ describe('PrometheusDatasource2', () => {
     it('should return empty results', async () => {
       fetchMock.mockImplementation(() => of({ cancelled: true }));
 
-      await ds.annotationQuery(options).then((data: any) => {
+      await ds.annotationQuery(options).then((data) => {
        results = data;
      });
 
@@ -995,7 +993,7 @@ describe('PrometheusDatasource2', () => {
       options.annotation.useValueForTime = false;
       fetchMock.mockImplementation(() => of(response));
 
-      await ds.annotationQuery(options).then((data: any) => {
+      await ds.annotationQuery(options).then((data) => {
        results = data;
      });
    });
@@ -1014,7 +1012,7 @@ describe('PrometheusDatasource2', () => {
       options.annotation.useValueForTime = true;
       fetchMock.mockImplementation(() => of(response));
 
-      await ds.annotationQuery(options).then((data: any) => {
+      await ds.annotationQuery(options).then((data) => {
        results = data;
      });
    });
@@ -1036,7 +1034,7 @@ describe('PrometheusDatasource2', () => {
           from: time({ seconds: 63 }),
           to: time({ seconds: 123 }),
         },
-      };
+      } as AnnotationQueryRequest<PromQuery>;
       ds.annotationQuery(query);
       const req = fetchMock.mock.calls[0][0];
       expect(req.data.queries[0].interval).toBe('60s');
@@ -1053,7 +1051,7 @@ describe('PrometheusDatasource2', () => {
           from: time({ seconds: 63 }),
           to: time({ seconds: 123 }),
         },
-      };
+      } as unknown as AnnotationQueryRequest<PromQuery>;
       ds.annotationQuery(query);
       const req = fetchMock.mock.calls[0][0];
       expect(req.data.queries[0].interval).toBe('60s');
@@ -1071,7 +1069,7 @@ describe('PrometheusDatasource2', () => {
           from: time({ seconds: 63 }),
           to: time({ seconds: 123 }),
         },
-      };
+      } as unknown as AnnotationQueryRequest<PromQuery>;
       ds.annotationQuery(query);
       const req = fetchMock.mock.calls[0][0];
       expect(req.data.queries[0].interval).toBe('10s');
@@ -1135,10 +1133,10 @@ describe('PrometheusDatasource2', () => {
       expect(results.map((result) => [result.time, result.timeEnd])).toEqual([[120000, 120000]]);
     });
   });
 
   describe('with template variables', () => {
-    const originalReplaceMock = jest.fn((a: string, ...rest: any) => a);
     afterAll(() => {
-      templateSrvStub.replace = originalReplaceMock;
+      replaceMock.mockImplementation((a: string, ...rest: unknown[]) => a);
     });
 
     it('should interpolate variables in query expr', () => {
@@ -1152,9 +1150,9 @@ describe('PrometheusDatasource2', () => {
           from: time({ seconds: 1 }),
           to: time({ seconds: 2 }),
         },
-      };
+      } as unknown as AnnotationQueryRequest<PromQuery>;
       const interpolated = 'interpolated_expr';
-      templateSrvStub.replace.mockReturnValue(interpolated);
+      replaceMock.mockReturnValue(interpolated);
       ds.annotationQuery(query);
       const req = fetchMock.mock.calls[0][0];
       expect(req.data.queries[0].expr).toBe(interpolated);
@@ -1163,12 +1161,12 @@ describe('PrometheusDatasource2', () => {
   });
 
   describe('When resultFormat is table and instant = true', () => {
-    let results: any;
+    let results: DataQueryResponse;
     const query = {
       range: { from: time({ seconds: 63 }), to: time({ seconds: 123 }) },
       targets: [{ expr: 'test{job="testjob"}', format: 'time_series', instant: true }],
       interval: '60s',
-    };
+    } as DataQueryRequest<PromQuery>;
 
     beforeEach(async () => {
       const response = {
@@ -1187,7 +1185,7 @@ describe('PrometheusDatasource2', () => {
       };
 
       fetchMock.mockImplementation(() => of(response));
-      ds.query(query as any).subscribe((data: any) => {
+      ds.query(query).subscribe((data: any) => {
        results = data;
      });
    });
@@ -1219,11 +1217,11 @@ describe('PrometheusDatasource2', () => {
         },
       ],
       interval: '5s',
-    };
+    } as DataQueryRequest<PromQuery>;
     const urlExpected = 'proxied/api/v1/query_range?query=test&start=60&end=420&step=10';
 
     fetchMock.mockImplementation(() => of(response));
-    ds.query(query as any);
+    ds.query(query);
     const res = fetchMock.mock.calls[0][0];
     expect(res.method).toBe('GET');
     expect(res.url).toBe(urlExpected);
@@ -1235,10 +1233,10 @@ describe('PrometheusDatasource2', () => {
       range: { from: time({ minutes: 1 }), to: time({ minutes: 7 }) },
       targets: [{ expr: 'test' }],
       interval: '100ms',
-    };
+    } as DataQueryRequest<PromQuery>;
     const urlExpected = 'proxied/api/v1/query_range?query=test&start=60&end=420&step=0.1';
     fetchMock.mockImplementation(() => of(response));
-    ds.query(query as any);
+    ds.query(query);
     const res = fetchMock.mock.calls[0][0];
     expect(res.method).toBe('GET');
     expect(res.url).toBe(urlExpected);
@@ -1255,10 +1253,10 @@ describe('PrometheusDatasource2', () => {
         },
       ],
       interval: '10s',
-    };
+    } as DataQueryRequest<PromQuery>;
     const urlExpected = 'proxied/api/v1/query_range?query=test&start=60&end=420&step=10';
     fetchMock.mockImplementation(() => of(response));
-    ds.query(query as any);
+    ds.query(query);
     const res = fetchMock.mock.calls[0][0];
     expect(res.method).toBe('GET');
     expect(res.url).toBe(urlExpected);
@@ -1270,12 +1268,12 @@ describe('PrometheusDatasource2', () => {
       range: { from: time({ hours: 1 }), to: time({ hours: 7 }) },
       targets: [{ expr: 'test' }],
       interval: '1s',
-    };
+    } as DataQueryRequest<PromQuery>;
     const end = 7 * 60 * 60;
     const start = 60 * 60;
     const urlExpected = 'proxied/api/v1/query_range?query=test&start=' + start + '&end=' + end + '&step=2';
     fetchMock.mockImplementation(() => of(response));
-    ds.query(query as any);
+    ds.query(query);
     const res = fetchMock.mock.calls[0][0];
     expect(res.method).toBe('GET');
    expect(res.url).toBe(urlExpected);
@@ -1293,11 +1291,11 @@ describe('PrometheusDatasource2', () => {
         },
       ],
       interval: '5s',
-    };
+    } as DataQueryRequest<PromQuery>;
     // times get rounded up to interval
     const urlExpected = 'proxied/api/v1/query_range?query=test&start=50&end=400&step=50';
     fetchMock.mockImplementation(() => of(response));
-    ds.query(query as any);
+    ds.query(query);
     const res = fetchMock.mock.calls[0][0];
     expect(res.method).toBe('GET');
     expect(res.url).toBe(urlExpected);
@@ -1315,10 +1313,10 @@ describe('PrometheusDatasource2', () => {
         },
       ],
       interval: '5s',
-    };
+    } as DataQueryRequest<PromQuery>;
     const urlExpected = 'proxied/api/v1/query_range?query=test' + '&start=60&end=420&step=15';
     fetchMock.mockImplementation(() => of(response));
-    ds.query(query as any);
+    ds.query(query);
     const res = fetchMock.mock.calls[0][0];
     expect(res.method).toBe('GET');
     expect(res.url).toBe(urlExpected);
@@ -1336,11 +1334,11 @@ describe('PrometheusDatasource2', () => {
         },
       ],
       interval: '10s',
-    };
+    } as DataQueryRequest<PromQuery>;
     // times get aligned to interval
     const urlExpected = 'proxied/api/v1/query_range?query=test' + '&start=0&end=400&step=100';
     fetchMock.mockImplementation(() => of(response));
-    ds.query(query as any);
+    ds.query(query);
     const res = fetchMock.mock.calls[0][0];
     expect(res.method).toBe('GET');
     expect(res.url).toBe(urlExpected);
@@ -1357,12 +1355,12 @@ describe('PrometheusDatasource2', () => {
         },
       ],
       interval: '10s',
-    };
+    } as DataQueryRequest<PromQuery>;
     const end = 7 * 24 * 60 * 60;
     const start = 0;
     const urlExpected = 'proxied/api/v1/query_range?query=test' + '&start=' + start + '&end=' + end + '&step=100';
     fetchMock.mockImplementation(() => of(response));
-    ds.query(query as any);
+    ds.query(query);
     const res = fetchMock.mock.calls[0][0];
     expect(res.method).toBe('GET');
     expect(res.url).toBe(urlExpected);
@@ -1379,7 +1377,7 @@ describe('PrometheusDatasource2', () => {
         },
       ],
       interval: '5s',
-    };
+    } as DataQueryRequest<PromQuery>;
     let end = 7 * 24 * 60 * 60;
     end -= end % 55;
     const start = 0;
@@ -1388,7 +1386,7 @@ describe('PrometheusDatasource2', () => {
     const urlExpected =
       'proxied/api/v1/query_range?query=test' + '&start=' + adjusted.start + '&end=' + adjusted.end + '&step=' + step;
     fetchMock.mockImplementation(() => of(response));
-    ds.query(query as any);
+    ds.query(query);
     const res = fetchMock.mock.calls[0][0];
     expect(res.method).toBe('GET');
     expect(res.url).toBe(urlExpected);
@@ -1428,14 +1426,14 @@ describe('PrometheusDatasource2', () => {
       encodeURIComponent('rate(test[$__interval])') +
       '&start=60&end=420&step=10';
 
-    templateSrvStub.replace = jest.fn((str) => str) as any;
+    replaceMock.mockImplementation((str) => str);
     fetchMock.mockImplementation(() => of(response));
     ds.query(query as any);
     const res = fetchMock.mock.calls[0][0];
     expect(res.method).toBe('GET');
     expect(res.url).toBe(urlExpected);
 
-    expect(templateSrvStub.replace.mock.calls[0][1]).toEqual({
+    expect(replaceMock.mock.calls[0][1]).toEqual({
       __interval: {
         text: '10s',
         value: '10s',
@@ -1468,13 +1466,13 @@ describe('PrometheusDatasource2', () => {
       encodeURIComponent('rate(test[$__interval])') +
       '&start=60&end=420&step=10';
     fetchMock.mockImplementation(() => of(response));
-    templateSrvStub.replace = jest.fn((str) => str) as any;
+    replaceMock.mockImplementation((str) => str);
     ds.query(query as any);
     const res = fetchMock.mock.calls[0][0];
     expect(res.method).toBe('GET');
     expect(res.url).toBe(urlExpected);
 
-    expect(templateSrvStub.replace.mock.calls[0][1]).toEqual({
+    expect(replaceMock.mock.calls[0][1]).toEqual({
       __interval: {
         text: '5s',
         value: '5s',
@@ -1508,13 +1506,13 @@ describe('PrometheusDatasource2', () => {
       encodeURIComponent('rate(test[$__interval])') +
       '&start=0&end=400&step=100';
     fetchMock.mockImplementation(() => of(response));
-    templateSrvStub.replace = jest.fn((str) => str) as any;
+    replaceMock.mockImplementation((str) => str);
     ds.query(query as any);
     const res = fetchMock.mock.calls[0][0];
     expect(res.method).toBe('GET');
     expect(res.url).toBe(urlExpected);
 
-    expect(templateSrvStub.replace.mock.calls[0][1]).toEqual({
+    expect(replaceMock.mock.calls[0][1]).toEqual({
       __interval: {
         text: '10s',
         value: '10s',
@@ -1553,14 +1551,14 @@ describe('PrometheusDatasource2', () => {
       encodeURIComponent('rate(test[$__interval])') +
       '&start=50&end=400&step=50';
 
-    templateSrvStub.replace = jest.fn((str) => str) as any;
+    replaceMock.mockImplementation((str) => str);
     fetchMock.mockImplementation(() => of(response));
     ds.query(query as any);
     const res = fetchMock.mock.calls[0][0];
     expect(res.method).toBe('GET');
     expect(res.url).toBe(urlExpected);
 
-    expect(templateSrvStub.replace.mock.calls[0][1]).toEqual({
+    expect(replaceMock.mock.calls[0][1]).toEqual({
       __interval: {
         text: '5s',
         value: '5s',
@@ -1600,7 +1598,7 @@ describe('PrometheusDatasource2', () => {
     expect(res.method).toBe('GET');
     expect(res.url).toBe(urlExpected);
 
-    expect(templateSrvStub.replace.mock.calls[0][1]).toEqual({
+    expect(replaceMock.mock.calls[0][1]).toEqual({
       __interval: {
         text: '5s',
         value: '5s',
@@ -1643,13 +1641,13 @@ describe('PrometheusDatasource2', () => {
       '&step=' +
       step;
     fetchMock.mockImplementation(() => of(response));
-    templateSrvStub.replace = jest.fn((str) => str) as any;
+    replaceMock.mockImplementation((str) => str);
     ds.query(query as any);
     const res = fetchMock.mock.calls[0][0];
     expect(res.method).toBe('GET');
     expect(res.url).toBe(urlExpected);
 
-    expect(templateSrvStub.replace.mock.calls[0][1]).toEqual({
+    expect(replaceMock.mock.calls[0][1]).toEqual({
       __interval: {
         text: '5s',
         value: '5s',
@@ -1687,18 +1685,18 @@ describe('PrometheusDatasource2', () => {
        },
      ],
      interval: '60s',
-    };
+    } as DataQueryRequest<PromQuery>;
     const urlExpected = `proxied/api/v1/query_range?query=${encodeURIComponent(
       query.targets[0].expr
     )}&start=0&end=3600&step=60`;
 
-    templateSrvStub.replace = jest.fn((str) => str) as any;
+    replaceMock.mockImplementation((str) => str);
     fetchMock.mockImplementation(() => of(response));
-    ds.query(query as any);
+    ds.query(query);
     const res = fetchMock.mock.calls[0][0];
     expect(res.url).toBe(urlExpected);
 
-    expect(templateSrvStub.replace.mock.calls[1][1]).toEqual({
+    expect(replaceMock.mock.calls[1][1]).toEqual({
       __range_s: {
         text: expectedRangeSecond,
         value: expectedRangeSecond,
@@ -1723,46 +1721,46 @@ describe('PrometheusDatasource2', () => {
     const target = { expr: 'rate(process_cpu_seconds_total[$__rate_interval])', refId: 'A' };
 
     beforeEach(() => {
-      templateSrvStub.replace.mockClear();
+      replaceMock.mockClear();
     });
 
     it('should be 4 times the scrape interval if interval + scrape interval is lower', () => {
-      ds.createQuery(target, { interval: '15s' } as any, 0, 300);
-      expect(templateSrvStub.replace.mock.calls[1][1]['__rate_interval'].value).toBe('60s');
+      ds.createQuery(target, { interval: '15s' } as DataQueryRequest<PromQuery>, 0, 300);
+      expect(replaceMock.mock.calls[1][1]['__rate_interval'].value).toBe('60s');
     });
     it('should be interval + scrape interval if 4 times the scrape interval is lower', () => {
-      ds.createQuery(target, { interval: '5m' } as any, 0, 10080);
-      expect(templateSrvStub.replace.mock.calls[1][1]['__rate_interval'].value).toBe('315s');
+      ds.createQuery(target, { interval: '5m' } as DataQueryRequest<PromQuery>, 0, 10080);
+      expect(replaceMock.mock.calls[1][1]['__rate_interval'].value).toBe('315s');
     });
     it('should fall back to a scrape interval of 15s if min step is set to 0, resulting in 4*15s = 60s', () => {
-      ds.createQuery({ ...target, interval: '' }, { interval: '15s' } as any, 0, 300);
-      expect(templateSrvStub.replace.mock.calls[1][1]['__rate_interval'].value).toBe('60s');
+      ds.createQuery({ ...target, interval: '' }, { interval: '15s' } as DataQueryRequest<PromQuery>, 0, 300);
+      expect(replaceMock.mock.calls[1][1]['__rate_interval'].value).toBe('60s');
     });
     it('should be 4 times the scrape interval if min step set to 1m and interval is 15s', () => {
       // For a 5m graph, $__interval is 15s
-      ds.createQuery({ ...target, interval: '1m' }, { interval: '15s' } as any, 0, 300);
-      expect(templateSrvStub.replace.mock.calls[2][1]['__rate_interval'].value).toBe('240s');
+      ds.createQuery({ ...target, interval: '1m' }, { interval: '15s' } as DataQueryRequest<PromQuery>, 0, 300);
+      expect(replaceMock.mock.calls[2][1]['__rate_interval'].value).toBe('240s');
     });
     it('should be interval + scrape interval if min step set to 1m and interval is 5m', () => {
       // For a 7d graph, $__interval is 5m
-      ds.createQuery({ ...target, interval: '1m' }, { interval: '5m' } as any, 0, 10080);
-      expect(templateSrvStub.replace.mock.calls[2][1]['__rate_interval'].value).toBe('360s');
+      ds.createQuery({ ...target, interval: '1m' }, { interval: '5m' } as DataQueryRequest<PromQuery>, 0, 10080);
+      expect(replaceMock.mock.calls[2][1]['__rate_interval'].value).toBe('360s');
     });
     it('should be interval + scrape interval if resolution is set to 1/2 and interval is 10m', () => {
       // For a 7d graph, $__interval is 10m
-      ds.createQuery({ ...target, intervalFactor: 2 }, { interval: '10m' } as any, 0, 10080);
-      expect(templateSrvStub.replace.mock.calls[1][1]['__rate_interval'].value).toBe('1215s');
+      ds.createQuery({ ...target, intervalFactor: 2 }, { interval: '10m' } as DataQueryRequest<PromQuery>, 0, 10080);
+      expect(replaceMock.mock.calls[1][1]['__rate_interval'].value).toBe('1215s');
     });
     it('should be 4 times the scrape interval if resolution is set to 1/2 and interval is 15s', () => {
       // For a 5m graph, $__interval is 15s
-      ds.createQuery({ ...target, intervalFactor: 2 }, { interval: '15s' } as any, 0, 300);
-      expect(templateSrvStub.replace.mock.calls[1][1]['__rate_interval'].value).toBe('60s');
+      ds.createQuery({ ...target, intervalFactor: 2 }, { interval: '15s' } as DataQueryRequest<PromQuery>, 0, 300);
+      expect(replaceMock.mock.calls[1][1]['__rate_interval'].value).toBe('60s');
     });
     it('should interpolate min step if set', () => {
-      templateSrvStub.replace = jest.fn((_: string) => '15s');
-      ds.createQuery({ ...target, interval: '$int' }, { interval: '15s' } as any, 0, 300);
-      expect(templateSrvStub.replace.mock.calls).toHaveLength(3);
-      templateSrvStub.replace = jest.fn((a: string) => a);
+      replaceMock.mockImplementation((_: string) => '15s');
+      ds.createQuery({ ...target, interval: '$int' }, { interval: '15s' } as DataQueryRequest<PromQuery>, 0, 300);
+      expect(replaceMock.mock.calls).toHaveLength(3);
+      replaceMock.mockImplementation((str) => str);
     });
   });
 
@@ -1780,7 +1778,7 @@ describe('PrometheusDatasource2', () => {
 
     ds.languageProvider = {
       histogramMetrics: ['tns_request_duration_seconds_bucket'],
-    } as any;
+    } as PromQlLanguageProvider;
 
     const request = {
       targets: [targetA, targetB],
@@ -1807,11 +1805,11 @@ describe('PrometheusDatasource for POST', () => {
 
   let ds: PrometheusDatasource;
   beforeEach(() => {
-    ds = new PrometheusDatasource(instanceSettings, templateSrvStub as any, timeSrvStub as any);
+    ds = new PrometheusDatasource(instanceSettings, templateSrvStub, timeSrvStub);
   });
 
   describe('When querying prometheus with one target using query editor target spec', () => {
-    let results: any;
+    let results: DataQueryResponse;
     const urlExpected = 'proxied/api/v1/query_range';
     const dataExpected = {
       query: 'test{job="testjob"}',
@@ -1823,7 +1821,7 @@ describe('PrometheusDatasource for POST', () => {
       range: { from: time({ minutes: 1, seconds: 3 }), to: time({ minutes: 2, seconds: 3 }) },
       targets: [{ expr: 'test{job="testjob"}', format: 'time_series' }],
       interval: '60s',
-    };
+    } as DataQueryRequest<PromQuery>;
 
     beforeEach(async () => {
       const response = {
@@ -1841,7 +1839,7 @@ describe('PrometheusDatasource for POST', () => {
         },
       };
       fetchMock.mockImplementation(() => of(response));
-      ds.query(query as any).subscribe((data: any) => {
+      ds.query(query).subscribe((data) => {
        results = data;
      });
    });
@@ -1861,10 +1859,10 @@ describe('PrometheusDatasource for POST', () => {
   });
 
   describe('When querying prometheus via check headers X-Dashboard-Id X-Panel-Id and X-Dashboard-UID', () => {
-    const options = { dashboardId: 1, panelId: 2, dashboardUID: 'WFlOM-jM1' };
+    const options = { dashboardId: 1, panelId: 2, dashboardUID: 'WFlOM-jM1' } as DataQueryRequest<PromQuery>;
     const httpOptions = {
       headers: {} as { [key: string]: number | undefined },
-    };
+    } as PromQueryRequest;
     const instanceSettings = {
       url: 'proxied',
       directUrl: 'direct',
@@ -1884,7 +1882,7 @@ describe('PrometheusDatasource for POST', () => {
     });
 
     it('with proxy access tracing headers should be added', () => {
-      ds._addTracingHeaders(httpOptions as any, options as any);
+      ds._addTracingHeaders(httpOptions, options);
       expect(httpOptions.headers['X-Dashboard-Id']).toBe(options.dashboardId);
       expect(httpOptions.headers['X-Panel-Id']).toBe(options.panelId);
       expect(httpOptions.headers['X-Dashboard-UID']).toBe(options.dashboardUID);
@@ -1901,10 +1899,10 @@ describe('PrometheusDatasource for POST', () => {
 
       const mockDs = new PrometheusDatasource(
         { ...instanceSettings, url: 'http://127.0.0.1:8000' },
-        templateSrvStub as any,
-        timeSrvStub as any
+        templateSrvStub,
+        timeSrvStub
       );
-      mockDs._addTracingHeaders(httpOptions as any, options as any);
+      mockDs._addTracingHeaders(httpOptions, options);
       expect(httpOptions.headers['X-Dashboard-Id']).toBe(undefined);
       expect(httpOptions.headers['X-Panel-Id']).toBe(undefined);
       expect(httpOptions.headers['X-Dashboard-UID']).toBe(undefined);
@@ -1921,7 +1919,7 @@ function getPrepareTargetsContext({
   targets: PromQuery[];
   app?: CoreApp;
   queryOptions?: Partial<QueryOptions>;
-  languageProvider?: any;
+  languageProvider?: PromQlLanguageProvider;
 }) {
   const instanceSettings = {
     url: 'proxied',
@@ -1942,7 +1940,7 @@ function getPrepareTargetsContext({
     ...queryOptions,
   } as unknown as DataQueryRequest<PromQuery>;
 
-  const ds = new PrometheusDatasource(instanceSettings, templateSrvStub as any, timeSrvStub as any);
+  const ds = new PrometheusDatasource(instanceSettings, templateSrvStub, timeSrvStub);
   if (languageProvider) {
     ds.languageProvider = languageProvider;
   }
@@ -2004,7 +2002,7 @@ describe('prepareTargets', () => {
         targets: [targetA, targetB],
         languageProvider: {
           histogramMetrics: ['tns_request_duration_seconds_bucket'],
-        },
+        } as PromQlLanguageProvider,
       });
       expect(queries).toHaveLength(3);
       expect(activeTargets).toHaveLength(3);
@@ -2026,7 +2024,7 @@ describe('prepareTargets', () => {
         targets: [targetA, targetB],
         languageProvider: {
           histogramMetrics: ['tns_request_duration_seconds_bucket'],
-        },
+        } as PromQlLanguageProvider,
       });
       expect(queries).toHaveLength(4);
       expect(activeTargets).toHaveLength(4);
@@ -2083,7 +2081,7 @@ describe('prepareTargets', () => {
         app: CoreApp.Explore,
         languageProvider: {
           histogramMetrics: ['tns_request_duration_seconds_bucket'],
-        },
+        } as PromQlLanguageProvider,
       });
       expect(queries).toHaveLength(6);
       expect(activeTargets).toHaveLength(6);
@@ -2110,7 +2108,7 @@ describe('prepareTargets', () => {
         app: CoreApp.Explore,
         languageProvider: {
           histogramMetrics: ['tns_request_duration_seconds_bucket'],
-        },
+        } as PromQlLanguageProvider,
       });
       expect(queries).toHaveLength(5);
       expect(activeTargets).toHaveLength(5);
@@ -2259,7 +2257,7 @@ describe('modifyQuery', () => {
       const query: PromQuery = { refId: 'A', expr: 'go_goroutines' };
       const action = { options: { key: 'cluster', value: 'us-cluster' }, type: 'ADD_FILTER' };
       const instanceSettings = { jsonData: {} } as unknown as DataSourceInstanceSettings<PromOptions>;
-      const ds = new PrometheusDatasource(instanceSettings, templateSrvStub as any, timeSrvStub as any);
+      const ds = new PrometheusDatasource(instanceSettings, templateSrvStub, timeSrvStub);
 
       const result = ds.modifyQuery(query, action);
 
@@ -2273,7 +2271,7 @@ describe('modifyQuery', () => {
       const query: PromQuery = { refId: 'A', expr: 'go_goroutines{cluster="us-cluster"}' };
       const action = { options: { key: 'pod', value: 'pod-123' }, type: 'ADD_FILTER' };
       const instanceSettings = { jsonData: {} } as unknown as DataSourceInstanceSettings<PromOptions>;
-      const ds = new PrometheusDatasource(instanceSettings, templateSrvStub as any, timeSrvStub as any);
+      const ds = new PrometheusDatasource(instanceSettings, templateSrvStub, timeSrvStub);
 
       const result = ds.modifyQuery(query, action);
 
@@ -2289,7 +2287,7 @@ describe('modifyQuery', () => {
       const query: PromQuery = { refId: 'A', expr: 'go_goroutines' };
       const action = { options: { key: 'cluster', value: 'us-cluster' }, type: 'ADD_FILTER_OUT' };
       const instanceSettings = { jsonData: {} } as unknown as DataSourceInstanceSettings<PromOptions>;
-      const ds = new PrometheusDatasource(instanceSettings, templateSrvStub as any, timeSrvStub as any);
+      const ds = new PrometheusDatasource(instanceSettings, templateSrvStub, timeSrvStub);
 
       const result = ds.modifyQuery(query, action);
 
@@ -2303,7 +2301,7 @@ describe('modifyQuery', () => {
       const query: PromQuery = { refId: 'A', expr: 'go_goroutines{cluster="us-cluster"}' };
       const action = { options: { key: 'pod', value: 'pod-123' }, type: 'ADD_FILTER_OUT' };
       const instanceSettings = { jsonData: {} } as unknown as DataSourceInstanceSettings<PromOptions>;
-      const ds = new PrometheusDatasource(instanceSettings, templateSrvStub as any, timeSrvStub as any);
+      const ds = new PrometheusDatasource(instanceSettings, templateSrvStub, timeSrvStub);
 
       const result = ds.modifyQuery(query, action);
 
@@ -6,6 +6,7 @@ import { QueryEditor, Props } from './QueryEditor';
import { scenarios } from './__mocks__/scenarios';
import { defaultQuery } from './constants';
import { TestDataQueryType } from './dataquery.gen';
import { TestDataDataSource } from './datasource';
import { defaultStreamQuery } from './runStreams';

beforeEach(() => {
@@ -19,7 +20,7 @@ const props = {
onChange: mockOnChange,
datasource: {
getScenarios: () => Promise.resolve(scenarios),
} as any,
} as TestDataDataSource,
};

const setup = (testProps?: Partial<Props>) => {

@@ -5,7 +5,12 @@ import { InlineField, InlineFieldRow, Input } from '@grafana/ui';
import { EditorProps } from '../QueryEditor';
import { PulseWaveQuery } from '../dataquery.gen';

const fields = [
const fields: Array<{
label: string;
id: keyof PulseWaveQuery;
placeholder: string;
tooltip: string;
}> = [
{ label: 'Step', id: 'timeStep', placeholder: '60', tooltip: 'The number of seconds between datapoints.' },
{
label: 'On Count',
@@ -46,7 +51,7 @@ export const PredictablePulseEditor = ({ onChange, query }: EditorProps) => {
type="number"
name={id}
id={`pulseWave.${id}-${query.refId}`}
value={query.pulseWave?.[id as keyof PulseWaveQuery]}
value={query.pulseWave?.[id]}
placeholder={placeholder}
onChange={onInputChange}
/>

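The PulseWave editor hunk above types the field descriptors with `keyof PulseWaveQuery`, so the indexed access no longer needs an inline cast. A stripped-down sketch of the same idea, with a hypothetical WaveConfig in place of the generated type:

// Hypothetical WaveConfig standing in for the generated PulseWaveQuery type.
interface WaveConfig {
  timeStep?: number;
  onCount?: number;
  onValue?: number;
}

// Declaring `id` as `keyof WaveConfig` lets the compiler verify every entry
// and type the lookup below without `as keyof ...` at the access site.
const waveFields: Array<{ label: string; id: keyof WaveConfig }> = [
  { label: 'Step', id: 'timeStep' },
  { label: 'On Count', id: 'onCount' },
];

function readField(config: WaveConfig, field: (typeof waveFields)[number]): number | undefined {
  return config[field.id]; // inferred as number | undefined, no cast needed
}
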
@@ -21,7 +21,7 @@ export const RawFrameEditor = ({ onChange, query }: EditorProps) => {
return;
}

let data: any = undefined;
let data = undefined;

// Copy paste from panel json
if (isArray(json.series) && json.state) {

@@ -31,7 +31,7 @@ export const SimulationQueryEditor = ({ onChange, query, ds }: EditorProps) => {

// This only changes once
const info = useAsync(async () => {
const v = (await ds.getResource('sims')) as SimInfo[];
const v = await ds.getResource<SimInfo[]>('sims');
return {
sims: v,
options: v.map((s) => ({ label: s.name, value: s.type, description: s.description })),

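The `getResource<SimInfo[]>('sims')` call above relies on the generic parameter of the datasource's resource helper, so the cast on the awaited value becomes unnecessary. A rough sketch of the same shape, using plain `fetch` rather than Grafana's backend plumbing (the URL and SimInfo shape are assumptions):

// Sketch only: a generic resource helper. The single assertion lives inside the
// helper, and each caller names the payload type it expects.
async function getResource<T>(path: string): Promise<T> {
  const response = await fetch(`/api/plugins/testdata/resources/${path}`); // illustrative URL
  return (await response.json()) as T;
}

// Assumed shape, mirroring how SimInfo is used in the hunk above.
interface SimInfo {
  type: string;
  name: string;
  description: string;
}

async function loadSims() {
  const sims = await getResource<SimInfo[]>('sims');
  return sims.map((s) => ({ label: s.name, value: s.type, description: s.description }));
}
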
@@ -1,6 +1,6 @@
import { CSVWave, TestData, TestDataQueryType } from './dataquery.gen';
import { CSVWave, PulseWaveQuery, TestData, TestDataQueryType } from './dataquery.gen';

export const defaultPulseQuery: any = {
export const defaultPulseQuery: PulseWaveQuery = {
timeStep: 60,
onCount: 3,
onValue: 2,

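Annotating `defaultPulseQuery` with `PulseWaveQuery` instead of `any` means a mistyped or removed field now fails at compile time. In miniature, with a hypothetical type name:

// Hypothetical PulseQuery; the point is the annotation, not the exact fields.
interface PulseQuery {
  timeStep?: number;
  onCount?: number;
  onValue?: number;
}

export const defaultPulse: PulseQuery = {
  timeStep: 60,
  onCount: 3,
  onValue: 2,
  // onVolume: 1, // would be rejected: not a property of PulseQuery
};
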
@@ -160,7 +160,7 @@ function makeRandomNode(index: number) {
};
}

export function savedNodesResponse(): any {
export function savedNodesResponse() {
return [new MutableDataFrame(nodes), new MutableDataFrame(edges)];
}

@@ -67,7 +67,7 @@ export function runSignalStream(
const frame = StreamingDataFrame.fromDataFrameJSON({ schema }, { maxLength: maxDataPoints });

let value = Math.random() * 100;
let timeoutId: any = null;
let timeoutId: ReturnType<typeof setTimeout>;
let lastSent = -1;

const addNextRow = (time: number) => {
@@ -148,7 +148,7 @@ export function runLogsStream(

const { speed } = query;

let timeoutId: any = null;
let timeoutId: ReturnType<typeof setTimeout>;

const pushNextEvent = () => {
data.fields[0].values.add(Date.now());
@@ -205,7 +205,7 @@ export function runFetchStream(
data.addField(field);
}
},
onRow: (row: any[]) => {
onRow: (row) => {
data.add(row);
},
},

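The `ReturnType<typeof setTimeout>` annotation used in both stream runners keeps the handle type correct whether the file is compiled against DOM typings (`number`) or Node typings (`NodeJS.Timeout`). A self-contained sketch:

// Sketch: a portable timer handle. `undefined` is added here so the variable
// can start unset; the hunks above assign before first use instead.
let timeoutId: ReturnType<typeof setTimeout> | undefined;

function schedule(tick: () => void, delayMs: number) {
  if (timeoutId !== undefined) {
    clearTimeout(timeoutId);
  }
  timeoutId = setTimeout(tick, delayMs);
}

schedule(() => console.log('next row'), 1000);
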
@@ -48,10 +48,10 @@ export interface BarsOptions {
showValue: VisibilityMode;
stacking: StackingMode;
rawValue: (seriesIdx: number, valueIdx: number) => number | null;
getColor?: (seriesIdx: number, valueIdx: number, value: any) => string | null;
getColor?: (seriesIdx: number, valueIdx: number, value: unknown) => string | null;
fillOpacity?: number;
formatValue: (seriesIdx: number, value: any) => string;
formatShortValue: (seriesIdx: number, value: any) => string;
formatValue: (seriesIdx: number, value: unknown) => string;
formatShortValue: (seriesIdx: number, value: unknown) => string;
timeZone?: TimeZone;
text?: VizTextDisplayOptions;
onHover?: (seriesIdx: number, valueIdx: number) => void;

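Switching the `value` parameters in `BarsOptions` from `any` to `unknown` keeps the callbacks just as flexible for callers while forcing implementations to narrow before touching the value. A small illustration:

// With `unknown`, using the value without narrowing is a compile error;
// with `any`, the same mistake would only surface at runtime.
type FormatValue = (seriesIdx: number, value: unknown) => string;

const exampleFormatValue: FormatValue = (_seriesIdx, value) => {
  if (typeof value === 'number') {
    return value.toFixed(2);
  }
  return String(value);
};
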
@@ -108,7 +108,7 @@ export const plugin = new PanelPlugin<PanelOptions, PanelFieldConfig>(BarChartPa
},
})
.setPanelOptions((builder, context) => {
const disp = prepareBarChartDisplayValues(context.data, config.theme2, context.options ?? ({} as any));
const disp = prepareBarChartDisplayValues(context.data, config.theme2, context.options ?? ({} as PanelOptions));
let xaxisPlaceholder = 'First string or time field';
const viz = 'viz' in disp ? disp.viz[0] : undefined;
if (viz?.fields?.length) {

@@ -55,7 +55,7 @@ function getBarCharScaleOrientation(orientation: VizOrientation) {

export interface BarChartOptionsEX extends PanelOptions {
rawValue: (seriesIdx: number, valueIdx: number) => number | null;
getColor?: (seriesIdx: number, valueIdx: number, value: any) => string | null;
getColor?: (seriesIdx: number, valueIdx: number, value: unknown) => string | null;
timeZone?: TimeZone;
fillOpacity?: number;
}
@@ -83,11 +83,11 @@ export const preparePlotConfigBuilder: UPlotConfigPrepFn<BarChartOptionsEX> = ({
}) => {
const builder = new UPlotConfigBuilder();

const formatValue = (seriesIdx: number, value: any) => {
const formatValue = (seriesIdx: number, value: unknown) => {
return formattedValueToString(frame.fields[seriesIdx].display!(value));
};

const formatShortValue = (seriesIdx: number, value: any) => {
const formatShortValue = (seriesIdx: number, value: unknown) => {
return shortenValue(formatValue(seriesIdx, value), xTickLabelMaxLength);
};

@@ -59,7 +59,11 @@ export class RenderInfoViewer extends Component<Props> {

render() {
const { data, options } = this.props;
const showCounters = options.counters ?? ({} as UpdateConfig);
const showCounters = options.counters ?? {
render: false,
dataChanged: false,
schemaChanged: false,
};
this.counters.render++;
const now = Date.now();
const elapsed = now - this.lastRender;

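The RenderInfoViewer hunk swaps `{} as UpdateConfig` for a fully populated default object, so every flag the render path reads actually exists. The same idea in isolation, with a hypothetical UpdateCounters interface:

// `{} as UpdateCounters` would claim the empty object has all three flags;
// a concrete default keeps the nullish-coalescing fallback honest.
interface UpdateCounters {
  render: boolean;
  dataChanged: boolean;
  schemaChanged: boolean;
}

const defaultCounters: UpdateCounters = { render: false, dataChanged: false, schemaChanged: false };

function resolveCounters(configured?: UpdateCounters): UpdateCounters {
  return configured ?? defaultCounters; // no assertion required
}
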
@@ -10,12 +10,12 @@ export const gaugePanelMigrationHandler = (panel: PanelModel<PanelOptions>): Par

// This is called when the panel changes from another panel
export const gaugePanelChangedHandler = (
panel: PanelModel<Partial<PanelOptions>> | any,
panel: PanelModel<Partial<PanelOptions>>,
prevPluginId: string,
prevOptions: any
) => {
// This handles most config changes
const opts = sharedSingleStatPanelChangedHandler(panel, prevPluginId, prevOptions) as PanelOptions;
const opts: PanelOptions = sharedSingleStatPanelChangedHandler(panel, prevPluginId, prevOptions);

// Changing from angular singlestat
if (prevPluginId === 'singlestat' && prevOptions.angular) {

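The gauge hunk replaces a trailing `as PanelOptions` assertion with a variable annotation. The difference matters: an annotation checks that the handler's return type is assignable, while an assertion silences the check. A schematic example with made-up option shapes:

// Made-up option shape; only the annotation-vs-assertion contrast is the point.
interface GaugeOptionsLike {
  showThresholdLabels: boolean;
}

function sharedChangedHandler(prevOptions: unknown): GaugeOptionsLike {
  return { showThresholdLabels: false };
}

const opts: GaugeOptionsLike = sharedChangedHandler({}); // assignability is checked here
// `sharedChangedHandler({}) as GaugeOptionsLike` would also compile, but it asserts
// rather than checks, so a looser or drifting return type could slip through.
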
@@ -11,7 +11,7 @@ import { getDashboardSrv } from 'app/features/dashboard/services/DashboardSrv';

import { Step } from './components/Step';
import { getSteps } from './steps';
import { Card, SetupStep } from './types';
import { SetupStep } from './types';

interface State {
checksDone: boolean;
@@ -30,7 +30,7 @@ export class GettingStarted extends PureComponent<PanelProps, State> {
const { steps } = this.state;

const checkedStepsPromises: Array<Promise<SetupStep>> = steps.map(async (step: SetupStep) => {
const checkedCardsPromises: Array<Promise<Card>> = step.cards.map((card: Card) => {
const checkedCardsPromises = step.cards.map(async (card) => {
return card.check().then((passed) => {
return { ...card, done: passed };
});

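In the GettingStarted hunk the explicit `Array<Promise<Card>>` annotation disappears because an async arrow inside `map` already produces `Promise<...>[]`, which the compiler infers on its own. Roughly:

// Inference does the work: `pending` is an array of promises without any
// annotation on the map callback. CardLike is an illustrative stand-in.
interface CardLike {
  title: string;
  done?: boolean;
  check: () => Promise<boolean>;
}

async function checkCards(cards: CardLike[]) {
  const pending = cards.map(async (card) => ({ ...card, done: await card.check() }));
  return Promise.all(pending);
}
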
@@ -4,7 +4,7 @@ import React from 'react';
import { GrafanaTheme2 } from '@grafana/data';
import { useStyles2 } from '@grafana/ui';

import { SetupStep, TutorialCardType } from '../types';
import { SetupStep } from '../types';

import { DocsCard } from './DocsCard';
import { TutorialCard } from './TutorialCard';
@@ -26,7 +26,7 @@ export const Step = ({ step }: Props) => {
{step.cards.map((card, index) => {
const key = `${card.title}-${index}`;
if (card.type === 'tutorial') {
return <TutorialCard key={key} card={card as TutorialCardType} />;
return <TutorialCard key={key} card={card} />;
}
return <DocsCard key={key} card={card} />;
})}

@@ -1,10 +1,8 @@
import { IconName } from '@grafana/ui';

export type CardType = 'tutorial' | 'docs' | 'other';

export interface Card {
title: string;
type: CardType;
type: 'docs' | 'other';
icon: IconName;
href: string;
check: () => Promise<boolean>;
@@ -13,8 +11,9 @@ export interface Card {
learnHref?: string;
}

export interface TutorialCardType extends Card {
export interface TutorialCardType extends Omit<Card, 'type'> {
info?: string;
type: 'tutorial';
// For local storage
key: string;
}

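The types.ts change gives `Card` and `TutorialCardType` distinct literal `type` fields, which is what lets Step.tsx drop the `as TutorialCardType` cast: the `card.type === 'tutorial'` check now narrows the union by itself. A condensed sketch of that relationship:

// Condensed stand-ins for the real Card/TutorialCardType interfaces.
interface DocsCardSketch {
  type: 'docs' | 'other';
  title: string;
}

interface TutorialCardSketch {
  type: 'tutorial';
  title: string;
  key: string; // for local storage
}

type AnyCard = DocsCardSketch | TutorialCardSketch;

function storageKey(card: AnyCard): string | undefined {
  if (card.type === 'tutorial') {
    return card.key; // narrowed to TutorialCardSketch — no cast needed
  }
  return undefined;
}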