Loki Query Splitting: Rename from "chunk" to "splitting" (#65630)
* Update chunking mentions in the code and ui
* Rename files and exported functions
* Rename configuration attribute
* Rename grouped querying function name
* Update more function names
* Update unit test
* Update unit tests
* More renames
* Rename time splitting functions
This commit is contained in:
  parent c70f700b55
  commit 663ed7ba82
@@ -77,7 +77,7 @@ enum PluginRequestHeaders {
   DatasourceUID = 'X-Datasource-Uid', // can be used for routing/ load balancing
   DashboardUID = 'X-Dashboard-Uid', // mainly useful for debuging slow queries
   PanelID = 'X-Panel-Id', // mainly useful for debuging slow queries
-  QueryGroupID = 'X-Query-Group-Id', // mainly useful to find related queries with query chunking
+  QueryGroupID = 'X-Query-Group-Id', // mainly useful to find related queries with query splitting
   FromExpression = 'X-Grafana-From-Expr', // used by datasources to identify expression queries
 }

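Background for this hunk: every sub-request produced by splitting one query carries the same X-Query-Group-Id, so backends and traces can correlate the pieces; the change above only updates the comment's terminology. A hypothetical request carrying the header (header name from the hunk; the URL and all values are illustrative, not from this diff):

  fetch('/api/ds/query', {
    method: 'POST',
    // the same group id would be sent on every sub-request of one split query
    headers: { 'X-Query-Group-Id': 'f47ac10b-58cc-4372-a567-0e02b2c3d479' },
    body: JSON.stringify({ queries: [] }),
  });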
@@ -33,7 +33,7 @@ import { CustomVariableModel } from '../../../features/variables/types';

 import { LokiDatasource, REF_ID_DATA_SAMPLES } from './datasource';
 import { createLokiDatasource, createMetadataRequest } from './mocks';
-import { runQueryInChunks } from './queryChunking';
+import { runSplitQuery } from './querySplitting';
 import { parseToNodeNamesArray } from './queryUtils';
 import { LokiOptions, LokiQuery, LokiQueryType, LokiVariableQueryType, SupportingQueryType } from './types';
 import { LokiVariableSupport } from './variables';

@@ -45,7 +45,7 @@ jest.mock('@grafana/runtime', () => {
   };
 });

-jest.mock('./queryChunking');
+jest.mock('./querySplitting');

 const templateSrvStub = {
   getAdhocFilters: jest.fn(() => [] as unknown[]),

@@ -1127,7 +1127,7 @@ describe('LokiDatasource', () => {
   describe('Query splitting', () => {
     beforeAll(() => {
       config.featureToggles.lokiQuerySplitting = true;
-      jest.mocked(runQueryInChunks).mockReturnValue(
+      jest.mocked(runSplitQuery).mockReturnValue(
         of({
           data: [],
         })

@@ -1153,7 +1153,7 @@ describe('LokiDatasource', () => {
     });

       await expect(ds.query(query)).toEmitValuesWith(() => {
-        expect(runQueryInChunks).toHaveBeenCalled();
+        expect(runSplitQuery).toHaveBeenCalled();
       });
     });
   });

@@ -66,8 +66,8 @@ import {
   findLastPosition,
   getLabelFilterPositions,
 } from './modifyQuery';
-import { runQueryInChunks } from './queryChunking';
 import { getQueryHints } from './queryHints';
+import { runSplitQuery } from './querySplitting';
 import {
   getLogQueryFromMetricsQuery,
   getNormalizedLokiQuery,

@@ -75,7 +75,7 @@ import {
   getParserFromQuery,
   isLogsQuery,
   isValidQuery,
-  requestSupporsChunking,
+  requestSupportsSplitting,
 } from './queryUtils';
 import { sortDataFrameByTime, SortDirection } from './sortDataFrame';
 import { doLokiChannelStream } from './streaming';

@@ -284,8 +284,8 @@ export class LokiDatasource
       return this.runLiveQueryThroughBackend(fixedRequest);
     }

-    if (config.featureToggles.lokiQuerySplitting && requestSupporsChunking(fixedRequest.targets)) {
-      return runQueryInChunks(this, fixedRequest);
+    if (config.featureToggles.lokiQuerySplitting && requestSupportsSplitting(fixedRequest.targets)) {
+      return runSplitQuery(this, fixedRequest);
     }

     return this.runQuery(fixedRequest);
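This hunk is the single dispatch point for the feature: when the lokiQuerySplitting toggle is on and requestSupportsSplitting() approves the targets, the whole request is routed through runSplitQuery() instead of a single runQuery() call. From the caller's side the result is still one Observable; a consumer-side sketch (the Streaming-until-done state sequence is inferred from the tests later in this diff):

  runSplitQuery(datasource, fixedRequest).subscribe((response) => {
    if (response.state === LoadingState.Streaming) {
      // partial, merged-so-far frames while sub-requests are still in flight
      render(response.data); // render() is a hypothetical consumer callback
    }
  });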
@@ -1,11 +1,11 @@
-import { getRangeChunks } from './logsTimeChunking';
+import { splitTimeRange } from './logsTimeSplitting';

-describe('logs getRangeChunks', () => {
+describe('logs splitTimeRange', () => {
   it('should split time range into chunks', () => {
     const start = Date.parse('2022-02-06T14:10:03.234');
     const end = Date.parse('2022-02-06T14:11:03.567');

-    expect(getRangeChunks(start, end, 10000)).toStrictEqual([
+    expect(splitTimeRange(start, end, 10000)).toStrictEqual([
       [Date.parse('2022-02-06T14:10:03.234'), Date.parse('2022-02-06T14:10:03.567')],
       [Date.parse('2022-02-06T14:10:03.567'), Date.parse('2022-02-06T14:10:13.567')],
       [Date.parse('2022-02-06T14:10:13.567'), Date.parse('2022-02-06T14:10:23.567')],

@@ -20,7 +20,7 @@ describe('logs getRangeChunks', () => {
     const start = Date.parse('2022-02-06T14:10:03.567');
     const end = Date.parse('2022-02-06T14:11:03.567');

-    expect(getRangeChunks(start, end, 20000)).toStrictEqual([
+    expect(splitTimeRange(start, end, 20000)).toStrictEqual([
       [Date.parse('2022-02-06T14:10:03.567'), Date.parse('2022-02-06T14:10:23.567')],
       [Date.parse('2022-02-06T14:10:23.567'), Date.parse('2022-02-06T14:10:43.567')],
       [Date.parse('2022-02-06T14:10:43.567'), Date.parse('2022-02-06T14:11:03.567')],

@@ -16,7 +16,7 @@
 // to the end, so if we do it right in milliseconds, it should be OK in
 // nanoseconds too

-export function getRangeChunks(
+export function splitTimeRange(
   startTime: number,
   endTime: number,
   idealRangeDuration: number
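The test expectations above pin down the splitting behaviour for logs: ranges are aligned to the end timestamp, so any remainder becomes a shorter first range. A minimal sketch consistent with those expectations (an approximation, not necessarily the shipped implementation):

  export function splitTimeRange(
    startTime: number,
    endTime: number,
    idealRangeDuration: number
  ): Array<[number, number]> {
    if (idealRangeDuration >= endTime - startTime) {
      return [[startTime, endTime]]; // nothing to split
    }
    const ranges: Array<[number, number]> = [];
    // walk backwards from the end so boundaries line up with endTime,
    // clamping the first (possibly partial) range to startTime
    for (let cursor = endTime; cursor > startTime; cursor -= idealRangeDuration) {
      ranges.unshift([Math.max(startTime, cursor - idealRangeDuration), cursor]);
    }
    return ranges;
  }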
@@ -1,12 +1,12 @@
-import { getRangeChunks } from './metricTimeChunking';
+import { splitTimeRange } from './metricTimeSplitting';

-describe('metric getRangeChunks', () => {
+describe('metric splitTimeRange', () => {
   it('should split time range into chunks', () => {
     const start = Date.parse('2022-02-06T14:10:03');
     const end = Date.parse('2022-02-06T14:11:03');
     const step = 10 * 1000;

-    expect(getRangeChunks(start, end, step, 25000)).toStrictEqual([
+    expect(splitTimeRange(start, end, step, 25000)).toStrictEqual([
       [Date.parse('2022-02-06T14:10:00'), Date.parse('2022-02-06T14:10:10')],
       [Date.parse('2022-02-06T14:10:20'), Date.parse('2022-02-06T14:10:40')],
       [Date.parse('2022-02-06T14:10:50'), Date.parse('2022-02-06T14:11:10')],

@@ -17,6 +17,6 @@ describe('metric getRangeChunks', () => {
     const start = Date.parse('2022-02-06T14:10:03');
     const end = Date.parse('2022-02-06T14:10:33');
     const step = 10 * 1000;
-    expect(getRangeChunks(start, end, step, 1000)).toEqual([[start, end]]);
+    expect(splitTimeRange(start, end, step, 1000)).toEqual([[start, end]]);
   });
 });

@@ -19,7 +19,7 @@ function expandTimeRange(startTime: number, endTime: number, step: number): [num
   return [newStartTime, newEndTime];
 }

-export function getRangeChunks(
+export function splitTimeRange(
   startTime: number,
   endTime: number,
   step: number,
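The metric variant differs from the logs one: the full range is first expanded outwards to step boundaries (expandTimeRange above), each subrange covers a whole number of steps, and one step of gap is left between subranges so no evaluation timestamp is queried twice. A sketch that reproduces the expectations in the test file above (again an approximation, not the shipped code):

  export function splitTimeRange(
    startTime: number,
    endTime: number,
    step: number,
    idealRangeDuration: number
  ): Array<[number, number]> {
    if (idealRangeDuration < step || idealRangeDuration >= endTime - startTime) {
      return [[startTime, endTime]]; // too small to split sensibly
    }
    // snap outwards to step boundaries: 14:10:03 -> 14:10:00, 14:11:03 -> 14:11:10
    const start = Math.floor(startTime / step) * step;
    const end = Math.ceil(endTime / step) * step;
    const chunk = Math.trunc(idealRangeDuration / step) * step; // whole steps per subrange
    const ranges: Array<[number, number]> = [];
    // walk backwards, skipping one extra step between subranges so adjacent
    // subranges do not share an evaluation timestamp
    for (let cursor = end; cursor > start; cursor -= chunk + step) {
      ranges.unshift([Math.max(start, cursor - chunk), cursor]);
    }
    return ranges;
  }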
@@ -5,13 +5,13 @@ import { dateTime } from '@grafana/data';
 import { LoadingState } from '@grafana/schema';

 import { LokiDatasource } from './datasource';
-import * as logsTimeSplit from './logsTimeChunking';
-import * as metricTimeSplit from './metricTimeChunking';
+import * as logsTimeSplit from './logsTimeSplitting';
+import * as metricTimeSplit from './metricTimeSplitting';
 import { createLokiDatasource, getMockFrames } from './mocks';
-import { runQueryInChunks } from './queryChunking';
+import { runSplitQuery } from './querySplitting';
 import { LokiQuery, LokiQueryType } from './types';

-describe('runQueryInChunks()', () => {
+describe('runSplitQuery()', () => {
   let datasource: LokiDatasource;
   const range = {
     from: dateTime('2023-02-08T05:00:00.000Z'),

@@ -31,7 +31,7 @@ describe('runQueryInChunks()', () => {
   });

   test('Splits datasource queries', async () => {
-    await expect(runQueryInChunks(datasource, request)).toEmitValuesWith(() => {
+    await expect(runSplitQuery(datasource, request)).toEmitValuesWith(() => {
       // 3 days, 3 chunks, 3 requests.
       expect(datasource.runQuery).toHaveBeenCalledTimes(3);
     });

@@ -41,7 +41,7 @@ describe('runQueryInChunks()', () => {
     jest
       .spyOn(datasource, 'runQuery')
       .mockReturnValue(of({ state: LoadingState.Error, error: { refId: 'A', message: 'Error' }, data: [] }));
-    await expect(runQueryInChunks(datasource, request)).toEmitValuesWith((values) => {
+    await expect(runSplitQuery(datasource, request)).toEmitValuesWith((values) => {
      expect(values).toEqual([{ error: { refId: 'A', message: 'Error' }, data: [], state: LoadingState.Streaming }]);
     });
   });

@@ -55,17 +55,17 @@ describe('runQueryInChunks()', () => {
       range,
     });
     beforeAll(() => {
-      jest.spyOn(logsTimeSplit, 'getRangeChunks').mockReturnValue([]);
-      jest.spyOn(metricTimeSplit, 'getRangeChunks').mockReturnValue([]);
+      jest.spyOn(logsTimeSplit, 'splitTimeRange').mockReturnValue([]);
+      jest.spyOn(metricTimeSplit, 'splitTimeRange').mockReturnValue([]);
     });
     afterAll(() => {
-      jest.mocked(logsTimeSplit.getRangeChunks).mockRestore();
-      jest.mocked(metricTimeSplit.getRangeChunks).mockRestore();
+      jest.mocked(logsTimeSplit.splitTimeRange).mockRestore();
+      jest.mocked(metricTimeSplit.splitTimeRange).mockRestore();
     });
     test('Ignores hidden queries', async () => {
-      await expect(runQueryInChunks(datasource, request)).toEmitValuesWith(() => {
-        expect(logsTimeSplit.getRangeChunks).toHaveBeenCalled();
-        expect(metricTimeSplit.getRangeChunks).not.toHaveBeenCalled();
+      await expect(runSplitQuery(datasource, request)).toEmitValuesWith(() => {
+        expect(logsTimeSplit.splitTimeRange).toHaveBeenCalled();
+        expect(metricTimeSplit.splitTimeRange).not.toHaveBeenCalled();
       });
     });
   });

@@ -80,14 +80,14 @@ describe('runQueryInChunks()', () => {
       jest.spyOn(datasource, 'runQuery').mockReturnValue(of({ data: [logFrameA], refId: 'A' }));
     });
     test('Stops requesting once maxLines of logs have been received', async () => {
-      await expect(runQueryInChunks(datasource, request)).toEmitValuesWith(() => {
+      await expect(runSplitQuery(datasource, request)).toEmitValuesWith(() => {
         // 3 days, 3 chunks, 2 responses of 2 logs, 2 requests
         expect(datasource.runQuery).toHaveBeenCalledTimes(2);
       });
     });
     test('Performs all the requests if maxLines has not been reached', async () => {
       request.targets[0].maxLines = 9999;
-      await expect(runQueryInChunks(datasource, request)).toEmitValuesWith(() => {
+      await expect(runSplitQuery(datasource, request)).toEmitValuesWith(() => {
         // 3 days, 3 chunks, 3 responses of 2 logs, 3 requests
         expect(datasource.runQuery).toHaveBeenCalledTimes(3);
       });

@@ -95,7 +95,7 @@ describe('runQueryInChunks()', () => {
     test('Performs all the requests if not a log query', async () => {
       request.targets[0].maxLines = 1;
       request.targets[0].expr = 'count_over_time({a="b"}[1m])';
-      await expect(runQueryInChunks(datasource, request)).toEmitValuesWith(() => {
+      await expect(runSplitQuery(datasource, request)).toEmitValuesWith(() => {
         // 3 days, 3 chunks, 3 responses of 2 logs, 3 requests
         expect(datasource.runQuery).toHaveBeenCalledTimes(3);
       });

@@ -114,7 +114,7 @@ describe('runQueryInChunks()', () => {
       ],
       range,
     });
-    await expect(runQueryInChunks(datasource, request)).toEmitValuesWith(() => {
+    await expect(runSplitQuery(datasource, request)).toEmitValuesWith(() => {
       // 3 days, 3 chunks, 1x Metric + 1x Log, 6 requests.
       expect(datasource.runQuery).toHaveBeenCalledTimes(6);
     });

@@ -127,7 +127,7 @@ describe('runQueryInChunks()', () => {
       ],
       range,
     });
-    await expect(runQueryInChunks(datasource, request)).toEmitValuesWith(() => {
+    await expect(runSplitQuery(datasource, request)).toEmitValuesWith(() => {
       // 3 days, 3 chunks, 1x2 Metric, 3 requests.
       expect(datasource.runQuery).toHaveBeenCalledTimes(3);
     });

@@ -140,7 +140,7 @@ describe('runQueryInChunks()', () => {
       ],
       range,
     });
-    await expect(runQueryInChunks(datasource, request)).toEmitValuesWith(() => {
+    await expect(runSplitQuery(datasource, request)).toEmitValuesWith(() => {
       // 3 days, 3 chunks, 1x2 Logs, 3 requests.
       expect(datasource.runQuery).toHaveBeenCalledTimes(3);
     });

@@ -153,7 +153,7 @@ describe('runQueryInChunks()', () => {
       ],
       range,
     });
-    await expect(runQueryInChunks(datasource, request)).toEmitValuesWith(() => {
+    await expect(runSplitQuery(datasource, request)).toEmitValuesWith(() => {
       // Instant queries are omitted from splitting
       expect(datasource.runQuery).toHaveBeenCalledTimes(1);
     });

@@ -170,7 +170,7 @@ describe('runQueryInChunks()', () => {
     jest.spyOn(datasource, 'runQuery').mockReturnValue(of({ data: [], refId: 'B' }));
     jest.spyOn(datasource, 'runQuery').mockReturnValueOnce(of({ data: [logFrameA], refId: 'A' }));

-    await expect(runQueryInChunks(datasource, request)).toEmitValuesWith(() => {
+    await expect(runSplitQuery(datasource, request)).toEmitValuesWith(() => {
       // 3 days, 3 chunks, 1x Logs + 3x Metric, 3 requests.
       expect(datasource.runQuery).toHaveBeenCalledTimes(4);
     });

@@ -184,14 +184,14 @@ describe('runQueryInChunks()', () => {
       ],
       range,
     });
-    await expect(runQueryInChunks(datasource, request)).toEmitValuesWith(() => {
+    await expect(runSplitQuery(datasource, request)).toEmitValuesWith(() => {
       // 3 days, 3 chunks, 3x Logs + 3x Metric + 1x Instant, 7 requests.
       expect(datasource.runQuery).toHaveBeenCalledTimes(7);
     });
   });
 });

-describe('Splitting targets based on chunkDuration', () => {
+describe('Splitting targets based on splitDuration', () => {
   const range1h = {
     from: dateTime('2023-02-08T05:00:00.000Z'),
     to: dateTime('2023-02-08T06:00:00.000Z'),

@@ -203,58 +203,58 @@ describe('runQueryInChunks()', () => {
     beforeEach(() => {
       jest.spyOn(datasource, 'runQuery').mockReturnValue(of({ data: [], refId: 'A' }));
     });
-    test('with 30m chunkDuration runs 2 queries', async () => {
+    test('with 30m splitDuration runs 2 queries', async () => {
       const request = getQueryOptions<LokiQuery>({
-        targets: [{ expr: '{a="b"}', refId: 'A', chunkDuration: '30m' }],
+        targets: [{ expr: '{a="b"}', refId: 'A', splitDuration: '30m' }],
         range: range1h,
       });
-      await expect(runQueryInChunks(datasource, request)).toEmitValuesWith(() => {
+      await expect(runSplitQuery(datasource, request)).toEmitValuesWith(() => {
        expect(datasource.runQuery).toHaveBeenCalledTimes(2);
       });
     });
-    test('with 1h chunkDuration runs 1 queries', async () => {
+    test('with 1h splitDuration runs 1 queries', async () => {
       const request = getQueryOptions<LokiQuery>({
-        targets: [{ expr: '{a="b"}', refId: 'A', chunkDuration: '1h' }],
+        targets: [{ expr: '{a="b"}', refId: 'A', splitDuration: '1h' }],
         range: range1h,
       });
-      await expect(runQueryInChunks(datasource, request)).toEmitValuesWith(() => {
+      await expect(runSplitQuery(datasource, request)).toEmitValuesWith(() => {
        expect(datasource.runQuery).toHaveBeenCalledTimes(1);
       });
     });
-    test('with 1h chunkDuration and 2 targets runs 1 queries', async () => {
+    test('with 1h splitDuration and 2 targets runs 1 queries', async () => {
       const request = getQueryOptions<LokiQuery>({
        targets: [
-          { expr: '{a="b"}', refId: 'A', chunkDuration: '1h' },
-          { expr: '{a="b"}', refId: 'B', chunkDuration: '1h' },
+          { expr: '{a="b"}', refId: 'A', splitDuration: '1h' },
+          { expr: '{a="b"}', refId: 'B', splitDuration: '1h' },
        ],
        range: range1h,
       });
-      await expect(runQueryInChunks(datasource, request)).toEmitValuesWith(() => {
+      await expect(runSplitQuery(datasource, request)).toEmitValuesWith(() => {
        expect(datasource.runQuery).toHaveBeenCalledTimes(1);
       });
     });
-    test('with 1h/30m chunkDuration and 2 targets runs 3 queries', async () => {
+    test('with 1h/30m splitDuration and 2 targets runs 3 queries', async () => {
       const request = getQueryOptions<LokiQuery>({
        targets: [
-          { expr: '{a="b"}', refId: 'A', chunkDuration: '1h' },
-          { expr: '{a="b"}', refId: 'B', chunkDuration: '30m' },
+          { expr: '{a="b"}', refId: 'A', splitDuration: '1h' },
+          { expr: '{a="b"}', refId: 'B', splitDuration: '30m' },
        ],
        range: range1h,
       });
-      await expect(runQueryInChunks(datasource, request)).toEmitValuesWith(() => {
+      await expect(runSplitQuery(datasource, request)).toEmitValuesWith(() => {
        // 2 x 30m + 1 x 1h
        expect(datasource.runQuery).toHaveBeenCalledTimes(3);
       });
     });
-    test('with 1h/30m chunkDuration and 1 log and 2 metric target runs 3 queries', async () => {
+    test('with 1h/30m splitDuration and 1 log and 2 metric target runs 3 queries', async () => {
       const request = getQueryOptions<LokiQuery>({
        targets: [
-          { expr: '{a="b"}', refId: 'A', chunkDuration: '1h' },
-          { expr: 'count_over_time({c="d"}[1m])', refId: 'C', chunkDuration: '30m' },
+          { expr: '{a="b"}', refId: 'A', splitDuration: '1h' },
+          { expr: 'count_over_time({c="d"}[1m])', refId: 'C', splitDuration: '30m' },
        ],
        range: range1h,
       });
-      await expect(runQueryInChunks(datasource, request)).toEmitValuesWith(() => {
+      await expect(runSplitQuery(datasource, request)).toEmitValuesWith(() => {
        // 2 x 30m + 1 x 1h
        expect(datasource.runQuery).toHaveBeenCalledTimes(3);
       });
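The expected call counts in these tests fall out of simple arithmetic: a 1 h range divided into 30 m partitions yields 2 sub-requests, and into 1 h partitions just 1. Targets that share the same splitDuration are grouped into a single request per partition, so two 1 h targets still produce 1 call, while a 1 h target plus a 30 m target produce 1 + 2 = 3.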
@@ -278,7 +278,7 @@ describe('runQueryInChunks()', () => {
       ],
       range: range1d,
     });
-    await expect(runQueryInChunks(datasource, request)).toEmitValuesWith(() => {
+    await expect(runSplitQuery(datasource, request)).toEmitValuesWith(() => {
       // A, B
       expect(datasource.runQuery).toHaveBeenCalledTimes(2);
     });

@@ -291,7 +291,7 @@ describe('runQueryInChunks()', () => {
       ],
       range: range1d,
     });
-    await expect(runQueryInChunks(datasource, request)).toEmitValuesWith(() => {
+    await expect(runSplitQuery(datasource, request)).toEmitValuesWith(() => {
       // A, B
       expect(datasource.runQuery).toHaveBeenCalledTimes(2);
     });

@@ -307,7 +307,7 @@ describe('runQueryInChunks()', () => {
       ],
       range: range1d,
     });
-    await expect(runQueryInChunks(datasource, request)).toEmitValuesWith(() => {
+    await expect(runSplitQuery(datasource, request)).toEmitValuesWith(() => {
       // A, B, C, D, E
       expect(datasource.runQuery).toHaveBeenCalledTimes(5);
     });

@@ -323,7 +323,7 @@ describe('runQueryInChunks()', () => {
       ],
       range, // 3 days
     });
-    await expect(runQueryInChunks(datasource, request)).toEmitValuesWith(() => {
+    await expect(runSplitQuery(datasource, request)).toEmitValuesWith(() => {
       // 3 * A, 3 * B, 3 * C, 3 * D, 1 * E
       expect(datasource.runQuery).toHaveBeenCalledTimes(13);
     });
@@ -13,8 +13,8 @@ import {
 import { LoadingState } from '@grafana/schema';

 import { LokiDatasource } from './datasource';
-import { getRangeChunks as getLogsRangeChunks } from './logsTimeChunking';
-import { getRangeChunks as getMetricRangeChunks } from './metricTimeChunking';
+import { splitTimeRange as splitLogsTimeRange } from './logsTimeSplitting';
+import { splitTimeRange as splitMetricTimeRange } from './metricTimeSplitting';
 import { isLogsQuery } from './queryUtils';
 import { combineResponses } from './responseUtils';
 import { LokiQuery, LokiQueryType } from './types';

@@ -38,8 +38,8 @@ export function partitionTimeRange(
   const step = Math.max(intervalMs * resolution, safeStep);

   const ranges = isLogsQuery
-    ? getLogsRangeChunks(start, end, duration)
-    : getMetricRangeChunks(start, end, step, duration);
+    ? splitLogsTimeRange(start, end, duration)
+    : splitMetricTimeRange(start, end, step, duration);

   return ranges.map(([start, end]) => {
     const from = dateTime(start);

@@ -83,7 +83,7 @@ function adjustTargetsFromResponseState(targets: LokiQuery[], response: DataQuer

 type LokiGroupedRequest = Array<{ request: DataQueryRequest<LokiQuery>; partition: TimeRange[] }>;

-export function runGroupedQueriesInChunks(datasource: LokiDatasource, requests: LokiGroupedRequest) {
+export function runSplitGroupedQueries(datasource: LokiDatasource, requests: LokiGroupedRequest) {
   let mergedResponse: DataQueryResponse = { data: [], state: LoadingState.Streaming };
   const totalRequests = Math.max(...requests.map(({ partition }) => partition.length));

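runSplitGroupedQueries() is the engine behind the rename: it walks every partition of every request group, folds partial responses together, and emits them over an Observable. Stripped of the Observable plumbing, the control flow is roughly the sketch below, an approximation built only from the types visible in this hunk (combineResponses comes from './responseUtils'):

  import { lastValueFrom } from 'rxjs';

  async function runSequentially(datasource: LokiDatasource, requests: LokiGroupedRequest) {
    let merged: DataQueryResponse = { data: [], state: LoadingState.Streaming };
    for (const { request, partition } of requests) {
      for (const range of partition) {
        // one sub-request per time partition, folded into the running result
        const response = await lastValueFrom(datasource.runQuery({ ...request, range }));
        merged = combineResponses(merged, response);
      }
    }
    return { ...merged, state: LoadingState.Done };
  }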
@@ -167,7 +167,7 @@ function getNextRequestPointers(requests: LokiGroupedRequest, requestGroup: numb
   };
 }

-export function runQueryInChunks(datasource: LokiDatasource, request: DataQueryRequest<LokiQuery>) {
+export function runSplitQuery(datasource: LokiDatasource, request: DataQueryRequest<LokiQuery>) {
   const queries = request.targets.filter((query) => !query.hide);
   const [instantQueries, normalQueries] = partition(queries, (query) => query.queryType === LokiQueryType.Instant);
   const [logQueries, metricQueries] = partition(normalQueries, (query) => isLogsQuery(query.expr));

@@ -176,10 +176,10 @@ export function runQueryInChunks(datasource: LokiDatasource, request: DataQueryR

   const oneDayMs = 24 * 60 * 60 * 1000;
   const rangePartitionedLogQueries = groupBy(logQueries, (query) =>
-    query.chunkDuration ? durationToMilliseconds(parseDuration(query.chunkDuration)) : oneDayMs
+    query.splitDuration ? durationToMilliseconds(parseDuration(query.splitDuration)) : oneDayMs
   );
   const rangePartitionedMetricQueries = groupBy(metricQueries, (query) =>
-    query.chunkDuration ? durationToMilliseconds(parseDuration(query.chunkDuration)) : oneDayMs
+    query.splitDuration ? durationToMilliseconds(parseDuration(query.splitDuration)) : oneDayMs
   );

   const requests: LokiGroupedRequest = [];
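The grouping key is the per-query splitDuration resolved to milliseconds, falling back to one day. With @grafana/data's duration helpers (the same calls used in the hunk above):

  import { parseDuration, durationToMilliseconds } from '@grafana/data';

  durationToMilliseconds(parseDuration('30m')); // 1800000
  durationToMilliseconds(parseDuration('1h'));  // 3600000
  // no splitDuration set -> oneDayMs = 86400000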
@@ -222,5 +222,5 @@ export function runQueryInChunks(datasource: LokiDatasource, request: DataQueryR
     });
   }

-  return runGroupedQueriesInChunks(datasource, requests);
+  return runSplitGroupedQueries(datasource, requests);
 }
@@ -8,7 +8,7 @@ import {
   parseToNodeNamesArray,
   getParserFromQuery,
   obfuscate,
-  requestSupporsChunking,
+  requestSupportsSplitting,
 } from './queryUtils';
 import { LokiQuery, LokiQueryType } from './types';

@@ -294,7 +294,7 @@ describe('getParserFromQuery', () => {
   });
 });

-describe('requestSupporsChunking', () => {
+describe('requestSupportsSplitting', () => {
   it('hidden requests are not partitioned', () => {
     const requests: LokiQuery[] = [
       {

@@ -303,7 +303,7 @@ describe('requestSupporsChunking', () => {
         hide: true,
       },
     ];
-    expect(requestSupporsChunking(requests)).toBe(false);
+    expect(requestSupportsSplitting(requests)).toBe(false);
   });
   it('special requests are not partitioned', () => {
     const requests: LokiQuery[] = [

@@ -312,7 +312,7 @@ describe('requestSupporsChunking', () => {
         refId: 'do-not-chunk',
       },
     ];
-    expect(requestSupporsChunking(requests)).toBe(false);
+    expect(requestSupportsSplitting(requests)).toBe(false);
   });
   it('empty requests are not partitioned', () => {
     const requests: LokiQuery[] = [

@@ -321,7 +321,7 @@ describe('requestSupporsChunking', () => {
         refId: 'A',
       },
     ];
-    expect(requestSupporsChunking(requests)).toBe(false);
+    expect(requestSupportsSplitting(requests)).toBe(false);
   });
   it('all other requests are partitioned', () => {
     const requests: LokiQuery[] = [

@@ -334,6 +334,6 @@ describe('requestSupporsChunking', () => {
         refId: 'B',
       },
     ];
-    expect(requestSupporsChunking(requests)).toBe(true);
+    expect(requestSupportsSplitting(requests)).toBe(true);
   });
 });
@@ -296,7 +296,7 @@ export function getStreamSelectorsFromQuery(query: string): string[] {
   return labelMatchers;
 }

-export function requestSupporsChunking(allQueries: LokiQuery[]) {
+export function requestSupportsSplitting(allQueries: LokiQuery[]) {
   const queries = allQueries
     .filter((query) => !query.hide)
     .filter((query) => !query.refId.includes('do-not-chunk'))
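The hunk is cut off here. Judging by the tests above ('empty requests are not partitioned'), the remainder presumably drops queries with an empty expr and returns whether any query survives, along the lines of:

    .filter((query) => query.expr);
  return queries.length > 0;

Note that the refId escape hatch keeps the literal string 'do-not-chunk' even after the rename.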
@@ -23,7 +23,7 @@ export interface Props {

 export const LokiQueryBuilderOptions = React.memo<Props>(
   ({ app, query, onChange, onRunQuery, maxLines, datasource, queryStats }) => {
-    const [chunkRangeValid, setChunkRangeValid] = useState(true);
+    const [splitDurationValid, setsplitDurationValid] = useState(true);

     const onQueryTypeChange = (value: LokiQueryType) => {
       onChange({ ...query, queryType: value });

@@ -42,11 +42,11 @@ export const LokiQueryBuilderOptions = React.memo<Props>(
     const onChunkRangeChange = (evt: React.FormEvent<HTMLInputElement>) => {
       const value = evt.currentTarget.value;
       if (!isValidDuration(value)) {
-        setChunkRangeValid(false);
+        setsplitDurationValid(false);
         return;
       }
-      setChunkRangeValid(true);
-      onChange({ ...query, chunkDuration: value });
+      setsplitDurationValid(true);
+      onChange({ ...query, splitDuration: value });
       onRunQuery();
     };

@@ -112,16 +112,16 @@ export const LokiQueryBuilderOptions = React.memo<Props>(
         </EditorField>
         {config.featureToggles.lokiQuerySplittingConfig && config.featureToggles.lokiQuerySplitting && (
           <EditorField
-            label="Chunk Duration"
-            tooltip="Defines the duration of a single query chunk when query chunking is used."
+            label="Split Duration"
+            tooltip="Defines the duration of a single query when query splitting is enabled."
           >
             <AutoSizeInput
               minWidth={14}
               type="string"
               min={0}
-              defaultValue={query.chunkDuration ?? '1d'}
+              defaultValue={query.splitDuration ?? '1d'}
               onCommitChange={onChunkRangeChange}
-              invalid={!chunkRangeValid}
+              invalid={!splitDurationValid}
             />
           </EditorField>
         )}
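The editor input is validated with isValidDuration from @grafana/data before it is stored, so only strings that helper accepts ever reach splitDuration. Illustrative values (my own examples, not from the diff):

  isValidDuration('30m'); // true
  isValidDuration('1h');  // true
  isValidDuration('1d');  // true
  isValidDuration('90x'); // false -> field flagged invalid, query left unchanged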
@@ -40,7 +40,7 @@ export interface LokiQuery extends LokiQueryFromSchema {
   * This is a property for the experimental query splitting feature.
   * @experimental
   */
-  chunkDuration?: string;
+  splitDuration?: string;
 }

 export interface LokiOptions extends DataSourceJsonData {
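After the rename, a target using the experimental field looks like this (values are illustrative):

  const target: LokiQuery = {
    refId: 'A',
    expr: '{job="app"} |= "error"',
    splitDuration: '30m', // was chunkDuration before this commit
  };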