mirror of
https://github.com/grafana/grafana.git
synced 2025-02-25 18:55:37 -06:00
Grafana/Loki: Adds support for new Loki endpoints and metrics (#20158)
* Grafana/Loki: Adds support for new Loki endpoints and metrics * Adds `/loki/` prefix to new loki endpoints and updates response interfaces * Improved legacy support * Removed changes related to plugin.json and added Loki-specific hacks * Fixes live streaming for legacy loki datasources
This commit is contained in:
parent
1248457fee
commit
e0a2d4beac
@ -26,22 +26,24 @@ export class DataFrameView<T = any> implements Vector<T> {
|
||||
|
||||
constructor(private data: DataFrame) {
|
||||
const obj = ({} as unknown) as T;
|
||||
|
||||
for (let i = 0; i < data.fields.length; i++) {
|
||||
const field = data.fields[i];
|
||||
const getter = () => {
|
||||
return field.values.get(this.index);
|
||||
};
|
||||
const getter = () => field.values.get(this.index);
|
||||
|
||||
if (!(obj as any).hasOwnProperty(field.name)) {
|
||||
Object.defineProperty(obj, field.name, {
|
||||
enumerable: true, // Shows up as enumerable property
|
||||
get: getter,
|
||||
});
|
||||
}
|
||||
|
||||
Object.defineProperty(obj, i, {
|
||||
enumerable: false, // Don't enumerate array index
|
||||
get: getter,
|
||||
});
|
||||
}
|
||||
|
||||
this.obj = obj;
|
||||
}
|
||||
|
||||
@ -59,11 +61,7 @@ export class DataFrameView<T = any> implements Vector<T> {
|
||||
}
|
||||
|
||||
toArray(): T[] {
|
||||
const arr: T[] = [];
|
||||
for (let i = 0; i < this.data.length; i++) {
|
||||
arr.push({ ...this.get(i) });
|
||||
}
|
||||
return arr;
|
||||
return new Array(this.data.length).fill(0).map((_, i) => ({ ...this.get(i) }));
|
||||
}
|
||||
|
||||
toJSON(): T[] {
|
||||
|
@ -261,6 +261,8 @@ export abstract class DataSourceApi<
|
||||
*/
|
||||
languageProvider?: any;
|
||||
|
||||
getVersion?(): Promise<string>;
|
||||
|
||||
/**
|
||||
* Can be optionally implemented to allow datasource to be a source of annotations for dashboard. To be visible
|
||||
* in the annotation editor `annotations` capability also needs to be enabled in plugin.json.
|
||||
@ -302,6 +304,7 @@ export interface ExploreQueryFieldProps<
|
||||
|
||||
export interface ExploreStartPageProps {
|
||||
datasource?: DataSourceApi;
|
||||
exploreMode: 'Logs' | 'Metrics';
|
||||
onClickExample: (query: DataQuery) => void;
|
||||
}
|
||||
|
||||
@ -443,18 +446,22 @@ export interface DataQueryError {
|
||||
|
||||
export interface DataQueryRequest<TQuery extends DataQuery = DataQuery> {
|
||||
requestId: string; // Used to identify results and optionally cancel the request in backendSrv
|
||||
|
||||
dashboardId: number;
|
||||
interval: string;
|
||||
intervalMs?: number;
|
||||
maxDataPoints?: number;
|
||||
panelId: number;
|
||||
range?: TimeRange;
|
||||
reverse?: boolean;
|
||||
scopedVars: ScopedVars;
|
||||
targets: TQuery[];
|
||||
timezone: string;
|
||||
range: TimeRange;
|
||||
|
||||
cacheTimeout?: string;
|
||||
exploreMode?: 'Logs' | 'Metrics';
|
||||
rangeRaw?: RawTimeRange;
|
||||
timeInfo?: string; // The query time description (blue text in the upper right)
|
||||
targets: TQuery[];
|
||||
panelId: number;
|
||||
dashboardId: number;
|
||||
cacheTimeout?: string;
|
||||
interval: string;
|
||||
intervalMs: number;
|
||||
maxDataPoints: number;
|
||||
scopedVars: ScopedVars;
|
||||
|
||||
// Request Timing
|
||||
startTime: number;
|
||||
|
@ -10,11 +10,10 @@ export type GraphSeriesValue = number | null;
|
||||
|
||||
/** View model projection of a series */
|
||||
export interface GraphSeriesXY {
|
||||
label: string;
|
||||
color: string;
|
||||
data: GraphSeriesValue[][]; // [x,y][]
|
||||
info?: DisplayValue[]; // Legend info
|
||||
isVisible: boolean;
|
||||
label: string;
|
||||
yAxis: YAxis;
|
||||
// Field with series' time values
|
||||
timeField: Field;
|
||||
@ -22,6 +21,8 @@ export interface GraphSeriesXY {
|
||||
valueField: Field;
|
||||
seriesIndex: number;
|
||||
timeStep: number;
|
||||
|
||||
info?: DisplayValue[]; // Legend info
|
||||
}
|
||||
|
||||
export interface CreatePlotOverlay {
|
||||
|
@ -10,6 +10,17 @@ import { FolderInfo, DashboardDTO, CoreEvents } from 'app/types';
|
||||
import { BackendSrv as BackendService, getBackendSrv as getBackendService, BackendSrvRequest } from '@grafana/runtime';
|
||||
import { AppEvents } from '@grafana/data';
|
||||
|
||||
export interface DatasourceRequestOptions {
|
||||
retry?: number;
|
||||
method?: string;
|
||||
requestId?: string;
|
||||
timeout?: angular.IPromise<any>;
|
||||
url?: string;
|
||||
headers?: { [key: string]: any };
|
||||
silent?: boolean;
|
||||
data?: { [key: string]: any };
|
||||
}
|
||||
|
||||
export class BackendSrv implements BackendService {
|
||||
private inFlightRequests: { [key: string]: Array<angular.IDeferred<any>> } = {};
|
||||
private HTTP_REQUEST_CANCELED = -1;
|
||||
|
@ -148,6 +148,7 @@ export function buildQueryTransaction(
|
||||
__interval_ms: { text: intervalMs, value: intervalMs },
|
||||
},
|
||||
maxDataPoints: queryOptions.maxDataPoints,
|
||||
exploreMode: queryOptions.mode,
|
||||
};
|
||||
|
||||
return {
|
||||
@ -517,7 +518,7 @@ export const convertToWebSocketUrl = (url: string) => {
|
||||
const protocol = window.location.protocol === 'https:' ? 'wss://' : 'ws://';
|
||||
let backend = `${protocol}${window.location.host}${config.appSubUrl}`;
|
||||
if (backend.endsWith('/')) {
|
||||
backend = backend.slice(0, backend.length - 1);
|
||||
backend = backend.slice(0, -1);
|
||||
}
|
||||
return `${backend}${url}`;
|
||||
};
|
||||
|
@ -14,7 +14,7 @@ import { DashboardModel } from '../dashboard/state/DashboardModel';
|
||||
import DatasourceSrv from '../plugins/datasource_srv';
|
||||
import { BackendSrv } from 'app/core/services/backend_srv';
|
||||
import { TimeSrv } from '../dashboard/services/TimeSrv';
|
||||
import { DataSourceApi, PanelEvents, AnnotationEvent, AppEvents } from '@grafana/data';
|
||||
import { DataSourceApi, PanelEvents, AnnotationEvent, AppEvents, PanelModel, TimeRange } from '@grafana/data';
|
||||
import { GrafanaRootScope } from 'app/routes/GrafanaCtrl';
|
||||
|
||||
export class AnnotationsSrv {
|
||||
@ -44,7 +44,7 @@ export class AnnotationsSrv {
|
||||
this.datasourcePromises = null;
|
||||
}
|
||||
|
||||
getAnnotations(options: any) {
|
||||
getAnnotations(options: { dashboard: DashboardModel; panel: PanelModel; range: TimeRange }) {
|
||||
return this.$q
|
||||
.all([this.getGlobalAnnotations(options), this.getAlertStates(options)])
|
||||
.then(results => {
|
||||
@ -104,7 +104,7 @@ export class AnnotationsSrv {
|
||||
return this.alertStatesPromise;
|
||||
}
|
||||
|
||||
getGlobalAnnotations(options: any) {
|
||||
getGlobalAnnotations(options: { dashboard: DashboardModel; panel: PanelModel; range: TimeRange }) {
|
||||
const dashboard = options.dashboard;
|
||||
|
||||
if (this.globalAnnotationsPromise) {
|
||||
@ -130,7 +130,7 @@ export class AnnotationsSrv {
|
||||
.then((datasource: DataSourceApi) => {
|
||||
// issue query against data source
|
||||
return datasource.annotationQuery({
|
||||
range: range,
|
||||
range,
|
||||
rangeRaw: range.raw,
|
||||
annotation: annotation,
|
||||
dashboard: dashboard,
|
||||
|
@ -78,7 +78,7 @@ export function processResponsePacket(packet: DataQueryResponse, state: RunningQ
|
||||
* It will
|
||||
* * Merge multiple responses into a single DataFrame array based on the packet key
|
||||
* * Will emit a loading state if no response after 50ms
|
||||
* * Cancel any still runnning network requests on unsubscribe (using request.requestId)
|
||||
* * Cancel any still running network requests on unsubscribe (using request.requestId)
|
||||
*/
|
||||
export function runRequest(datasource: DataSourceApi, request: DataQueryRequest): Observable<PanelData> {
|
||||
let state: RunningQueryState = {
|
||||
|
@ -83,7 +83,7 @@ export const getPanelMenu = (dashboard: DashboardModel, panel: PanelModel) => {
|
||||
const onNavigateToExplore = (event: React.MouseEvent<any>) => {
|
||||
event.preventDefault();
|
||||
const openInNewWindow = event.ctrlKey || event.metaKey ? (url: string) => window.open(url) : undefined;
|
||||
store.dispatch(navigateToExplore(panel, { getDataSourceSrv, getTimeSrv, getExploreUrl, openInNewWindow }));
|
||||
store.dispatch(navigateToExplore(panel, { getDataSourceSrv, getTimeSrv, getExploreUrl, openInNewWindow }) as any);
|
||||
};
|
||||
|
||||
const menu: PanelMenuItem[] = [];
|
||||
|
@ -35,6 +35,7 @@ import {
|
||||
TimeZone,
|
||||
AbsoluteTimeRange,
|
||||
} from '@grafana/data';
|
||||
|
||||
import {
|
||||
ExploreItemState,
|
||||
ExploreUrlState,
|
||||
@ -288,7 +289,11 @@ export class Explore extends React.PureComponent<ExploreProps> {
|
||||
<ErrorBoundaryAlert>
|
||||
{showingStartPage && (
|
||||
<div className="grafana-info-box grafana-info-box--max-lg">
|
||||
<StartPage onClickExample={this.onClickExample} datasource={datasourceInstance} />
|
||||
<StartPage
|
||||
onClickExample={this.onClickExample}
|
||||
datasource={datasourceInstance}
|
||||
exploreMode={mode}
|
||||
/>
|
||||
</div>
|
||||
)}
|
||||
{!showingStartPage && (
|
||||
@ -373,6 +378,7 @@ function mapStateToProps(state: StoreState, { exploreId }: ExploreProps): Partia
|
||||
const initialRange = urlRange ? getTimeRangeFromUrlMemoized(urlRange, timeZone).raw : DEFAULT_RANGE;
|
||||
|
||||
let newMode: ExploreMode;
|
||||
|
||||
if (supportedModes.length) {
|
||||
const urlModeIsValid = supportedModes.includes(urlMode);
|
||||
const modeStateIsValid = supportedModes.includes(mode);
|
||||
@ -385,7 +391,7 @@ function mapStateToProps(state: StoreState, { exploreId }: ExploreProps): Partia
|
||||
newMode = supportedModes[0];
|
||||
}
|
||||
} else {
|
||||
newMode = [ExploreMode.Metrics, ExploreMode.Logs].includes(mode) ? mode : ExploreMode.Metrics;
|
||||
newMode = [ExploreMode.Metrics, ExploreMode.Logs].includes(urlMode) ? urlMode : null;
|
||||
}
|
||||
|
||||
const initialUI = ui || DEFAULT_UI_STATE;
|
||||
|
@ -347,7 +347,9 @@ const mapStateToProps = (state: StoreState, { exploreId }: OwnProps): StateProps
|
||||
? exploreDatasources.find(datasource => datasource.name === datasourceInstance.name)
|
||||
: undefined;
|
||||
const hasLiveOption =
|
||||
datasourceInstance && datasourceInstance.meta && datasourceInstance.meta.streaming ? true : false;
|
||||
datasourceInstance && datasourceInstance.meta && datasourceInstance.meta.streaming && mode === ExploreMode.Logs
|
||||
? true
|
||||
: false;
|
||||
|
||||
return {
|
||||
datasourceMissing,
|
||||
|
@ -186,6 +186,7 @@ export interface UpdateUIStatePayload extends Partial<ExploreUIState> {
|
||||
export interface UpdateDatasourceInstancePayload {
|
||||
exploreId: ExploreId;
|
||||
datasourceInstance: DataSourceApi;
|
||||
version?: string;
|
||||
}
|
||||
|
||||
export interface ToggleLogLevelPayload {
|
||||
|
@ -123,8 +123,15 @@ export function changeDatasource(exploreId: ExploreId, datasource: string): Thun
|
||||
const currentDataSourceInstance = getState().explore[exploreId].datasourceInstance;
|
||||
const queries = getState().explore[exploreId].queries;
|
||||
const orgId = getState().user.orgId;
|
||||
const datasourceVersion = newDataSourceInstance.getVersion && (await newDataSourceInstance.getVersion());
|
||||
|
||||
dispatch(updateDatasourceInstanceAction({ exploreId, datasourceInstance: newDataSourceInstance }));
|
||||
dispatch(
|
||||
updateDatasourceInstanceAction({
|
||||
exploreId,
|
||||
datasourceInstance: newDataSourceInstance,
|
||||
version: datasourceVersion,
|
||||
})
|
||||
);
|
||||
|
||||
await dispatch(importQueries(exploreId, queries, currentDataSourceInstance, newDataSourceInstance));
|
||||
|
||||
@ -436,6 +443,7 @@ export function runQueries(exploreId: ExploreId): ThunkResult<void> {
|
||||
liveStreaming: live,
|
||||
showingGraph,
|
||||
showingTable,
|
||||
mode,
|
||||
};
|
||||
|
||||
const datasourceId = datasourceInstance.meta.id;
|
||||
|
@ -275,30 +275,46 @@ export const itemReducer = reducerFactory<ExploreItemState>({} as ExploreItemSta
|
||||
.addMapper({
|
||||
filter: updateDatasourceInstanceAction,
|
||||
mapper: (state, action): ExploreItemState => {
|
||||
const { datasourceInstance } = action.payload;
|
||||
const [supportedModes, mode] = getModesForDatasource(datasourceInstance, state.mode);
|
||||
|
||||
const originPanelId = state.urlState && state.urlState.originPanelId;
|
||||
const { datasourceInstance, version } = action.payload;
|
||||
|
||||
// Custom components
|
||||
const StartPage = datasourceInstance.components.ExploreStartPage;
|
||||
stopQueryState(state.querySubscription);
|
||||
|
||||
let newMetadata = datasourceInstance.meta;
|
||||
|
||||
// HACK: Temporary hack for Loki datasource. Can remove when plugin.json structure is changed.
|
||||
if (version && version.length && datasourceInstance.meta.name === 'Loki') {
|
||||
const lokiVersionMetadata: Record<string, { metrics: boolean }> = {
|
||||
v0: {
|
||||
metrics: false,
|
||||
},
|
||||
|
||||
v1: {
|
||||
metrics: true,
|
||||
},
|
||||
};
|
||||
newMetadata = { ...newMetadata, ...lokiVersionMetadata[version] };
|
||||
}
|
||||
|
||||
const updatedDatasourceInstance = Object.assign(datasourceInstance, { meta: newMetadata });
|
||||
const [supportedModes, mode] = getModesForDatasource(updatedDatasourceInstance, state.mode);
|
||||
|
||||
return {
|
||||
...state,
|
||||
datasourceInstance,
|
||||
datasourceInstance: updatedDatasourceInstance,
|
||||
graphResult: null,
|
||||
tableResult: null,
|
||||
logsResult: null,
|
||||
latency: 0,
|
||||
queryResponse: createEmptyQueryResponse(),
|
||||
loading: false,
|
||||
StartPage,
|
||||
StartPage: datasourceInstance.components.ExploreStartPage,
|
||||
showingStartPage: Boolean(StartPage),
|
||||
queryKeys: [],
|
||||
supportedModes,
|
||||
mode,
|
||||
originPanelId,
|
||||
originPanelId: state.urlState && state.urlState.originPanelId,
|
||||
};
|
||||
},
|
||||
})
|
||||
@ -657,10 +673,7 @@ export const updateChildRefreshState = (
|
||||
};
|
||||
|
||||
const getModesForDatasource = (dataSource: DataSourceApi, currentMode: ExploreMode): [ExploreMode[], ExploreMode] => {
|
||||
// Temporary hack here. We want Loki to work in dashboards for which it needs to have metrics = true which is weird
|
||||
// for Explore.
|
||||
// TODO: need to figure out a better way to handle this situation
|
||||
const supportsGraph = dataSource.meta.name === 'Loki' ? false : dataSource.meta.metrics;
|
||||
const supportsGraph = dataSource.meta.metrics;
|
||||
const supportsLogs = dataSource.meta.logs;
|
||||
|
||||
let mode = currentMode || ExploreMode.Metrics;
|
||||
@ -678,6 +691,12 @@ const getModesForDatasource = (dataSource: DataSourceApi, currentMode: ExploreMo
|
||||
mode = supportedModes[0];
|
||||
}
|
||||
|
||||
// HACK: Used to set Loki's default explore mode to Logs mode.
|
||||
// A better solution would be to introduce a "default" or "preferred" mode to the datasource config
|
||||
if (dataSource.meta.name === 'Loki' && !currentMode) {
|
||||
mode = ExploreMode.Logs;
|
||||
}
|
||||
|
||||
return [supportedModes, mode];
|
||||
};
|
||||
|
||||
|
@ -3,13 +3,11 @@ import configureMockStore from 'redux-mock-store';
|
||||
import { PlaylistSrv } from '../playlist_srv';
|
||||
import { setStore } from 'app/store/store';
|
||||
|
||||
const mockStore = configureMockStore();
|
||||
const mockStore = configureMockStore<any, any>();
|
||||
|
||||
setStore(
|
||||
mockStore({
|
||||
location: {},
|
||||
})
|
||||
);
|
||||
setStore(mockStore({
|
||||
location: {},
|
||||
}) as any);
|
||||
|
||||
const dashboards = [{ url: 'dash1' }, { url: 'dash2' }];
|
||||
|
||||
@ -122,13 +120,11 @@ describe('PlaylistSrv', () => {
|
||||
|
||||
srv.next();
|
||||
|
||||
setStore(
|
||||
mockStore({
|
||||
location: {
|
||||
path: 'dash2',
|
||||
},
|
||||
})
|
||||
);
|
||||
setStore(mockStore({
|
||||
location: {
|
||||
path: 'dash2',
|
||||
},
|
||||
}) as any);
|
||||
|
||||
expect((srv as any).validPlaylistUrl).toBe('dash2');
|
||||
|
||||
|
@ -66,7 +66,7 @@ export class DatasourceSrv implements DataSourceService {
|
||||
|
||||
const dsConfig = config.datasources[name];
|
||||
if (!dsConfig) {
|
||||
return this.$q.reject({ message: 'Datasource named ' + name + ' was not found' });
|
||||
return this.$q.reject({ message: `Datasource named ${name} was not found` });
|
||||
}
|
||||
|
||||
const deferred = this.$q.defer();
|
||||
|
@ -285,7 +285,7 @@ describe('CloudWatchDatasource', () => {
|
||||
beforeEach(() => {
|
||||
redux.setStore({
|
||||
dispatch: jest.fn(),
|
||||
});
|
||||
} as any);
|
||||
|
||||
ctx.backendSrv.datasourceRequest = jest.fn(() => {
|
||||
return Promise.reject(backendErrorResponse);
|
||||
|
@ -2,11 +2,31 @@ import React, { PureComponent } from 'react';
|
||||
import { shuffle } from 'lodash';
|
||||
import { ExploreStartPageProps, DataQuery } from '@grafana/data';
|
||||
import LokiLanguageProvider from '../language_provider';
|
||||
import { ExploreMode } from 'app/types';
|
||||
|
||||
const DEFAULT_EXAMPLES = ['{job="default/prometheus"}'];
|
||||
const PREFERRED_LABELS = ['job', 'app', 'k8s_app'];
|
||||
const EXAMPLES_LIMIT = 5;
|
||||
|
||||
const LOGQL_EXAMPLES = [
|
||||
{
|
||||
title: 'Count over time',
|
||||
expression: 'count_over_time({job="mysql"}[5m])',
|
||||
label: 'This query counts all the log lines within the last five minutes for the MySQL job.',
|
||||
},
|
||||
{
|
||||
title: 'Rate',
|
||||
expression: 'rate(({job="mysql"} |= "error" != "timeout")[10s])',
|
||||
label:
|
||||
'This query gets the per-second rate of all non-timeout errors within the last ten seconds for the MySQL job.',
|
||||
},
|
||||
{
|
||||
title: 'Aggregate, count, and group',
|
||||
expression: 'sum(count_over_time({job="mysql"}[5m])) by (level)',
|
||||
label: 'Get the count of logs during the last five minutes, grouping by level.',
|
||||
},
|
||||
];
|
||||
|
||||
export default class LokiCheatSheet extends PureComponent<ExploreStartPageProps, { userExamples: string[] }> {
|
||||
userLabelTimer: NodeJS.Timeout;
|
||||
state = {
|
||||
@ -57,7 +77,7 @@ export default class LokiCheatSheet extends PureComponent<ExploreStartPageProps,
|
||||
);
|
||||
}
|
||||
|
||||
render() {
|
||||
renderLogsCheatSheet() {
|
||||
const { userExamples } = this.state;
|
||||
|
||||
return (
|
||||
@ -98,4 +118,25 @@ export default class LokiCheatSheet extends PureComponent<ExploreStartPageProps,
|
||||
</>
|
||||
);
|
||||
}
|
||||
|
||||
renderMetricsCheatSheet() {
|
||||
return (
|
||||
<div>
|
||||
<h2>LogQL Cheat Sheet</h2>
|
||||
{LOGQL_EXAMPLES.map(item => (
|
||||
<div className="cheat-sheet-item" key={item.expression}>
|
||||
<div className="cheat-sheet-item__title">{item.title}</div>
|
||||
{this.renderExpression(item.expression)}
|
||||
<div className="cheat-sheet-item__label">{item.label}</div>
|
||||
</div>
|
||||
))}
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
render() {
|
||||
const { exploreMode } = this.props;
|
||||
|
||||
return exploreMode === ExploreMode.Logs ? this.renderLogsCheatSheet() : this.renderMetricsCheatSheet();
|
||||
}
|
||||
}
|
||||
|
@ -17,9 +17,9 @@ import {
|
||||
import { Plugin, Node } from 'slate';
|
||||
|
||||
// Types
|
||||
import { LokiQuery } from '../types';
|
||||
import { DOMUtil } from '@grafana/ui';
|
||||
import { ExploreQueryFieldProps, AbsoluteTimeRange } from '@grafana/data';
|
||||
import { LokiQuery, LokiOptions } from '../types';
|
||||
import { Grammar } from 'prismjs';
|
||||
import LokiLanguageProvider, { LokiHistoryItem } from '../language_provider';
|
||||
import LokiDatasource from '../datasource';
|
||||
@ -61,7 +61,7 @@ function willApplySuggestion(suggestion: string, { typeaheadContext, typeaheadTe
|
||||
return suggestion;
|
||||
}
|
||||
|
||||
export interface LokiQueryFieldFormProps extends ExploreQueryFieldProps<LokiDatasource, LokiQuery> {
|
||||
export interface LokiQueryFieldFormProps extends ExploreQueryFieldProps<LokiDatasource, LokiQuery, LokiOptions> {
|
||||
history: LokiHistoryItem[];
|
||||
syntax: Grammar;
|
||||
logLabelOptions: CascaderOption[];
|
||||
|
@ -1,17 +1,20 @@
|
||||
import LokiDatasource from './datasource';
|
||||
import { LokiQuery } from './types';
|
||||
import { LokiQuery, LokiResultType, LokiResponse, LokiLegacyStreamResponse } from './types';
|
||||
import { getQueryOptions } from 'test/helpers/getQueryOptions';
|
||||
import { AnnotationQueryRequest, DataSourceApi, DataFrame, dateTime } from '@grafana/data';
|
||||
import { BackendSrv } from 'app/core/services/backend_srv';
|
||||
import { TemplateSrv } from 'app/features/templating/template_srv';
|
||||
import { CustomVariable } from 'app/features/templating/custom_variable';
|
||||
import { ExploreMode } from 'app/types';
|
||||
import { of } from 'rxjs';
|
||||
import omit from 'lodash/omit';
|
||||
|
||||
describe('LokiDatasource', () => {
|
||||
const instanceSettings: any = {
|
||||
url: 'myloggingurl',
|
||||
};
|
||||
|
||||
const testResp = {
|
||||
const legacyTestResp: { data: LokiLegacyStreamResponse; status: number } = {
|
||||
data: {
|
||||
streams: [
|
||||
{
|
||||
@ -20,6 +23,22 @@ describe('LokiDatasource', () => {
|
||||
},
|
||||
],
|
||||
},
|
||||
status: 404, // for simulating legacy endpoint
|
||||
};
|
||||
|
||||
const testResp: { data: LokiResponse } = {
|
||||
data: {
|
||||
data: {
|
||||
resultType: LokiResultType.Stream,
|
||||
result: [
|
||||
{
|
||||
stream: {},
|
||||
values: [['1573646419522934000', 'hello']],
|
||||
},
|
||||
],
|
||||
},
|
||||
status: 'success',
|
||||
},
|
||||
};
|
||||
|
||||
const backendSrvMock = { datasourceRequest: jest.fn() };
|
||||
@ -30,8 +49,67 @@ describe('LokiDatasource', () => {
|
||||
replace: (a: string) => a,
|
||||
} as unknown) as TemplateSrv;
|
||||
|
||||
describe('when running range query with fallback', () => {
|
||||
let ds: LokiDatasource;
|
||||
beforeEach(() => {
|
||||
const customData = { ...(instanceSettings.jsonData || {}), maxLines: 20 };
|
||||
const customSettings = { ...instanceSettings, jsonData: customData };
|
||||
ds = new LokiDatasource(customSettings, backendSrv, templateSrvMock);
|
||||
backendSrvMock.datasourceRequest = jest.fn(() => Promise.resolve(legacyTestResp));
|
||||
});
|
||||
|
||||
test('should try latest endpoint but fall back to legacy endpoint if it cannot be reached', async () => {
|
||||
const options = getQueryOptions<LokiQuery>({
|
||||
targets: [{ expr: '{job="grafana"}', refId: 'B' }],
|
||||
exploreMode: ExploreMode.Logs,
|
||||
});
|
||||
|
||||
ds.runLegacyQuery = jest.fn();
|
||||
await ds.runRangeQueryWithFallback(options.targets[0], options).toPromise();
|
||||
expect(ds.runLegacyQuery).toBeCalled();
|
||||
});
|
||||
});
|
||||
|
||||
describe('when querying', () => {
|
||||
const testLimit = makeLimitTest(instanceSettings, backendSrvMock, backendSrv, templateSrvMock, testResp);
|
||||
const testLimit = makeLimitTest(instanceSettings, backendSrvMock, backendSrv, templateSrvMock, legacyTestResp);
|
||||
let ds: LokiDatasource;
|
||||
|
||||
beforeEach(() => {
|
||||
const customData = { ...(instanceSettings.jsonData || {}), maxLines: 20 };
|
||||
const customSettings = { ...instanceSettings, jsonData: customData };
|
||||
ds = new LokiDatasource(customSettings, backendSrv, templateSrvMock);
|
||||
backendSrvMock.datasourceRequest = jest.fn(() => Promise.resolve(testResp));
|
||||
});
|
||||
|
||||
test('should run instant query and range query when in metrics mode', async () => {
|
||||
const options = getQueryOptions<LokiQuery>({
|
||||
targets: [{ expr: 'rate({job="grafana"}[5m])', refId: 'A' }],
|
||||
exploreMode: ExploreMode.Metrics,
|
||||
});
|
||||
|
||||
ds.runInstantQuery = jest.fn(() => of({ data: [] }));
|
||||
ds.runLegacyQuery = jest.fn();
|
||||
ds.runRangeQueryWithFallback = jest.fn(() => of({ data: [] }));
|
||||
await ds.query(options).toPromise();
|
||||
|
||||
expect(ds.runInstantQuery).toBeCalled();
|
||||
expect(ds.runLegacyQuery).not.toBeCalled();
|
||||
expect(ds.runRangeQueryWithFallback).toBeCalled();
|
||||
});
|
||||
|
||||
test('should just run range query when in logs mode', async () => {
|
||||
const options = getQueryOptions<LokiQuery>({
|
||||
targets: [{ expr: '{job="grafana"}', refId: 'B' }],
|
||||
exploreMode: ExploreMode.Logs,
|
||||
});
|
||||
|
||||
ds.runInstantQuery = jest.fn(() => of({ data: [] }));
|
||||
ds.runRangeQueryWithFallback = jest.fn(() => of({ data: [] }));
|
||||
await ds.query(options).toPromise();
|
||||
|
||||
expect(ds.runInstantQuery).not.toBeCalled();
|
||||
expect(ds.runRangeQueryWithFallback).toBeCalled();
|
||||
});
|
||||
|
||||
test('should use default max lines when no limit given', () => {
|
||||
testLimit({
|
||||
@ -61,14 +139,17 @@ describe('LokiDatasource', () => {
|
||||
});
|
||||
});
|
||||
|
||||
test('should return series data', async done => {
|
||||
test('should return series data', async () => {
|
||||
const customData = { ...(instanceSettings.jsonData || {}), maxLines: 20 };
|
||||
const customSettings = { ...instanceSettings, jsonData: customData };
|
||||
const ds = new LokiDatasource(customSettings, backendSrv, templateSrvMock);
|
||||
backendSrvMock.datasourceRequest = jest.fn(() => Promise.resolve(testResp));
|
||||
backendSrvMock.datasourceRequest = jest
|
||||
.fn()
|
||||
.mockReturnValueOnce(Promise.resolve(legacyTestResp))
|
||||
.mockReturnValueOnce(Promise.resolve(omit(legacyTestResp, 'status')));
|
||||
|
||||
const options = getQueryOptions<LokiQuery>({
|
||||
targets: [{ expr: '{} foo', refId: 'B' }],
|
||||
targets: [{ expr: '{job="grafana"} |= "foo"', refId: 'B' }],
|
||||
});
|
||||
|
||||
const res = await ds.query(options).toPromise();
|
||||
@ -76,14 +157,13 @@ describe('LokiDatasource', () => {
|
||||
const dataFrame = res.data[0] as DataFrame;
|
||||
expect(dataFrame.fields[1].values.get(0)).toBe('hello');
|
||||
expect(dataFrame.meta.limit).toBe(20);
|
||||
expect(dataFrame.meta.searchWords).toEqual(['(?i)foo']);
|
||||
done();
|
||||
expect(dataFrame.meta.searchWords).toEqual(['foo']);
|
||||
});
|
||||
});
|
||||
|
||||
describe('When interpolating variables', () => {
|
||||
let ds: any = {};
|
||||
let variable: any = {};
|
||||
let ds: LokiDatasource;
|
||||
let variable: CustomVariable;
|
||||
|
||||
beforeEach(() => {
|
||||
const customData = { ...(instanceSettings.jsonData || {}), maxLines: 20 };
|
||||
@ -155,23 +235,25 @@ describe('LokiDatasource', () => {
|
||||
});
|
||||
|
||||
describe('and call fails with 401 error', () => {
|
||||
beforeEach(async () => {
|
||||
const backendSrv = ({
|
||||
async datasourceRequest() {
|
||||
return Promise.reject({
|
||||
statusText: 'Unauthorized',
|
||||
status: 401,
|
||||
data: {
|
||||
message: 'Unauthorized',
|
||||
},
|
||||
});
|
||||
},
|
||||
} as unknown) as BackendSrv;
|
||||
ds = new LokiDatasource(instanceSettings, backendSrv, {} as TemplateSrv);
|
||||
result = await ds.testDatasource();
|
||||
let ds: LokiDatasource;
|
||||
beforeEach(() => {
|
||||
backendSrvMock.datasourceRequest = jest.fn(() =>
|
||||
Promise.reject({
|
||||
statusText: 'Unauthorized',
|
||||
status: 401,
|
||||
data: {
|
||||
message: 'Unauthorized',
|
||||
},
|
||||
})
|
||||
);
|
||||
|
||||
const customData = { ...(instanceSettings.jsonData || {}), maxLines: 20 };
|
||||
const customSettings = { ...instanceSettings, jsonData: customData };
|
||||
ds = new LokiDatasource(customSettings, backendSrv, templateSrvMock);
|
||||
});
|
||||
|
||||
it('should return error status and a detailed error message', () => {
|
||||
it('should return error status and a detailed error message', async () => {
|
||||
const result = await ds.testDatasource();
|
||||
expect(result.status).toEqual('error');
|
||||
expect(result.message).toBe('Loki: Unauthorized. 401. Unauthorized');
|
||||
});
|
||||
@ -221,24 +303,32 @@ describe('LokiDatasource', () => {
|
||||
});
|
||||
|
||||
describe('annotationQuery', () => {
|
||||
it('should transform the loki data to annototion response', async () => {
|
||||
it('should transform the loki data to annotation response', async () => {
|
||||
const ds = new LokiDatasource(instanceSettings, backendSrv, templateSrvMock);
|
||||
backendSrvMock.datasourceRequest = jest.fn(() =>
|
||||
Promise.resolve({
|
||||
data: {
|
||||
streams: [
|
||||
{
|
||||
entries: [{ ts: '2019-02-01T10:27:37.498180581Z', line: 'hello' }],
|
||||
labels: '{label="value"}',
|
||||
},
|
||||
{
|
||||
entries: [{ ts: '2019-02-01T12:27:37.498180581Z', line: 'hello 2' }],
|
||||
labels: '{label2="value2"}',
|
||||
},
|
||||
],
|
||||
},
|
||||
})
|
||||
);
|
||||
backendSrvMock.datasourceRequest = jest
|
||||
.fn()
|
||||
.mockReturnValueOnce(
|
||||
Promise.resolve({
|
||||
data: [],
|
||||
status: 404,
|
||||
})
|
||||
)
|
||||
.mockReturnValueOnce(
|
||||
Promise.resolve({
|
||||
data: {
|
||||
streams: [
|
||||
{
|
||||
entries: [{ ts: '2019-02-01T10:27:37.498180581Z', line: 'hello' }],
|
||||
labels: '{label="value"}',
|
||||
},
|
||||
{
|
||||
entries: [{ ts: '2019-02-01T12:27:37.498180581Z', line: 'hello 2' }],
|
||||
labels: '{label2="value2"}',
|
||||
},
|
||||
],
|
||||
},
|
||||
})
|
||||
);
|
||||
const query = makeAnnotationQueryRequest();
|
||||
|
||||
const res = await ds.annotationQuery(query);
|
||||
|
@ -1,24 +1,36 @@
|
||||
// Libraries
|
||||
import { isEmpty, isString, fromPairs, map as lodashMap } from 'lodash';
|
||||
import { isEmpty, map as lodashMap, fromPairs } from 'lodash';
|
||||
import { Observable, from, merge, of, iif, defer } from 'rxjs';
|
||||
import { map, filter, catchError, switchMap, mergeMap } from 'rxjs/operators';
|
||||
|
||||
// Services & Utils
|
||||
import {
|
||||
dateMath,
|
||||
DataFrame,
|
||||
LogRowModel,
|
||||
DateTime,
|
||||
AnnotationEvent,
|
||||
DataFrameView,
|
||||
LoadingState,
|
||||
ArrayVector,
|
||||
FieldType,
|
||||
FieldConfig,
|
||||
} from '@grafana/data';
|
||||
import { dateMath } from '@grafana/data';
|
||||
import { addLabelToSelector } from 'app/plugins/datasource/prometheus/add_label_to_query';
|
||||
import LanguageProvider from './language_provider';
|
||||
import { logStreamToDataFrame } from './result_transformer';
|
||||
import { BackendSrv, DatasourceRequestOptions } from 'app/core/services/backend_srv';
|
||||
import { TemplateSrv } from 'app/features/templating/template_srv';
|
||||
import { safeStringifyValue, convertToWebSocketUrl } from 'app/core/utils/explore';
|
||||
import {
|
||||
lokiResultsToTableModel,
|
||||
processRangeQueryResponse,
|
||||
legacyLogStreamToDataFrame,
|
||||
lokiStreamResultToDataFrame,
|
||||
isLokiLogsStream,
|
||||
} from './result_transformer';
|
||||
import { formatQuery, parseQuery, getHighlighterExpressionsFromQuery } from './query_utils';
|
||||
|
||||
// Types
|
||||
import {
|
||||
LogRowModel,
|
||||
DateTime,
|
||||
LoadingState,
|
||||
AnnotationEvent,
|
||||
DataFrameView,
|
||||
TimeRange,
|
||||
FieldConfig,
|
||||
ArrayVector,
|
||||
FieldType,
|
||||
DataFrame,
|
||||
TimeSeries,
|
||||
PluginMeta,
|
||||
DataSourceApi,
|
||||
DataSourceInstanceSettings,
|
||||
@ -27,29 +39,38 @@ import {
|
||||
DataQueryResponse,
|
||||
AnnotationQueryRequest,
|
||||
} from '@grafana/data';
|
||||
import { LokiQuery, LokiOptions, LokiLogsStream, LokiResponse } from './types';
|
||||
import { BackendSrv } from 'app/core/services/backend_srv';
|
||||
import { TemplateSrv } from 'app/features/templating/template_srv';
|
||||
import { safeStringifyValue, convertToWebSocketUrl } from 'app/core/utils/explore';
|
||||
import { LiveTarget, LiveStreams } from './live_streams';
|
||||
import { Observable, from, merge, of } from 'rxjs';
|
||||
import { map, filter } from 'rxjs/operators';
|
||||
|
||||
import {
|
||||
LokiQuery,
|
||||
LokiOptions,
|
||||
LokiLegacyQueryRequest,
|
||||
LokiLegacyStreamResponse,
|
||||
LokiResponse,
|
||||
LokiResultType,
|
||||
LokiRangeQueryRequest,
|
||||
LokiStreamResponse,
|
||||
LokiLegacyStreamResult,
|
||||
} from './types';
|
||||
import { ExploreMode } from 'app/types';
|
||||
import { LegacyTarget, LiveStreams } from './live_streams';
|
||||
import LanguageProvider from './language_provider';
|
||||
|
||||
type RangeQueryOptions = Pick<DataQueryRequest<LokiQuery>, 'range' | 'intervalMs' | 'maxDataPoints' | 'reverse'>;
|
||||
export const DEFAULT_MAX_LINES = 1000;
|
||||
const LEGACY_QUERY_ENDPOINT = '/api/prom/query';
|
||||
const RANGE_QUERY_ENDPOINT = '/loki/api/v1/query_range';
|
||||
const INSTANT_QUERY_ENDPOINT = '/loki/api/v1/query';
|
||||
|
||||
const DEFAULT_QUERY_PARAMS = {
|
||||
const DEFAULT_QUERY_PARAMS: Partial<LokiLegacyQueryRequest> = {
|
||||
direction: 'BACKWARD',
|
||||
limit: DEFAULT_MAX_LINES,
|
||||
regexp: '',
|
||||
query: '',
|
||||
};
|
||||
|
||||
function serializeParams(data: any) {
|
||||
function serializeParams(data: Record<string, any>) {
|
||||
return Object.keys(data)
|
||||
.map(k => {
|
||||
const v = data[k];
|
||||
return encodeURIComponent(k) + '=' + encodeURIComponent(v);
|
||||
})
|
||||
.map(k => `${encodeURIComponent(k)}=${encodeURIComponent(data[k])}`)
|
||||
.join('&');
|
||||
}
|
||||
|
||||
@ -62,6 +83,7 @@ export class LokiDatasource extends DataSourceApi<LokiQuery, LokiOptions> {
|
||||
private streams = new LiveStreams();
|
||||
languageProvider: LanguageProvider;
|
||||
maxLines: number;
|
||||
version: string;
|
||||
|
||||
/** @ngInject */
|
||||
constructor(
|
||||
@ -70,155 +92,74 @@ export class LokiDatasource extends DataSourceApi<LokiQuery, LokiOptions> {
|
||||
private templateSrv: TemplateSrv
|
||||
) {
|
||||
super(instanceSettings);
|
||||
|
||||
this.languageProvider = new LanguageProvider(this);
|
||||
const settingsData = instanceSettings.jsonData || {};
|
||||
this.maxLines = parseInt(settingsData.maxLines, 10) || DEFAULT_MAX_LINES;
|
||||
}
|
||||
|
||||
_request(apiUrl: string, data?: any, options?: any) {
|
||||
getVersion() {
|
||||
if (this.version) {
|
||||
return Promise.resolve(this.version);
|
||||
}
|
||||
|
||||
return this._request(RANGE_QUERY_ENDPOINT)
|
||||
.toPromise()
|
||||
.then(() => {
|
||||
this.version = 'v1';
|
||||
return this.version;
|
||||
})
|
||||
.catch((err: any) => {
|
||||
this.version = err.status !== 404 ? 'v1' : 'v0';
|
||||
return this.version;
|
||||
});
|
||||
}
|
||||
|
||||
_request(apiUrl: string, data?: any, options?: DatasourceRequestOptions): Observable<Record<string, any>> {
|
||||
const baseUrl = this.instanceSettings.url;
|
||||
const params = data ? serializeParams(data) : '';
|
||||
const url = `${baseUrl}${apiUrl}?${params}`;
|
||||
const url = `${baseUrl}${apiUrl}${params.length ? `?${params}` : ''}`;
|
||||
const req = {
|
||||
...options,
|
||||
url,
|
||||
};
|
||||
|
||||
return this.backendSrv.datasourceRequest(req);
|
||||
return from(this.backendSrv.datasourceRequest(req));
|
||||
}
|
||||
|
||||
prepareLiveTarget(target: LokiQuery, options: DataQueryRequest<LokiQuery>): LiveTarget {
|
||||
const interpolated = this.templateSrv.replace(target.expr, {}, this.interpolateQueryExpr);
|
||||
const { query, regexp } = parseQuery(interpolated);
|
||||
const refId = target.refId;
|
||||
const baseUrl = this.instanceSettings.url;
|
||||
const params = serializeParams({ query, regexp });
|
||||
const url = convertToWebSocketUrl(`${baseUrl}/api/prom/tail?${params}`);
|
||||
|
||||
return {
|
||||
query,
|
||||
regexp,
|
||||
url,
|
||||
refId,
|
||||
size: Math.min(options.maxDataPoints || Infinity, this.maxLines),
|
||||
};
|
||||
}
|
||||
|
||||
prepareQueryTarget(target: LokiQuery, options: DataQueryRequest<LokiQuery>) {
|
||||
const interpolated = this.templateSrv.replace(target.expr, {}, this.interpolateQueryExpr);
|
||||
const { query, regexp } = parseQuery(interpolated);
|
||||
const start = this.getTime(options.range.from, false);
|
||||
const end = this.getTime(options.range.to, true);
|
||||
const refId = target.refId;
|
||||
return {
|
||||
...DEFAULT_QUERY_PARAMS,
|
||||
query,
|
||||
regexp,
|
||||
start,
|
||||
end,
|
||||
limit: Math.min(options.maxDataPoints || Infinity, this.maxLines),
|
||||
refId,
|
||||
};
|
||||
}
|
||||
|
||||
processError = (err: any, target: any): DataQueryError => {
|
||||
const error: DataQueryError = {
|
||||
message: (err && err.statusText) || 'Unknown error during query transaction. Please check JS console logs.',
|
||||
refId: target.refId,
|
||||
};
|
||||
if (err.data) {
|
||||
if (typeof err.data === 'string') {
|
||||
error.message = err.data;
|
||||
} else if (err.data.error) {
|
||||
error.message = safeStringifyValue(err.data.error);
|
||||
}
|
||||
} else if (err.message) {
|
||||
error.message = err.message;
|
||||
} else if (typeof err === 'string') {
|
||||
error.message = err;
|
||||
}
|
||||
|
||||
error.status = err.status;
|
||||
error.statusText = err.statusText;
|
||||
|
||||
return error;
|
||||
};
|
||||
|
||||
processResult = (data: LokiLogsStream | LokiResponse, target: any): DataFrame[] => {
|
||||
const series: DataFrame[] = [];
|
||||
|
||||
if (Object.keys(data).length === 0) {
|
||||
return series;
|
||||
}
|
||||
|
||||
if (!(data as any).streams) {
|
||||
return [logStreamToDataFrame(data as LokiLogsStream, false, target.refId)];
|
||||
}
|
||||
|
||||
data = data as LokiResponse;
|
||||
for (const stream of data.streams || []) {
|
||||
const dataFrame = logStreamToDataFrame(stream);
|
||||
this.enhanceDataFrame(dataFrame);
|
||||
dataFrame.refId = target.refId;
|
||||
dataFrame.meta = {
|
||||
searchWords: getHighlighterExpressionsFromQuery(formatQuery(target.query, target.regexp)),
|
||||
limit: this.maxLines,
|
||||
};
|
||||
series.push(dataFrame);
|
||||
}
|
||||
|
||||
return series;
|
||||
};
|
||||
|
||||
/**
|
||||
* Runs live queries which in this case means creating a websocket and listening on it for new logs.
|
||||
* This returns a bit different dataFrame than runQueries as it returns single dataframe even if there are multiple
|
||||
* Loki streams, sets only common labels on dataframe.labels and has additional dataframe.fields.labels for unique
|
||||
* labels per row.
|
||||
*/
|
||||
runLiveQuery = (options: DataQueryRequest<LokiQuery>, target: LokiQuery): Observable<DataQueryResponse> => {
|
||||
const liveTarget = this.prepareLiveTarget(target, options);
|
||||
const stream = this.streams.getStream(liveTarget);
|
||||
return stream.pipe(
|
||||
map(data => {
|
||||
return {
|
||||
data,
|
||||
key: `loki-${liveTarget.refId}`,
|
||||
state: LoadingState.Streaming,
|
||||
};
|
||||
})
|
||||
);
|
||||
};
|
||||
|
||||
runQuery = (options: DataQueryRequest<LokiQuery>, target: LokiQuery): Observable<DataQueryResponse> => {
|
||||
const query = this.prepareQueryTarget(target, options);
|
||||
return from(
|
||||
this._request('/api/prom/query', query).catch((err: any) => {
|
||||
if (err.cancelled) {
|
||||
return err;
|
||||
}
|
||||
|
||||
const error: DataQueryError = this.processError(err, query);
|
||||
throw error;
|
||||
})
|
||||
).pipe(
|
||||
filter((response: any) => (response.cancelled ? false : true)),
|
||||
map((response: any) => {
|
||||
const data = this.processResult(response.data, query);
|
||||
return { data, key: query.refId };
|
||||
})
|
||||
);
|
||||
};
|
||||
|
||||
query(options: DataQueryRequest<LokiQuery>): Observable<DataQueryResponse> {
|
||||
const subQueries = options.targets
|
||||
const subQueries: Array<Observable<DataQueryResponse>> = [];
|
||||
const filteredTargets = options.targets
|
||||
.filter(target => target.expr && !target.hide)
|
||||
.map(target => {
|
||||
if (target.liveStreaming) {
|
||||
return this.runLiveQuery(options, target);
|
||||
}
|
||||
return this.runQuery(options, target);
|
||||
});
|
||||
.map(target => ({
|
||||
...target,
|
||||
expr: this.templateSrv.replace(target.expr, {}, this.interpolateQueryExpr),
|
||||
}));
|
||||
|
||||
if (options.exploreMode === ExploreMode.Metrics) {
|
||||
filteredTargets.forEach(target =>
|
||||
subQueries.push(
|
||||
this.runInstantQuery(target, options, filteredTargets.length),
|
||||
this.runRangeQueryWithFallback(target, options, filteredTargets.length)
|
||||
)
|
||||
);
|
||||
} else {
|
||||
filteredTargets.forEach(target =>
|
||||
subQueries.push(
|
||||
this.runRangeQueryWithFallback(target, options, filteredTargets.length).pipe(
|
||||
map(dataQueryResponse => {
|
||||
if (options.exploreMode === ExploreMode.Logs && dataQueryResponse.data.find(d => isTimeSeries(d))) {
|
||||
throw new Error(
|
||||
'Logs mode does not support queries that return time series data. Please perform a logs query or switch to Metrics mode.'
|
||||
);
|
||||
} else {
|
||||
return dataQueryResponse;
|
||||
}
|
||||
})
|
||||
)
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
// No valid targets, return the empty result to save a round trip.
|
||||
if (isEmpty(subQueries)) {
|
||||
@ -231,18 +172,216 @@ export class LokiDatasource extends DataSourceApi<LokiQuery, LokiOptions> {
|
||||
return merge(...subQueries);
|
||||
}
|
||||
|
||||
runLegacyQuery = (
|
||||
target: LokiQuery,
|
||||
options: { range?: TimeRange; maxDataPoints?: number; reverse?: boolean }
|
||||
): Observable<DataQueryResponse> => {
|
||||
if (target.liveStreaming) {
|
||||
return this.runLiveQuery(target, options);
|
||||
}
|
||||
|
||||
const range = options.range
|
||||
? { start: this.getTime(options.range.from, false), end: this.getTime(options.range.to, true) }
|
||||
: {};
|
||||
const query: LokiLegacyQueryRequest = {
|
||||
...DEFAULT_QUERY_PARAMS,
|
||||
...parseQuery(target.expr),
|
||||
...range,
|
||||
limit: Math.min(options.maxDataPoints || Infinity, this.maxLines),
|
||||
refId: target.refId,
|
||||
};
|
||||
|
||||
return this._request(LEGACY_QUERY_ENDPOINT, query).pipe(
|
||||
catchError((err: any) => this.throwUnless(err, err.cancelled, target)),
|
||||
filter((response: any) => !response.cancelled),
|
||||
map((response: { data: LokiLegacyStreamResponse }) => ({
|
||||
data: this.lokiLegacyStreamsToDataframes(response.data, query, this.maxLines, options.reverse),
|
||||
key: `${target.refId}_log`,
|
||||
}))
|
||||
);
|
||||
};
|
||||
|
||||
lokiLegacyStreamsToDataframes = (
|
||||
data: LokiLegacyStreamResult | LokiLegacyStreamResponse,
|
||||
target: { refId: string; query?: string; regexp?: string },
|
||||
limit: number,
|
||||
reverse = false
|
||||
): DataFrame[] => {
|
||||
if (Object.keys(data).length === 0) {
|
||||
return [];
|
||||
}
|
||||
|
||||
if (isLokiLogsStream(data)) {
|
||||
return [legacyLogStreamToDataFrame(data, false, target.refId)];
|
||||
}
|
||||
|
||||
const series: DataFrame[] = data.streams.map(stream => {
|
||||
const dataFrame = legacyLogStreamToDataFrame(stream, reverse);
|
||||
this.enhanceDataFrame(dataFrame);
|
||||
|
||||
return {
|
||||
...dataFrame,
|
||||
refId: target.refId,
|
||||
meta: {
|
||||
searchWords: getHighlighterExpressionsFromQuery(formatQuery(target.query, target.regexp)),
|
||||
limit: this.maxLines,
|
||||
},
|
||||
};
|
||||
});
|
||||
|
||||
return series;
|
||||
};
|
||||
|
||||
runInstantQuery = (
|
||||
target: LokiQuery,
|
||||
options: DataQueryRequest<LokiQuery>,
|
||||
responseListLength: number
|
||||
): Observable<DataQueryResponse> => {
|
||||
const timeNs = this.getTime(options.range.to, true);
|
||||
const query = {
|
||||
query: parseQuery(target.expr).query,
|
||||
time: `${timeNs + (1e9 - (timeNs % 1e9))}`,
|
||||
limit: Math.min(options.maxDataPoints || Infinity, this.maxLines),
|
||||
};
|
||||
|
||||
return this._request(INSTANT_QUERY_ENDPOINT, query).pipe(
|
||||
catchError((err: any) => this.throwUnless(err, err.cancelled, target)),
|
||||
filter((response: any) => (response.cancelled ? false : true)),
|
||||
map((response: { data: LokiResponse }) => {
|
||||
if (response.data.data.resultType === LokiResultType.Stream) {
|
||||
throw new Error('Metrics mode does not support logs. Use an aggregation or switch to Logs mode.');
|
||||
}
|
||||
|
||||
return {
|
||||
data: [lokiResultsToTableModel(response.data.data.result, responseListLength, target.refId, true)],
|
||||
key: `${target.refId}_instant`,
|
||||
};
|
||||
})
|
||||
);
|
||||
};
|
||||
|
||||
createRangeQuery(target: LokiQuery, options: RangeQueryOptions): LokiRangeQueryRequest {
|
||||
const { query } = parseQuery(target.expr);
|
||||
let range: { start?: number; end?: number; step?: number } = {};
|
||||
if (options.range && options.intervalMs) {
|
||||
const startNs = this.getTime(options.range.from, false);
|
||||
const endNs = this.getTime(options.range.to, true);
|
||||
const rangeMs = Math.ceil((endNs - startNs) / 1e6);
|
||||
const step = this.adjustInterval(options.intervalMs, rangeMs) / 1000;
|
||||
const alignedTimes = {
|
||||
start: startNs - (startNs % 1e9),
|
||||
end: endNs + (1e9 - (endNs % 1e9)),
|
||||
};
|
||||
|
||||
range = {
|
||||
start: alignedTimes.start,
|
||||
end: alignedTimes.end,
|
||||
step,
|
||||
};
|
||||
}
|
||||
|
||||
return {
|
||||
...DEFAULT_QUERY_PARAMS,
|
||||
...range,
|
||||
query,
|
||||
limit: Math.min(options.maxDataPoints || Infinity, this.maxLines),
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Attempts to send a query to /loki/api/v1/query_range but falls back to the legacy endpoint if necessary.
|
||||
*/
|
||||
runRangeQueryWithFallback = (
|
||||
target: LokiQuery,
|
||||
options: RangeQueryOptions,
|
||||
responseListLength = 1
|
||||
): Observable<DataQueryResponse> => {
|
||||
if (target.liveStreaming) {
|
||||
return this.runLiveQuery(target, options);
|
||||
}
|
||||
|
||||
const query = this.createRangeQuery(target, options);
|
||||
return this._request(RANGE_QUERY_ENDPOINT, query).pipe(
|
||||
catchError((err: any) => this.throwUnless(err, err.cancelled || err.status === 404, target)),
|
||||
filter((response: any) => (response.cancelled ? false : true)),
|
||||
switchMap((response: { data: LokiResponse; status: number }) =>
|
||||
iif<DataQueryResponse, DataQueryResponse>(
|
||||
() => response.status === 404,
|
||||
defer(() => this.runLegacyQuery(target, options)),
|
||||
defer(() =>
|
||||
processRangeQueryResponse(response.data, target, query, responseListLength, this.maxLines, options.reverse)
|
||||
)
|
||||
)
|
||||
)
|
||||
);
|
||||
};
|
||||
|
||||
createLegacyLiveTarget(target: LokiQuery, options: { maxDataPoints?: number }): LegacyTarget {
|
||||
const { query, regexp } = parseQuery(target.expr);
|
||||
const baseUrl = this.instanceSettings.url;
|
||||
const params = serializeParams({ query });
|
||||
|
||||
return {
|
||||
query,
|
||||
regexp,
|
||||
url: convertToWebSocketUrl(`${baseUrl}/api/prom/tail?${params}`),
|
||||
refId: target.refId,
|
||||
size: Math.min(options.maxDataPoints || Infinity, this.maxLines),
|
||||
};
|
||||
}
|
||||
|
||||
createLiveTarget(target: LokiQuery, options: { maxDataPoints?: number }): LegacyTarget {
|
||||
const { query, regexp } = parseQuery(target.expr);
|
||||
const baseUrl = this.instanceSettings.url;
|
||||
const params = serializeParams({ query });
|
||||
|
||||
return {
|
||||
query,
|
||||
regexp,
|
||||
url: convertToWebSocketUrl(`${baseUrl}/loki/api/v1/tail?${params}`),
|
||||
refId: target.refId,
|
||||
size: Math.min(options.maxDataPoints || Infinity, this.maxLines),
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Runs live queries which in this case means creating a websocket and listening on it for new logs.
|
||||
* This returns a bit different dataFrame than runQueries as it returns single dataframe even if there are multiple
|
||||
* Loki streams, sets only common labels on dataframe.labels and has additional dataframe.fields.labels for unique
|
||||
* labels per row.
|
||||
*/
|
||||
runLiveQuery = (target: LokiQuery, options: { maxDataPoints?: number }): Observable<DataQueryResponse> => {
|
||||
const liveTarget = this.createLiveTarget(target, options);
|
||||
|
||||
return from(this.getVersion()).pipe(
|
||||
mergeMap(version =>
|
||||
iif(
|
||||
() => version === 'v1',
|
||||
defer(() => this.streams.getStream(liveTarget)),
|
||||
defer(() => {
|
||||
const legacyTarget = this.createLegacyLiveTarget(target, options);
|
||||
return this.streams.getLegacyStream(legacyTarget);
|
||||
})
|
||||
)
|
||||
),
|
||||
map(data => ({
|
||||
data,
|
||||
key: `loki-${liveTarget.refId}`,
|
||||
state: LoadingState.Streaming,
|
||||
}))
|
||||
);
|
||||
};
|
||||
|
||||
interpolateVariablesInQueries(queries: LokiQuery[]): LokiQuery[] {
|
||||
let expandedQueries = queries;
|
||||
if (queries && queries.length > 0) {
|
||||
expandedQueries = queries.map(query => {
|
||||
const expandedQuery = {
|
||||
...query,
|
||||
datasource: this.name,
|
||||
expr: this.templateSrv.replace(query.expr, {}, this.interpolateQueryExpr),
|
||||
};
|
||||
return expandedQuery;
|
||||
});
|
||||
if (queries && queries.length) {
|
||||
expandedQueries = queries.map(query => ({
|
||||
...query,
|
||||
datasource: this.name,
|
||||
expr: this.templateSrv.replace(query.expr, {}, this.interpolateQueryExpr),
|
||||
}));
|
||||
}
|
||||
|
||||
return expandedQueries;
|
||||
}
|
||||
|
||||
@ -250,13 +389,11 @@ export class LokiDatasource extends DataSourceApi<LokiQuery, LokiOptions> {
|
||||
return this.languageProvider.importQueries(queries, originMeta.id);
|
||||
}
|
||||
|
||||
metadataRequest(url: string, params?: any) {
|
||||
// HACK to get label values for {job=|}, will be replaced when implementing LokiQueryField
|
||||
const apiUrl = url.replace('v1', 'prom');
|
||||
return this._request(apiUrl, params, { silent: true }).then((res: DataQueryResponse) => {
|
||||
const data: any = { data: { data: res.data.values || [] } };
|
||||
return data;
|
||||
});
|
||||
async metadataRequest(url: string, params?: Record<string, string>) {
|
||||
const res = await this._request(url, params, { silent: true }).toPromise();
|
||||
return {
|
||||
data: { data: res.data.values || [] },
|
||||
};
|
||||
}
|
||||
|
||||
interpolateQueryExpr(value: any, variable: any) {
|
||||
@ -288,6 +425,7 @@ export class LokiDatasource extends DataSourceApi<LokiQuery, LokiOptions> {
|
||||
default:
|
||||
break;
|
||||
}
|
||||
|
||||
const expression = formatQuery(selector, parsed.regexp);
|
||||
return { ...query, expr: expression };
|
||||
}
|
||||
@ -297,70 +435,87 @@ export class LokiDatasource extends DataSourceApi<LokiQuery, LokiOptions> {
|
||||
}
|
||||
|
||||
getTime(date: string | DateTime, roundUp: boolean) {
|
||||
if (isString(date)) {
|
||||
if (typeof date === 'string') {
|
||||
date = dateMath.parse(date, roundUp);
|
||||
}
|
||||
|
||||
return Math.ceil(date.valueOf() * 1e6);
|
||||
}
|
||||
|
||||
prepareLogRowContextQueryTarget = (row: LogRowModel, limit: number, direction: 'BACKWARD' | 'FORWARD') => {
|
||||
const query = Object.keys(row.labels)
|
||||
.map(label => {
|
||||
return `${label}="${row.labels[label]}"`;
|
||||
})
|
||||
.join(',');
|
||||
const contextTimeBuffer = 2 * 60 * 60 * 1000 * 1e6; // 2h buffer
|
||||
const timeEpochNs = row.timeEpochMs * 1e6;
|
||||
|
||||
const commontTargetOptons = {
|
||||
limit,
|
||||
query: `{${query}}`,
|
||||
direction,
|
||||
};
|
||||
|
||||
if (direction === 'BACKWARD') {
|
||||
return {
|
||||
...commontTargetOptons,
|
||||
start: timeEpochNs - contextTimeBuffer,
|
||||
end: row.timestamp, // using RFC3339Nano format to avoid precision loss
|
||||
direction,
|
||||
};
|
||||
} else {
|
||||
return {
|
||||
...commontTargetOptons,
|
||||
start: row.timestamp, // start param in Loki API is inclusive so we'll have to filter out the row that this request is based from
|
||||
end: timeEpochNs + contextTimeBuffer,
|
||||
};
|
||||
}
|
||||
};
|
||||
|
||||
getLogRowContext = async (row: LogRowModel, options?: LokiContextQueryOptions) => {
|
||||
getLogRowContext = (row: LogRowModel, options?: LokiContextQueryOptions) => {
|
||||
const target = this.prepareLogRowContextQueryTarget(
|
||||
row,
|
||||
(options && options.limit) || 10,
|
||||
(options && options.direction) || 'BACKWARD'
|
||||
);
|
||||
const series: DataFrame[] = [];
|
||||
|
||||
try {
|
||||
const reverse = options && options.direction === 'FORWARD';
|
||||
const result = await this._request('/api/prom/query', target);
|
||||
if (result.data) {
|
||||
for (const stream of result.data.streams || []) {
|
||||
series.push(logStreamToDataFrame(stream, reverse));
|
||||
}
|
||||
}
|
||||
const reverse = options && options.direction === 'FORWARD';
|
||||
return this._request(RANGE_QUERY_ENDPOINT, target)
|
||||
.pipe(
|
||||
catchError((err: any) => {
|
||||
if (err.status === 404) {
|
||||
return of(err);
|
||||
}
|
||||
|
||||
const error: DataQueryError = {
|
||||
message: 'Error during context query. Please check JS console logs.',
|
||||
status: err.status,
|
||||
statusText: err.statusText,
|
||||
};
|
||||
throw error;
|
||||
}),
|
||||
switchMap((res: { data: LokiStreamResponse; status: number }) =>
|
||||
iif(
|
||||
() => res.status === 404,
|
||||
this._request(LEGACY_QUERY_ENDPOINT, target).pipe(
|
||||
catchError((err: any) => {
|
||||
const error: DataQueryError = {
|
||||
message: 'Error during context query. Please check JS console logs.',
|
||||
status: err.status,
|
||||
statusText: err.statusText,
|
||||
};
|
||||
throw error;
|
||||
}),
|
||||
map((res: { data: LokiLegacyStreamResponse }) => ({
|
||||
data: res.data ? res.data.streams.map(stream => legacyLogStreamToDataFrame(stream, reverse)) : [],
|
||||
}))
|
||||
),
|
||||
of({
|
||||
data: res.data ? res.data.data.result.map(stream => lokiStreamResultToDataFrame(stream, reverse)) : [],
|
||||
})
|
||||
)
|
||||
)
|
||||
)
|
||||
.toPromise();
|
||||
};
|
||||
|
||||
prepareLogRowContextQueryTarget = (row: LogRowModel, limit: number, direction: 'BACKWARD' | 'FORWARD') => {
|
||||
const query = Object.keys(row.labels)
|
||||
.map(label => `${label}="${row.labels[label]}"`)
|
||||
.join(',');
|
||||
|
||||
const contextTimeBuffer = 2 * 60 * 60 * 1000 * 1e6; // 2h buffer
|
||||
const timeEpochNs = row.timeEpochMs * 1e6;
|
||||
const commonTargetOptions = {
|
||||
limit,
|
||||
query: `{${query}}`,
|
||||
expr: `{${query}}`,
|
||||
direction,
|
||||
};
|
||||
|
||||
if (direction === 'BACKWARD') {
|
||||
return {
|
||||
data: series,
|
||||
...commonTargetOptions,
|
||||
start: timeEpochNs - contextTimeBuffer,
|
||||
end: timeEpochNs, // using RFC3339Nano format to avoid precision loss
|
||||
direction,
|
||||
};
|
||||
} catch (e) {
|
||||
const error: DataQueryError = {
|
||||
message: 'Error during context query. Please check JS console logs.',
|
||||
status: e.status,
|
||||
statusText: e.statusText,
|
||||
} else {
|
||||
return {
|
||||
...commonTargetOptions,
|
||||
start: timeEpochNs, // start param in Loki API is inclusive so we'll have to filter out the row that this request is based from
|
||||
end: timeEpochNs + contextTimeBuffer,
|
||||
};
|
||||
throw error;
|
||||
}
|
||||
};
|
||||
|
||||
@ -368,36 +523,52 @@ export class LokiDatasource extends DataSourceApi<LokiQuery, LokiOptions> {
|
||||
// Consider only last 10 minutes otherwise request takes too long
|
||||
const startMs = Date.now() - 10 * 60 * 1000;
|
||||
const start = `${startMs}000000`; // API expects nanoseconds
|
||||
return this._request('/api/prom/label', { start })
|
||||
.then((res: DataQueryResponse) => {
|
||||
if (res && res.data && res.data.values && res.data.values.length > 0) {
|
||||
return { status: 'success', message: 'Data source connected and labels found.' };
|
||||
}
|
||||
return {
|
||||
status: 'error',
|
||||
message:
|
||||
'Data source connected, but no labels received. Verify that Loki and Promtail is configured properly.',
|
||||
};
|
||||
})
|
||||
.catch((err: any) => {
|
||||
let message = 'Loki: ';
|
||||
if (err.statusText) {
|
||||
message += err.statusText;
|
||||
} else {
|
||||
message += 'Cannot connect to Loki';
|
||||
}
|
||||
return this._request('/loki/api/v1/label', { start })
|
||||
.pipe(
|
||||
catchError((err: any) => {
|
||||
if (err.status === 404) {
|
||||
return of(err);
|
||||
}
|
||||
|
||||
if (err.status) {
|
||||
message += `. ${err.status}`;
|
||||
}
|
||||
throw err;
|
||||
}),
|
||||
switchMap((response: { data: { values: string[] }; status: number }) =>
|
||||
iif<DataQueryResponse, DataQueryResponse>(
|
||||
() => response.status === 404,
|
||||
defer(() => this._request('/api/prom/label', { start })),
|
||||
defer(() => of(response))
|
||||
)
|
||||
),
|
||||
map(res =>
|
||||
res && res.data && res.data.values && res.data.values.length
|
||||
? { status: 'success', message: 'Data source connected and labels found.' }
|
||||
: {
|
||||
status: 'error',
|
||||
message:
|
||||
'Data source connected, but no labels received. Verify that Loki and Promtail is configured properly.',
|
||||
}
|
||||
),
|
||||
catchError((err: any) => {
|
||||
let message = 'Loki: ';
|
||||
if (err.statusText) {
|
||||
message += err.statusText;
|
||||
} else {
|
||||
message += 'Cannot connect to Loki';
|
||||
}
|
||||
|
||||
if (err.data && err.data.message) {
|
||||
message += `. ${err.data.message}`;
|
||||
} else if (err.data) {
|
||||
message += `. ${err.data}`;
|
||||
}
|
||||
return { status: 'error', message: message };
|
||||
});
|
||||
if (err.status) {
|
||||
message += `. ${err.status}`;
|
||||
}
|
||||
|
||||
if (err.data && err.data.message) {
|
||||
message += `. ${err.data.message}`;
|
||||
} else if (err.data) {
|
||||
message += `. ${err.data}`;
|
||||
}
|
||||
return of({ status: 'error', message: message });
|
||||
})
|
||||
)
|
||||
.toPromise();
|
||||
}
|
||||
|
||||
async annotationQuery(options: AnnotationQueryRequest<LokiQuery>): Promise<AnnotationEvent[]> {
|
||||
@ -405,8 +576,8 @@ export class LokiDatasource extends DataSourceApi<LokiQuery, LokiOptions> {
|
||||
return [];
|
||||
}
|
||||
|
||||
const request = queryRequestFromAnnotationOptions(options);
|
||||
const { data } = await this.runQuery(request, request.targets[0]).toPromise();
|
||||
const query = { refId: `annotation-${options.annotation.name}`, expr: options.annotation.expr };
|
||||
const { data } = await this.runRangeQueryWithFallback(query, options).toPromise();
|
||||
const annotations: AnnotationEvent[] = [];
|
||||
|
||||
for (const frame of data) {
|
||||
@ -474,29 +645,48 @@ export class LokiDatasource extends DataSourceApi<LokiQuery, LokiOptions> {
|
||||
dataFrame.fields = [...dataFrame.fields, ...Object.values(fields)];
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function queryRequestFromAnnotationOptions(options: AnnotationQueryRequest<LokiQuery>): DataQueryRequest<LokiQuery> {
|
||||
const refId = `annotation-${options.annotation.name}`;
|
||||
const target: LokiQuery = { refId, expr: options.annotation.expr };
|
||||
throwUnless = (err: any, condition: boolean, target: LokiQuery) => {
|
||||
if (condition) {
|
||||
return of(err);
|
||||
}
|
||||
|
||||
return {
|
||||
requestId: refId,
|
||||
range: options.range,
|
||||
targets: [target],
|
||||
dashboardId: options.dashboard.id,
|
||||
scopedVars: null,
|
||||
startTime: Date.now(),
|
||||
|
||||
// This should mean the default defined on datasource is used.
|
||||
maxDataPoints: 0,
|
||||
|
||||
// Dummy values, are required in type but not used here.
|
||||
timezone: 'utc',
|
||||
panelId: 0,
|
||||
interval: '',
|
||||
intervalMs: 0,
|
||||
const error: DataQueryError = this.processError(err, target);
|
||||
throw error;
|
||||
};
|
||||
|
||||
processError = (err: any, target: LokiQuery): DataQueryError => {
|
||||
const error: DataQueryError = {
|
||||
message: (err && err.statusText) || 'Unknown error during query transaction. Please check JS console logs.',
|
||||
refId: target.refId,
|
||||
};
|
||||
|
||||
if (err.data) {
|
||||
if (typeof err.data === 'string') {
|
||||
error.message = err.data;
|
||||
} else if (err.data.error) {
|
||||
error.message = safeStringifyValue(err.data.error);
|
||||
}
|
||||
} else if (err.message) {
|
||||
error.message = err.message;
|
||||
} else if (typeof err === 'string') {
|
||||
error.message = err;
|
||||
}
|
||||
|
||||
error.status = err.status;
|
||||
error.statusText = err.statusText;
|
||||
|
||||
return error;
|
||||
};
|
||||
|
||||
adjustInterval(interval: number, range: number) {
|
||||
// Loki will drop queries that might return more than 11000 data points.
|
||||
// Calibrate interval if it is too small.
|
||||
if (interval !== 0 && range / interval > 11000) {
|
||||
interval = Math.ceil(range / 11000);
|
||||
}
|
||||
return Math.max(interval, 1000);
|
||||
}
|
||||
}
|
||||
|
||||
export function lokiRegularEscape(value: any) {
|
||||
@ -514,3 +704,7 @@ export function lokiSpecialRegexEscape(value: any) {
|
||||
}
|
||||
|
||||
export default LokiDatasource;
|
||||
|
||||
function isTimeSeries(data: any): data is TimeSeries {
|
||||
return data.hasOwnProperty('datapoints');
|
||||
}
|
||||
|
@ -10,6 +10,18 @@ import { beforeEach } from 'test/lib/common';
|
||||
import { makeMockLokiDatasource } from './mocks';
|
||||
import LokiDatasource from './datasource';
|
||||
|
||||
jest.mock('app/store/store', () => ({
|
||||
store: {
|
||||
getState: jest.fn().mockReturnValue({
|
||||
explore: {
|
||||
left: {
|
||||
mode: 'Logs',
|
||||
},
|
||||
},
|
||||
}),
|
||||
},
|
||||
}));
|
||||
|
||||
describe('Language completion provider', () => {
|
||||
const datasource = makeMockLokiDatasource({});
|
||||
|
||||
|
@ -3,15 +3,18 @@ import _ from 'lodash';
|
||||
|
||||
// Services & Utils
|
||||
import { parseSelector, labelRegexp, selectorRegexp } from 'app/plugins/datasource/prometheus/language_utils';
|
||||
import syntax from './syntax';
|
||||
import { store } from 'app/store/store';
|
||||
import syntax, { FUNCTIONS } from './syntax';
|
||||
|
||||
// Types
|
||||
import { LokiQuery } from './types';
|
||||
import { dateTime, AbsoluteTimeRange, LanguageProvider, HistoryItem } from '@grafana/data';
|
||||
import { PromQuery } from '../prometheus/types';
|
||||
import { RATE_RANGES } from '../prometheus/promql';
|
||||
|
||||
import LokiDatasource from './datasource';
|
||||
import { CompletionItem, TypeaheadInput, TypeaheadOutput } from '@grafana/ui';
|
||||
import { ExploreMode } from 'app/types/explore';
|
||||
|
||||
const DEFAULT_KEYS = ['job', 'namespace'];
|
||||
const EMPTY_SELECTOR = '{}';
|
||||
@ -32,14 +35,15 @@ type TypeaheadContext = {
|
||||
|
||||
export function addHistoryMetadata(item: CompletionItem, history: LokiHistoryItem[]): CompletionItem {
|
||||
const cutoffTs = Date.now() - HISTORY_COUNT_CUTOFF;
|
||||
const historyForItem = history.filter(h => h.ts > cutoffTs && (h.query.expr as string) === item.label);
|
||||
const count = historyForItem.length;
|
||||
const historyForItem = history.filter(h => h.ts > cutoffTs && h.query.expr === item.label);
|
||||
let hint = `Queried ${historyForItem.length} times in the last 24h.`;
|
||||
const recent = historyForItem[0];
|
||||
let hint = `Queried ${count} times in the last 24h.`;
|
||||
|
||||
if (recent) {
|
||||
const lastQueried = dateTime(recent.ts).fromNow();
|
||||
hint = `${hint} Last queried ${lastQueried}.`;
|
||||
}
|
||||
|
||||
return {
|
||||
...item,
|
||||
documentation: hint,
|
||||
@ -72,7 +76,7 @@ export default class LokiLanguageProvider extends LanguageProvider {
|
||||
return syntax;
|
||||
}
|
||||
|
||||
request = (url: string, params?: any) => {
|
||||
request = (url: string, params?: any): Promise<{ data: { data: string[] } }> => {
|
||||
return this.datasource.metadataRequest(url, params);
|
||||
};
|
||||
|
||||
@ -87,6 +91,7 @@ export default class LokiLanguageProvider extends LanguageProvider {
|
||||
return [];
|
||||
});
|
||||
}
|
||||
|
||||
return this.startTask;
|
||||
};
|
||||
|
||||
@ -108,15 +113,53 @@ export default class LokiLanguageProvider extends LanguageProvider {
|
||||
* @param context.history Optional used only in getEmptyCompletionItems
|
||||
*/
|
||||
async provideCompletionItems(input: TypeaheadInput, context?: TypeaheadContext): Promise<TypeaheadOutput> {
|
||||
const { wrapperClasses, value } = input;
|
||||
const exploreMode = store.getState().explore.left.mode;
|
||||
|
||||
if (exploreMode === ExploreMode.Logs) {
|
||||
return this.provideLogCompletionItems(input, context);
|
||||
}
|
||||
|
||||
return this.provideMetricsCompletionItems(input, context);
|
||||
}
|
||||
|
||||
async provideMetricsCompletionItems(input: TypeaheadInput, context?: TypeaheadContext): Promise<TypeaheadOutput> {
|
||||
const { wrapperClasses, value, prefix, text } = input;
|
||||
|
||||
// Local text properties
|
||||
const empty = value.document.text.length === 0;
|
||||
const selectedLines = value.document.getTextsAtRange(value.selection);
|
||||
const currentLine = selectedLines.size === 1 ? selectedLines.first().getText() : null;
|
||||
|
||||
const nextCharacter = currentLine ? currentLine[value.selection.anchor.offset] : null;
|
||||
|
||||
// Syntax spans have 3 classes by default. More indicate a recognized token
|
||||
const tokenRecognized = wrapperClasses.length > 3;
|
||||
|
||||
// Non-empty prefix, but not inside known token
|
||||
const prefixUnrecognized = prefix && !tokenRecognized;
|
||||
|
||||
// Prevent suggestions in `function(|suffix)`
|
||||
const noSuffix = !nextCharacter || nextCharacter === ')';
|
||||
|
||||
// Empty prefix is safe if it does not immediately follow a complete expression and has no text after it
|
||||
const safeEmptyPrefix = prefix === '' && !text.match(/^[\]})\s]+$/) && noSuffix;
|
||||
|
||||
// About to type next operand if preceded by binary operator
|
||||
const operatorsPattern = /[+\-*/^%]/;
|
||||
const isNextOperand = text.match(operatorsPattern);
|
||||
|
||||
// Determine candidates by CSS context
|
||||
if (_.includes(wrapperClasses, 'context-labels')) {
|
||||
if (wrapperClasses.includes('context-range')) {
|
||||
// Suggestions for metric[|]
|
||||
return this.getRangeCompletionItems();
|
||||
} else if (wrapperClasses.includes('context-labels')) {
|
||||
// Suggestions for {|} and {foo=|}
|
||||
return await this.getLabelCompletionItems(input, context);
|
||||
} else if (empty) {
|
||||
return this.getEmptyCompletionItems(context || {});
|
||||
return this.getEmptyCompletionItems(context || {}, ExploreMode.Metrics);
|
||||
} else if ((prefixUnrecognized && noSuffix) || safeEmptyPrefix || isNextOperand) {
|
||||
// Show term suggestions in a couple of scenarios
|
||||
return this.getTermCompletionItems();
|
||||
}
|
||||
|
||||
return {
|
||||
@ -124,13 +167,30 @@ export default class LokiLanguageProvider extends LanguageProvider {
|
||||
};
|
||||
}
|
||||
|
||||
getEmptyCompletionItems(context: any): TypeaheadOutput {
|
||||
async provideLogCompletionItems(input: TypeaheadInput, context?: TypeaheadContext): Promise<TypeaheadOutput> {
|
||||
const { wrapperClasses, value } = input;
|
||||
// Local text properties
|
||||
const empty = value.document.text.length === 0;
|
||||
// Determine candidates by CSS context
|
||||
if (wrapperClasses.includes('context-labels')) {
|
||||
// Suggestions for {|} and {foo=|}
|
||||
return await this.getLabelCompletionItems(input, context);
|
||||
} else if (empty) {
|
||||
return this.getEmptyCompletionItems(context || {}, ExploreMode.Logs);
|
||||
}
|
||||
|
||||
return {
|
||||
suggestions: [],
|
||||
};
|
||||
}
|
||||
|
||||
getEmptyCompletionItems(context: TypeaheadContext, mode?: ExploreMode): TypeaheadOutput {
|
||||
const { history } = context;
|
||||
const suggestions = [];
|
||||
|
||||
if (history && history.length > 0) {
|
||||
if (history && history.length) {
|
||||
const historyItems = _.chain(history)
|
||||
.map((h: any) => h.query.expr)
|
||||
.map(h => h.query.expr)
|
||||
.filter()
|
||||
.uniq()
|
||||
.take(HISTORY_ITEM_COUNT)
|
||||
@ -146,9 +206,38 @@ export default class LokiLanguageProvider extends LanguageProvider {
|
||||
});
|
||||
}
|
||||
|
||||
if (mode === ExploreMode.Metrics) {
|
||||
const termCompletionItems = this.getTermCompletionItems();
|
||||
suggestions.push(...termCompletionItems.suggestions);
|
||||
}
|
||||
|
||||
return { suggestions };
|
||||
}
|
||||
|
||||
getTermCompletionItems = (): TypeaheadOutput => {
|
||||
const suggestions = [];
|
||||
|
||||
suggestions.push({
|
||||
prefixMatch: true,
|
||||
label: 'Functions',
|
||||
items: FUNCTIONS.map(suggestion => ({ ...suggestion, kind: 'function' })),
|
||||
});
|
||||
|
||||
return { suggestions };
|
||||
};
|
||||
|
||||
getRangeCompletionItems(): TypeaheadOutput {
|
||||
return {
|
||||
context: 'context-range',
|
||||
suggestions: [
|
||||
{
|
||||
label: 'Range vector',
|
||||
items: [...RATE_RANGES],
|
||||
},
|
||||
],
|
||||
};
|
||||
}
|
||||
|
||||
async getLabelCompletionItems(
|
||||
{ text, wrapperClasses, labelKey, value }: TypeaheadInput,
|
||||
{ absoluteRange }: any
|
||||
@ -186,7 +275,7 @@ export default class LokiLanguageProvider extends LanguageProvider {
|
||||
const labelKeys = this.labelKeys[selector] || DEFAULT_KEYS;
|
||||
if (labelKeys) {
|
||||
const possibleKeys = _.difference(labelKeys, existingKeys);
|
||||
if (possibleKeys.length > 0) {
|
||||
if (possibleKeys.length) {
|
||||
context = 'context-labels';
|
||||
suggestions.push({ label: `Labels`, items: possibleKeys.map(wrapLabel) });
|
||||
}
|
||||
@ -223,40 +312,40 @@ export default class LokiLanguageProvider extends LanguageProvider {
|
||||
|
||||
// Consider only first selector in query
|
||||
const selectorMatch = query.match(selectorRegexp);
|
||||
if (selectorMatch) {
|
||||
const selector = selectorMatch[0];
|
||||
const labels: { [key: string]: { value: any; operator: any } } = {};
|
||||
selector.replace(labelRegexp, (_, key, operator, value) => {
|
||||
labels[key] = { value, operator };
|
||||
return '';
|
||||
});
|
||||
|
||||
// Keep only labels that exist on origin and target datasource
|
||||
await this.start(); // fetches all existing label keys
|
||||
const existingKeys = this.labelKeys[EMPTY_SELECTOR];
|
||||
let labelsToKeep: { [key: string]: { value: any; operator: any } } = {};
|
||||
if (existingKeys && existingKeys.length > 0) {
|
||||
// Check for common labels
|
||||
for (const key in labels) {
|
||||
if (existingKeys && existingKeys.includes(key)) {
|
||||
// Should we check for label value equality here?
|
||||
labelsToKeep[key] = labels[key];
|
||||
}
|
||||
}
|
||||
} else {
|
||||
// Keep all labels by default
|
||||
labelsToKeep = labels;
|
||||
}
|
||||
|
||||
const labelKeys = Object.keys(labelsToKeep).sort();
|
||||
const cleanSelector = labelKeys
|
||||
.map(key => `${key}${labelsToKeep[key].operator}${labelsToKeep[key].value}`)
|
||||
.join(',');
|
||||
|
||||
return ['{', cleanSelector, '}'].join('');
|
||||
if (!selectorMatch) {
|
||||
return '';
|
||||
}
|
||||
|
||||
return '';
|
||||
const selector = selectorMatch[0];
|
||||
const labels: { [key: string]: { value: any; operator: any } } = {};
|
||||
selector.replace(labelRegexp, (_, key, operator, value) => {
|
||||
labels[key] = { value, operator };
|
||||
return '';
|
||||
});
|
||||
|
||||
// Keep only labels that exist on origin and target datasource
|
||||
await this.start(); // fetches all existing label keys
|
||||
const existingKeys = this.labelKeys[EMPTY_SELECTOR];
|
||||
let labelsToKeep: { [key: string]: { value: any; operator: any } } = {};
|
||||
if (existingKeys && existingKeys.length) {
|
||||
// Check for common labels
|
||||
for (const key in labels) {
|
||||
if (existingKeys && existingKeys.includes(key)) {
|
||||
// Should we check for label value equality here?
|
||||
labelsToKeep[key] = labels[key];
|
||||
}
|
||||
}
|
||||
} else {
|
||||
// Keep all labels by default
|
||||
labelsToKeep = labels;
|
||||
}
|
||||
|
||||
const labelKeys = Object.keys(labelsToKeep).sort();
|
||||
const cleanSelector = labelKeys
|
||||
.map(key => `${key}${labelsToKeep[key].operator}${labelsToKeep[key].value}`)
|
||||
.join(',');
|
||||
|
||||
return ['{', cleanSelector, '}'].join('');
|
||||
}
|
||||
|
||||
async fetchLogLabels(absoluteRange: AbsoluteTimeRange): Promise<any> {
|
||||
@ -265,8 +354,8 @@ export default class LokiLanguageProvider extends LanguageProvider {
|
||||
this.logLabelFetchTs = Date.now();
|
||||
|
||||
const res = await this.request(url, rangeToParams(absoluteRange));
|
||||
const body = await (res.data || res.json());
|
||||
const labelKeys = body.data.slice().sort();
|
||||
const labelKeys = res.data.data.slice().sort();
|
||||
|
||||
this.labelKeys = {
|
||||
...this.labelKeys,
|
||||
[EMPTY_SELECTOR]: labelKeys,
|
||||
@ -291,15 +380,14 @@ export default class LokiLanguageProvider extends LanguageProvider {
|
||||
const url = `/api/prom/label/${key}/values`;
|
||||
try {
|
||||
const res = await this.request(url, rangeToParams(absoluteRange));
|
||||
const body = await (res.data || res.json());
|
||||
const values = body.data.slice().sort();
|
||||
const values = res.data.data.slice().sort();
|
||||
|
||||
// Add to label options
|
||||
this.logLabelOptions = this.logLabelOptions.map(keyOption => {
|
||||
if (keyOption.value === key) {
|
||||
return {
|
||||
...keyOption,
|
||||
children: values.map((value: string) => ({ label: value, value })),
|
||||
children: values.map(value => ({ label: value, value })),
|
||||
};
|
||||
}
|
||||
return keyOption;
|
||||
|
@ -36,7 +36,7 @@ describe('Live Stream Tests', () => {
|
||||
fakeSocket = new Subject<any>();
|
||||
const labels: Labels = { job: 'varlogs' };
|
||||
const target = makeTarget('fake', labels);
|
||||
const stream = new LiveStreams().getStream(target);
|
||||
const stream = new LiveStreams().getLegacyStream(target);
|
||||
expect.assertions(4);
|
||||
|
||||
const tests = [
|
||||
@ -74,21 +74,21 @@ describe('Live Stream Tests', () => {
|
||||
it('returns the same subscription if the url matches existing one', () => {
|
||||
fakeSocket = new Subject<any>();
|
||||
const liveStreams = new LiveStreams();
|
||||
const stream1 = liveStreams.getStream(makeTarget('url_to_match'));
|
||||
const stream2 = liveStreams.getStream(makeTarget('url_to_match'));
|
||||
const stream1 = liveStreams.getLegacyStream(makeTarget('url_to_match'));
|
||||
const stream2 = liveStreams.getLegacyStream(makeTarget('url_to_match'));
|
||||
expect(stream1).toBe(stream2);
|
||||
});
|
||||
|
||||
it('returns new subscription when the previous unsubscribed', () => {
|
||||
fakeSocket = new Subject<any>();
|
||||
const liveStreams = new LiveStreams();
|
||||
const stream1 = liveStreams.getStream(makeTarget('url_to_match'));
|
||||
const stream1 = liveStreams.getLegacyStream(makeTarget('url_to_match'));
|
||||
const subscription = stream1.subscribe({
|
||||
next: noop,
|
||||
});
|
||||
subscription.unsubscribe();
|
||||
|
||||
const stream2 = liveStreams.getStream(makeTarget('url_to_match'));
|
||||
const stream2 = liveStreams.getLegacyStream(makeTarget('url_to_match'));
|
||||
expect(stream1).not.toBe(stream2);
|
||||
});
|
||||
|
||||
@ -101,7 +101,7 @@ describe('Live Stream Tests', () => {
|
||||
spy.and.returnValue(fakeSocket);
|
||||
|
||||
const liveStreams = new LiveStreams();
|
||||
const stream1 = liveStreams.getStream(makeTarget('url_to_match'));
|
||||
const stream1 = liveStreams.getLegacyStream(makeTarget('url_to_match'));
|
||||
const subscription = stream1.subscribe({
|
||||
next: noop,
|
||||
});
|
||||
|
@ -1,14 +1,14 @@
|
||||
import { DataFrame, FieldType, parseLabels, KeyValue, CircularDataFrame } from '@grafana/data';
|
||||
import { Observable } from 'rxjs';
|
||||
import { webSocket } from 'rxjs/webSocket';
|
||||
import { LokiResponse } from './types';
|
||||
import { LokiLegacyStreamResponse, LokiTailResponse } from './types';
|
||||
import { finalize, map } from 'rxjs/operators';
|
||||
import { appendResponseToBufferedData } from './result_transformer';
|
||||
import { appendLegacyResponseToBufferedData, appendResponseToBufferedData } from './result_transformer';
|
||||
|
||||
/**
|
||||
* Maps directly to a query in the UI (refId is key)
|
||||
*/
|
||||
export interface LiveTarget {
|
||||
export interface LegacyTarget {
|
||||
query: string;
|
||||
regexp: string;
|
||||
url: string;
|
||||
@ -16,6 +16,13 @@ export interface LiveTarget {
|
||||
size: number;
|
||||
}
|
||||
|
||||
export interface LiveTarget {
|
||||
query: string;
|
||||
delay_for?: string;
|
||||
limit?: string;
|
||||
start?: string;
|
||||
}
|
||||
|
||||
/**
|
||||
* Cache of websocket streams that can be returned as observable. In case there already is a stream for particular
|
||||
* target it is returned and on subscription returns the latest dataFrame.
|
||||
@ -23,26 +30,59 @@ export interface LiveTarget {
|
||||
export class LiveStreams {
|
||||
private streams: KeyValue<Observable<DataFrame[]>> = {};
|
||||
|
||||
getStream(target: LiveTarget): Observable<DataFrame[]> {
|
||||
getLegacyStream(target: LegacyTarget): Observable<DataFrame[]> {
|
||||
let stream = this.streams[target.url];
|
||||
if (!stream) {
|
||||
const data = new CircularDataFrame({ capacity: target.size });
|
||||
data.addField({ name: 'ts', type: FieldType.time, config: { title: 'Time' } });
|
||||
data.addField({ name: 'line', type: FieldType.string }).labels = parseLabels(target.query);
|
||||
data.addField({ name: 'labels', type: FieldType.other }); // The labels for each line
|
||||
data.addField({ name: 'id', type: FieldType.string });
|
||||
|
||||
stream = webSocket(target.url).pipe(
|
||||
finalize(() => {
|
||||
delete this.streams[target.url];
|
||||
}),
|
||||
map((response: LokiResponse) => {
|
||||
appendResponseToBufferedData(response, data);
|
||||
return [data];
|
||||
})
|
||||
);
|
||||
this.streams[target.url] = stream;
|
||||
if (stream) {
|
||||
return stream;
|
||||
}
|
||||
|
||||
const data = new CircularDataFrame({ capacity: target.size });
|
||||
data.addField({ name: 'ts', type: FieldType.time, config: { title: 'Time' } });
|
||||
data.addField({ name: 'line', type: FieldType.string }).labels = parseLabels(target.query);
|
||||
data.addField({ name: 'labels', type: FieldType.other }); // The labels for each line
|
||||
data.addField({ name: 'id', type: FieldType.string });
|
||||
|
||||
stream = webSocket(target.url).pipe(
|
||||
finalize(() => {
|
||||
delete this.streams[target.url];
|
||||
}),
|
||||
|
||||
map((response: LokiLegacyStreamResponse) => {
|
||||
appendLegacyResponseToBufferedData(response, data);
|
||||
return [data];
|
||||
})
|
||||
);
|
||||
this.streams[target.url] = stream;
|
||||
|
||||
return stream;
|
||||
}
|
||||
|
||||
getStream(target: LegacyTarget): Observable<DataFrame[]> {
|
||||
let stream = this.streams[target.url];
|
||||
|
||||
if (stream) {
|
||||
return stream;
|
||||
}
|
||||
|
||||
const data = new CircularDataFrame({ capacity: target.size });
|
||||
data.addField({ name: 'ts', type: FieldType.time, config: { title: 'Time' } });
|
||||
data.addField({ name: 'line', type: FieldType.string }).labels = parseLabels(target.query);
|
||||
data.addField({ name: 'labels', type: FieldType.other }); // The labels for each line
|
||||
data.addField({ name: 'id', type: FieldType.string });
|
||||
|
||||
stream = webSocket(target.url).pipe(
|
||||
finalize(() => {
|
||||
delete this.streams[target.url];
|
||||
}),
|
||||
|
||||
map((response: LokiTailResponse) => {
|
||||
appendResponseToBufferedData(response, data);
|
||||
return [data];
|
||||
})
|
||||
);
|
||||
this.streams[target.url] = stream;
|
||||
|
||||
return stream;
|
||||
}
|
||||
}
|
||||
|
@ -4,10 +4,10 @@
|
||||
"id": "loki",
|
||||
"category": "logging",
|
||||
|
||||
"logs": true,
|
||||
"metrics": true,
|
||||
"alerting": false,
|
||||
"annotations": true,
|
||||
"logs": true,
|
||||
"streaming": true,
|
||||
|
||||
"queryOptions": {
|
||||
|
@ -1,8 +1,8 @@
|
||||
import { logStreamToDataFrame, appendResponseToBufferedData } from './result_transformer';
|
||||
import { legacyLogStreamToDataFrame, appendLegacyResponseToBufferedData } from './result_transformer';
|
||||
import { FieldType, MutableDataFrame } from '@grafana/data';
|
||||
import { LokiLogsStream } from './types';
|
||||
import { LokiLegacyStreamResult } from './types';
|
||||
|
||||
const streams: LokiLogsStream[] = [
|
||||
const streams: LokiLegacyStreamResult[] = [
|
||||
{
|
||||
labels: '{foo="bar"}',
|
||||
entries: [
|
||||
@ -25,7 +25,7 @@ const streams: LokiLogsStream[] = [
|
||||
|
||||
describe('logStreamToDataFrame', () => {
|
||||
it('converts streams to series', () => {
|
||||
const data = streams.map(stream => logStreamToDataFrame(stream));
|
||||
const data = streams.map(stream => legacyLogStreamToDataFrame(stream));
|
||||
|
||||
expect(data.length).toBe(2);
|
||||
expect(data[0].fields[1].labels['foo']).toEqual('bar');
|
||||
@ -46,7 +46,7 @@ describe('appendResponseToBufferedData', () => {
|
||||
data.addField({ name: 'labels', type: FieldType.other });
|
||||
data.addField({ name: 'id', type: FieldType.string });
|
||||
|
||||
appendResponseToBufferedData({ streams }, data);
|
||||
appendLegacyResponseToBufferedData({ streams }, data);
|
||||
expect(data.get(0)).toEqual({
|
||||
ts: '1970-01-01T00:00:00Z',
|
||||
line: "foo: [32m'bar'[39m",
|
||||
|
@ -1,18 +1,43 @@
|
||||
import { LokiLogsStream, LokiResponse } from './types';
|
||||
import _ from 'lodash';
|
||||
|
||||
import {
|
||||
parseLabels,
|
||||
FieldType,
|
||||
TimeSeries,
|
||||
Labels,
|
||||
DataFrame,
|
||||
ArrayVector,
|
||||
MutableDataFrame,
|
||||
findUniqueLabels,
|
||||
dateTime,
|
||||
} from '@grafana/data';
|
||||
import templateSrv from 'app/features/templating/template_srv';
|
||||
import TableModel from 'app/core/table_model';
|
||||
import {
|
||||
LokiLegacyStreamResult,
|
||||
LokiRangeQueryRequest,
|
||||
LokiResponse,
|
||||
LokiMatrixResult,
|
||||
LokiVectorResult,
|
||||
TransformerOptions,
|
||||
LokiLegacyStreamResponse,
|
||||
LokiResultType,
|
||||
LokiStreamResult,
|
||||
LokiTailResponse,
|
||||
LokiQuery,
|
||||
} from './types';
|
||||
|
||||
import { formatQuery, getHighlighterExpressionsFromQuery } from './query_utils';
|
||||
import { of } from 'rxjs';
|
||||
|
||||
/**
|
||||
* Transforms LokiLogStream structure into a dataFrame. Used when doing standard queries.
|
||||
*/
|
||||
export function logStreamToDataFrame(stream: LokiLogsStream, reverse?: boolean, refId?: string): DataFrame {
|
||||
export function legacyLogStreamToDataFrame(
|
||||
stream: LokiLegacyStreamResult,
|
||||
reverse?: boolean,
|
||||
refId?: string
|
||||
): DataFrame {
|
||||
let labels: Labels = stream.parsedLabels;
|
||||
if (!labels && stream.labels) {
|
||||
labels = parseLabels(stream.labels);
|
||||
@ -44,6 +69,39 @@ export function logStreamToDataFrame(stream: LokiLogsStream, reverse?: boolean,
|
||||
};
|
||||
}
|
||||
|
||||
export function lokiStreamResultToDataFrame(stream: LokiStreamResult, reverse?: boolean, refId?: string): DataFrame {
|
||||
const labels: Labels = stream.stream;
|
||||
|
||||
const times = new ArrayVector<string>([]);
|
||||
const lines = new ArrayVector<string>([]);
|
||||
const uids = new ArrayVector<string>([]);
|
||||
|
||||
for (const [ts, line] of stream.values) {
|
||||
times.add(dateTime(Number.parseFloat(ts) / 1e6).format('YYYY-MM-DD HH:mm:ss'));
|
||||
lines.add(line);
|
||||
uids.add(
|
||||
`${ts}_${Object.entries(labels)
|
||||
.map(([key, val]) => `${key}=${val}`)
|
||||
.join('')}`
|
||||
);
|
||||
}
|
||||
|
||||
if (reverse) {
|
||||
times.buffer = times.buffer.reverse();
|
||||
lines.buffer = lines.buffer.reverse();
|
||||
}
|
||||
|
||||
return {
|
||||
refId,
|
||||
fields: [
|
||||
{ name: 'ts', type: FieldType.time, config: { title: 'Time' }, values: times }, // Time
|
||||
{ name: 'line', type: FieldType.string, config: {}, values: lines, labels }, // Line
|
||||
{ name: 'id', type: FieldType.string, config: {}, values: uids },
|
||||
],
|
||||
length: times.length,
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Transform LokiResponse data and appends it to MutableDataFrame. Used for streaming where the dataFrame can be
|
||||
* a CircularDataFrame creating a fixed size rolling buffer.
|
||||
@ -51,35 +109,304 @@ export function logStreamToDataFrame(stream: LokiLogsStream, reverse?: boolean,
|
||||
* @param response
|
||||
* @param data Needs to have ts, line, labels, id as fields
|
||||
*/
|
||||
export function appendResponseToBufferedData(response: LokiResponse, data: MutableDataFrame) {
|
||||
export function appendLegacyResponseToBufferedData(response: LokiLegacyStreamResponse, data: MutableDataFrame) {
|
||||
// Should we do anything with: response.dropped_entries?
|
||||
|
||||
const streams: LokiLogsStream[] = response.streams;
|
||||
if (streams && streams.length) {
|
||||
const { values } = data;
|
||||
let baseLabels: Labels = {};
|
||||
for (const f of data.fields) {
|
||||
if (f.type === FieldType.string) {
|
||||
if (f.labels) {
|
||||
baseLabels = f.labels;
|
||||
}
|
||||
break;
|
||||
const streams: LokiLegacyStreamResult[] = response.streams;
|
||||
if (!streams || !streams.length) {
|
||||
return;
|
||||
}
|
||||
|
||||
let baseLabels: Labels = {};
|
||||
for (const f of data.fields) {
|
||||
if (f.type === FieldType.string) {
|
||||
if (f.labels) {
|
||||
baseLabels = f.labels;
|
||||
}
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
for (const stream of streams) {
|
||||
// Find unique labels
|
||||
const labels = parseLabels(stream.labels);
|
||||
const unique = findUniqueLabels(labels, baseLabels);
|
||||
for (const stream of streams) {
|
||||
// Find unique labels
|
||||
const labels = parseLabels(stream.labels);
|
||||
const unique = findUniqueLabels(labels, baseLabels);
|
||||
|
||||
// Add each line
|
||||
for (const entry of stream.entries) {
|
||||
const ts = entry.ts || entry.timestamp;
|
||||
values.ts.add(ts);
|
||||
values.line.add(entry.line);
|
||||
values.labels.add(unique);
|
||||
values.id.add(`${ts}_${stream.labels}`);
|
||||
}
|
||||
// Add each line
|
||||
for (const entry of stream.entries) {
|
||||
const ts = entry.ts || entry.timestamp;
|
||||
data.values.ts.add(ts);
|
||||
data.values.line.add(entry.line);
|
||||
data.values.labels.add(unique);
|
||||
data.values.id.add(`${ts}_${stream.labels}`);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export function appendResponseToBufferedData(response: LokiTailResponse, data: MutableDataFrame) {
|
||||
// Should we do anything with: response.dropped_entries?
|
||||
|
||||
const streams: LokiStreamResult[] = response.streams;
|
||||
if (!streams || !streams.length) {
|
||||
return;
|
||||
}
|
||||
|
||||
let baseLabels: Labels = {};
|
||||
for (const f of data.fields) {
|
||||
if (f.type === FieldType.string) {
|
||||
if (f.labels) {
|
||||
baseLabels = f.labels;
|
||||
}
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
for (const stream of streams) {
|
||||
// Find unique labels
|
||||
const unique = findUniqueLabels(stream.stream, baseLabels);
|
||||
|
||||
// Add each line
|
||||
for (const [ts, line] of stream.values) {
|
||||
data.values.ts.add(parseInt(ts, 10) / 1e6);
|
||||
data.values.line.add(line);
|
||||
data.values.labels.add(unique);
|
||||
data.values.id.add(
|
||||
`${ts}_${Object.entries(unique)
|
||||
.map(([key, val]) => `${key}=${val}`)
|
||||
.join('')}`
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function lokiMatrixToTimeSeries(matrixResult: LokiMatrixResult, options: TransformerOptions): TimeSeries {
|
||||
return {
|
||||
target: createMetricLabel(matrixResult.metric, options),
|
||||
datapoints: lokiPointsToTimeseriesPoints(matrixResult.values, options),
|
||||
tags: matrixResult.metric,
|
||||
};
|
||||
}
|
||||
|
||||
function lokiPointsToTimeseriesPoints(
|
||||
data: Array<[number, string]>,
|
||||
options: TransformerOptions
|
||||
): Array<[number, number]> {
|
||||
const stepMs = options.step * 1000;
|
||||
const datapoints: Array<[number, number]> = [];
|
||||
|
||||
let baseTimestampMs = options.start / 1e6;
|
||||
for (const [time, value] of data) {
|
||||
let datapointValue = parseFloat(value);
|
||||
if (isNaN(datapointValue)) {
|
||||
datapointValue = null;
|
||||
}
|
||||
|
||||
const timestamp = time * 1000;
|
||||
for (let t = baseTimestampMs; t < timestamp; t += stepMs) {
|
||||
datapoints.push([0, t]);
|
||||
}
|
||||
|
||||
baseTimestampMs = timestamp + stepMs;
|
||||
datapoints.push([datapointValue, timestamp]);
|
||||
}
|
||||
|
||||
const endTimestamp = options.end / 1e6;
|
||||
for (let t = baseTimestampMs; t <= endTimestamp; t += stepMs) {
|
||||
datapoints.push([0, t]);
|
||||
}
|
||||
|
||||
return datapoints;
|
||||
}
|
||||
|
||||
export function lokiResultsToTableModel(
|
||||
lokiResults: Array<LokiMatrixResult | LokiVectorResult>,
|
||||
resultCount: number,
|
||||
refId: string,
|
||||
valueWithRefId?: boolean
|
||||
): TableModel {
|
||||
if (!lokiResults || lokiResults.length === 0) {
|
||||
return new TableModel();
|
||||
}
|
||||
|
||||
// Collect all labels across all metrics
|
||||
const metricLabels: Set<string> = new Set<string>(
|
||||
lokiResults.reduce((acc, cur) => acc.concat(Object.keys(cur.metric)), [])
|
||||
);
|
||||
|
||||
// Sort metric labels, create columns for them and record their index
|
||||
const sortedLabels = [...metricLabels.values()].sort();
|
||||
const table = new TableModel();
|
||||
table.columns = [
|
||||
{ text: 'Time', type: FieldType.time },
|
||||
...sortedLabels.map(label => ({ text: label, filterable: true })),
|
||||
{ text: resultCount > 1 || valueWithRefId ? `Value #${refId}` : 'Value', type: FieldType.time },
|
||||
];
|
||||
|
||||
// Populate rows, set value to empty string when label not present.
|
||||
lokiResults.forEach(series => {
|
||||
const newSeries: LokiMatrixResult = {
|
||||
metric: series.metric,
|
||||
values: (series as LokiVectorResult).value
|
||||
? [(series as LokiVectorResult).value]
|
||||
: (series as LokiMatrixResult).values,
|
||||
};
|
||||
|
||||
if (!newSeries.values) {
|
||||
return;
|
||||
}
|
||||
|
||||
if (!newSeries.metric) {
|
||||
table.rows.concat(newSeries.values.map(([a, b]) => [a * 1000, parseFloat(b)]));
|
||||
} else {
|
||||
table.rows.push(
|
||||
...newSeries.values.map(([a, b]) => [
|
||||
a * 1000,
|
||||
...sortedLabels.map(label => newSeries.metric[label] || ''),
|
||||
parseFloat(b),
|
||||
])
|
||||
);
|
||||
}
|
||||
});
|
||||
|
||||
return table;
|
||||
}
|
||||
|
||||
function createMetricLabel(labelData: { [key: string]: string }, options?: TransformerOptions) {
|
||||
let label =
|
||||
options === undefined || _.isEmpty(options.legendFormat)
|
||||
? getOriginalMetricName(labelData)
|
||||
: renderTemplate(templateSrv.replace(options.legendFormat), labelData);
|
||||
|
||||
if (!label || label === '{}') {
|
||||
label = options.query;
|
||||
}
|
||||
return label;
|
||||
}
|
||||
|
||||
function renderTemplate(aliasPattern: string, aliasData: { [key: string]: string }) {
|
||||
const aliasRegex = /\{\{\s*(.+?)\s*\}\}/g;
|
||||
return aliasPattern.replace(aliasRegex, (_, g1) => (aliasData[g1] ? aliasData[g1] : g1));
|
||||
}
|
||||
|
||||
function getOriginalMetricName(labelData: { [key: string]: string }) {
|
||||
const metricName = labelData.__name__ || '';
|
||||
delete labelData.__name__;
|
||||
const labelPart = Object.entries(labelData)
|
||||
.map(label => `${label[0]}="${label[1]}"`)
|
||||
.join(',');
|
||||
return `${metricName}{${labelPart}}`;
|
||||
}
|
||||
|
||||
export function lokiStreamsToDataframes(
|
||||
data: LokiStreamResult[],
|
||||
target: { refId: string; expr?: string; regexp?: string },
|
||||
limit: number,
|
||||
reverse = false
|
||||
): DataFrame[] {
|
||||
const series: DataFrame[] = data.map(stream => ({
|
||||
...lokiStreamResultToDataFrame(stream, reverse),
|
||||
refId: target.refId,
|
||||
meta: {
|
||||
searchWords: getHighlighterExpressionsFromQuery(formatQuery(target.expr, target.regexp)),
|
||||
limit,
|
||||
},
|
||||
}));
|
||||
|
||||
return series;
|
||||
}
|
||||
|
||||
export function lokiLegacyStreamsToDataframes(
|
||||
data: LokiLegacyStreamResult | LokiLegacyStreamResponse,
|
||||
target: { refId: string; query?: string; regexp?: string },
|
||||
limit: number,
|
||||
reverse = false
|
||||
): DataFrame[] {
|
||||
if (Object.keys(data).length === 0) {
|
||||
return [];
|
||||
}
|
||||
|
||||
if (isLokiLogsStream(data)) {
|
||||
return [legacyLogStreamToDataFrame(data, reverse, target.refId)];
|
||||
}
|
||||
|
||||
const series: DataFrame[] = data.streams.map(stream => ({
|
||||
...legacyLogStreamToDataFrame(stream, reverse),
|
||||
refId: target.refId,
|
||||
meta: {
|
||||
searchWords: getHighlighterExpressionsFromQuery(formatQuery(target.query, target.regexp)),
|
||||
limit,
|
||||
},
|
||||
}));
|
||||
|
||||
return series;
|
||||
}
|
||||
|
||||
export function rangeQueryResponseToTimeSeries(
|
||||
response: LokiResponse,
|
||||
query: LokiRangeQueryRequest,
|
||||
target: LokiQuery,
|
||||
responseListLength: number
|
||||
): TimeSeries[] {
|
||||
const transformerOptions: TransformerOptions = {
|
||||
format: target.format,
|
||||
legendFormat: target.legendFormat,
|
||||
start: query.start,
|
||||
end: query.end,
|
||||
step: query.step,
|
||||
query: query.query,
|
||||
responseListLength,
|
||||
refId: target.refId,
|
||||
valueWithRefId: target.valueWithRefId,
|
||||
};
|
||||
|
||||
switch (response.data.resultType) {
|
||||
case LokiResultType.Vector:
|
||||
return response.data.result.map(vecResult =>
|
||||
lokiMatrixToTimeSeries({ metric: vecResult.metric, values: [vecResult.value] }, transformerOptions)
|
||||
);
|
||||
case LokiResultType.Matrix:
|
||||
return response.data.result.map(matrixResult => lokiMatrixToTimeSeries(matrixResult, transformerOptions));
|
||||
default:
|
||||
return [];
|
||||
}
|
||||
}
|
||||
|
||||
export function processRangeQueryResponse(
|
||||
response: LokiResponse,
|
||||
target: LokiQuery,
|
||||
query: LokiRangeQueryRequest,
|
||||
responseListLength: number,
|
||||
limit: number,
|
||||
reverse = false
|
||||
) {
|
||||
switch (response.data.resultType) {
|
||||
case LokiResultType.Stream:
|
||||
return of({
|
||||
data: lokiStreamsToDataframes(response.data.result, target, limit, reverse),
|
||||
key: `${target.refId}_log`,
|
||||
});
|
||||
|
||||
case LokiResultType.Vector:
|
||||
case LokiResultType.Matrix:
|
||||
return of({
|
||||
data: rangeQueryResponseToTimeSeries(
|
||||
response,
|
||||
query,
|
||||
{
|
||||
...target,
|
||||
format: 'time_series',
|
||||
},
|
||||
responseListLength
|
||||
),
|
||||
key: target.refId,
|
||||
});
|
||||
default:
|
||||
throw new Error(`Unknown result type "${(response.data as any).resultType}".`);
|
||||
}
|
||||
}
|
||||
|
||||
export function isLokiLogsStream(
|
||||
data: LokiLegacyStreamResult | LokiLegacyStreamResponse
|
||||
): data is LokiLegacyStreamResult {
|
||||
return !data.hasOwnProperty('streams');
|
||||
}
|
||||
|
@ -1,14 +1,90 @@
|
||||
import { Grammar } from 'prismjs';
|
||||
import { CompletionItem } from '@grafana/ui';
|
||||
|
||||
/* tslint:disable max-line-length */
|
||||
const AGGREGATION_OPERATORS: CompletionItem[] = [
|
||||
{
|
||||
label: 'sum',
|
||||
insertText: 'sum',
|
||||
documentation: 'Calculate sum over dimensions',
|
||||
},
|
||||
{
|
||||
label: 'min',
|
||||
insertText: 'min',
|
||||
documentation: 'Select minimum over dimensions',
|
||||
},
|
||||
{
|
||||
label: 'max',
|
||||
insertText: 'max',
|
||||
documentation: 'Select maximum over dimensions',
|
||||
},
|
||||
{
|
||||
label: 'avg',
|
||||
insertText: 'avg',
|
||||
documentation: 'Calculate the average over dimensions',
|
||||
},
|
||||
{
|
||||
label: 'stddev',
|
||||
insertText: 'stddev',
|
||||
documentation: 'Calculate population standard deviation over dimensions',
|
||||
},
|
||||
{
|
||||
label: 'stdvar',
|
||||
insertText: 'stdvar',
|
||||
documentation: 'Calculate population standard variance over dimensions',
|
||||
},
|
||||
{
|
||||
label: 'count',
|
||||
insertText: 'count',
|
||||
documentation: 'Count number of elements in the vector',
|
||||
},
|
||||
{
|
||||
label: 'bottomk',
|
||||
insertText: 'bottomk',
|
||||
documentation: 'Smallest k elements by sample value',
|
||||
},
|
||||
{
|
||||
label: 'topk',
|
||||
insertText: 'topk',
|
||||
documentation: 'Largest k elements by sample value',
|
||||
},
|
||||
];
|
||||
|
||||
export const RANGE_VEC_FUNCTIONS = [
|
||||
{
|
||||
insertText: 'count_over_time',
|
||||
label: 'count_over_time',
|
||||
detail: 'count_over_time(range-vector)',
|
||||
documentation: 'The count of all values in the specified interval.',
|
||||
},
|
||||
{
|
||||
insertText: 'rate',
|
||||
label: 'rate',
|
||||
detail: 'rate(v range-vector)',
|
||||
documentation:
|
||||
"Calculates the per-second average rate of increase of the time series in the range vector. Breaks in monotonicity (such as counter resets due to target restarts) are automatically adjusted for. Also, the calculation extrapolates to the ends of the time range, allowing for missed scrapes or imperfect alignment of scrape cycles with the range's time period.",
|
||||
},
|
||||
];
|
||||
|
||||
// Combined completion list: aggregation operators followed by range-vector functions.
export const FUNCTIONS = AGGREGATION_OPERATORS.concat(RANGE_VEC_FUNCTIONS);
|
||||
|
||||
const tokenizer: Grammar = {
|
||||
comment: {
|
||||
pattern: /(^|[^\n])#.*/,
|
||||
lookbehind: true,
|
||||
},
|
||||
'context-aggregation': {
|
||||
pattern: /((without|by)\s*)\([^)]*\)/, // by ()
|
||||
lookbehind: true,
|
||||
inside: {
|
||||
'label-key': {
|
||||
pattern: /[^(),\s][^,)]*[^),\s]*/,
|
||||
alias: 'attr-name',
|
||||
},
|
||||
punctuation: /[()]/,
|
||||
},
|
||||
},
|
||||
'context-labels': {
|
||||
pattern: /(^|\s)\{[^}]*(?=})/,
|
||||
pattern: /\{[^}]*(?=})/,
|
||||
lookbehind: true,
|
||||
inside: {
|
||||
'label-key': {
|
||||
@ -23,9 +99,31 @@ const tokenizer: Grammar = {
|
||||
punctuation: /[{]/,
|
||||
},
|
||||
},
|
||||
// number: /\b-?\d+((\.\d*)?([eE][+-]?\d+)?)?\b/,
|
||||
function: new RegExp(`\\b(?:${FUNCTIONS.map(f => f.label).join('|')})(?=\\s*\\()`, 'i'),
|
||||
'context-range': [
|
||||
{
|
||||
pattern: /\[[^\]]*(?=\])/, // [1m]
|
||||
inside: {
|
||||
'range-duration': {
|
||||
pattern: /\b\d+[smhdwy]\b/i,
|
||||
alias: 'number',
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
pattern: /(offset\s+)\w+/, // offset 1m
|
||||
lookbehind: true,
|
||||
inside: {
|
||||
'range-duration': {
|
||||
pattern: /\b\d+[smhdwy]\b/i,
|
||||
alias: 'number',
|
||||
},
|
||||
},
|
||||
},
|
||||
],
|
||||
number: /\b-?\d+((\.\d*)?([eE][+-]?\d+)?)?\b/,
|
||||
operator: new RegExp(`/&&?|\\|?\\||!=?|<(?:=>?|<|>)?|>[>=]?`, 'i'),
|
||||
punctuation: /[{}`,.]/,
|
||||
punctuation: /[{}()`,.]/,
|
||||
};
|
||||
|
||||
export default tokenizer;
|
||||
|
@ -1,10 +1,47 @@
|
||||
import { Labels, DataQuery, DataSourceJsonData } from '@grafana/data';
|
||||
|
||||
/**
 * Parameters for a query issued against the legacy (pre-`/loki/` prefix)
 * Loki query endpoint.
 */
export interface LokiLegacyQueryRequest {
  // LogQL selector/expression to execute.
  query: string;
  // Maximum number of entries to return.
  limit?: number;
  // Start of the queried time range; unit not visible here — presumably an
  // epoch timestamp, confirm against the caller.
  start?: number;
  // End of the queried time range; same unit caveat as `start`.
  end?: number;
  // Time ordering of the returned entries.
  direction?: 'BACKWARD' | 'FORWARD';
  // Regex filter applied to log lines by the legacy API.
  regexp?: string;

  // Grafana query identifier, used to correlate results with the issuing query.
  refId: string;
}
|
||||
|
||||
/**
 * Parameters for an instant query (single evaluation timestamp) against the
 * new Loki API.
 */
export interface LokiInstantQueryRequest {
  // LogQL expression to evaluate.
  query: string;
  // Maximum number of entries to return.
  limit?: number;
  // Evaluation timestamp, sent as a string (exact encoding not visible here —
  // confirm against the request builder).
  time?: string;
  // Time ordering of the returned entries.
  direction?: 'BACKWARD' | 'FORWARD';
}
|
||||
|
||||
/**
 * Parameters for a range query (evaluation over a time window) against the
 * new Loki API.
 */
export interface LokiRangeQueryRequest {
  // LogQL expression to evaluate.
  query: string;
  // Maximum number of entries to return.
  limit?: number;
  // Start of the queried time range; unit not visible here — presumably an
  // epoch timestamp, confirm against the caller.
  start?: number;
  // End of the queried time range; same unit caveat as `start`.
  end?: number;
  // Query resolution step between evaluated points.
  step?: number;
  // Time ordering of the returned entries.
  direction?: 'BACKWARD' | 'FORWARD';
}
|
||||
|
||||
/**
 * `data.resultType` values returned by the new Loki query endpoints; used as
 * the discriminant across LokiVectorResponse / LokiMatrixResponse /
 * LokiStreamResponse.
 *
 * NOTE: the stream variant is the plural string 'streams', unlike the
 * singular enum member name.
 */
export enum LokiResultType {
  Stream = 'streams',
  Vector = 'vector',
  Matrix = 'matrix',
}
|
||||
|
||||
/**
 * A Loki query as configured in a Grafana panel or Explore
 * (extends the generic DataQuery with Loki-specific options).
 */
export interface LokiQuery extends DataQuery {
  // LogQL expression.
  expr: string;
  // When true, results are streamed live (tailing) rather than fetched once.
  liveStreaming?: boolean;
  // NOTE(review): `query` and `regexp` mirror the legacy request fields —
  // presumably kept for legacy-datasource support; confirm against usages.
  query?: string;
  regexp?: string;
  // Result format hint; set internally to 'time_series' before response
  // transformation (see the rangeQueryResponseToTimeSeries call site).
  format?: string;
  // Reverses the ordering of results when set.
  reverse?: boolean;
  // Template for series legend names.
  legendFormat?: string;
  // When true, the refId is appended to the value/series name.
  valueWithRefId?: boolean;
}
|
||||
|
||||
export interface LokiOptions extends DataSourceJsonData {
|
||||
@ -12,11 +49,46 @@ export interface LokiOptions extends DataSourceJsonData {
|
||||
derivedFields?: DerivedFieldConfig[];
|
||||
}
|
||||
|
||||
export interface LokiResponse {
|
||||
streams: LokiLogsStream[];
|
||||
/**
 * One sample of an instant-query (vector) result: a labelled metric with a
 * single [time, value] pair.
 */
export interface LokiVectorResult {
  // Metric labels as a flat name -> value map.
  metric: { [label: string]: string };
  // [timestamp, sample value encoded as string] — presumably the
  // Prometheus-style response encoding; confirm against the Loki API docs.
  value: [number, string];
}
|
||||
|
||||
export interface LokiLogsStream {
|
||||
/**
 * Response envelope for an instant query returning a vector result.
 * Discriminated from the other response shapes by `data.resultType`.
 */
export interface LokiVectorResponse {
  // Request status reported by Loki (e.g. success/error string).
  status: string;
  data: {
    resultType: LokiResultType.Vector;
    result: LokiVectorResult[];
  };
}
|
||||
|
||||
/**
 * One series of a range-query (matrix) result: a labelled metric with its
 * list of [time, value] samples.
 */
export interface LokiMatrixResult {
  // Metric labels as a flat name -> value map.
  metric: { [label: string]: string };
  // Samples as [timestamp, value-as-string] tuples — presumably the
  // Prometheus-style encoding; confirm against the Loki API docs.
  values: Array<[number, string]>;
}
|
||||
|
||||
/**
 * Response envelope for a range query returning a matrix result.
 * Discriminated from the other response shapes by `data.resultType`.
 */
export interface LokiMatrixResponse {
  // Request status reported by Loki (e.g. success/error string).
  status: string;
  data: {
    resultType: LokiResultType.Matrix;
    result: LokiMatrixResult[];
  };
}
|
||||
|
||||
/**
 * One log stream of the new Loki API: the stream's label set plus its
 * log entries.
 */
export interface LokiStreamResult {
  // Stream labels as a flat name -> value map.
  stream: Record<string, string>;
  // Entries as [timestamp, log line] tuples; the timestamp is a string —
  // presumably nanosecond epoch, confirm against the Loki API docs.
  values: Array<[string, string]>;
}
|
||||
|
||||
/**
 * Response envelope for a log query returning streams (new Loki API).
 * Discriminated from the other response shapes by `data.resultType`.
 */
export interface LokiStreamResponse {
  // Request status reported by Loki (e.g. success/error string).
  status: string;
  data: {
    resultType: LokiResultType.Stream;
    result: LokiStreamResult[];
  };
}
|
||||
|
||||
export interface LokiLegacyStreamResult {
|
||||
labels: string;
|
||||
entries: LokiLogsStreamEntry[];
|
||||
search?: string;
|
||||
@ -24,6 +96,21 @@ export interface LokiLogsStream {
|
||||
uniqueLabels?: Labels;
|
||||
}
|
||||
|
||||
/**
 * Response envelope of the legacy (pre-`/loki/` prefix) log query endpoint:
 * a bare list of legacy stream results.
 */
export interface LokiLegacyStreamResponse {
  streams: LokiLegacyStreamResult[];
}
|
||||
|
||||
/**
 * Payload received while live-tailing with the new Loki API — presumably from
 * the tail (websocket) endpoint; confirm against the live-streaming code path.
 */
export interface LokiTailResponse {
  // Streams with new entries since the previous message.
  streams: LokiStreamResult[];
  // Entries Loki dropped from the tail, each with its label set and timestamp.
  dropped_entries?: Array<{
    labels: Record<string, string>;
    timestamp: string;
  }>;
}
|
||||
|
||||
// Union of every per-item result shape across the new and legacy Loki endpoints.
export type LokiResult = LokiVectorResult | LokiMatrixResult | LokiStreamResult | LokiLegacyStreamResult;
// Union of the full response envelopes of the new endpoints, discriminated by
// `data.resultType` (see LokiResultType).
export type LokiResponse = LokiVectorResponse | LokiMatrixResponse | LokiStreamResponse;
|
||||
|
||||
export interface LokiLogsStreamEntry {
|
||||
line: string;
|
||||
ts: string;
|
||||
@ -41,3 +128,15 @@ export type DerivedFieldConfig = {
|
||||
name: string;
|
||||
url?: string;
|
||||
};
|
||||
|
||||
/**
 * Options passed to the Loki response-to-frame transformation (see the
 * rangeQueryResponseToTimeSeries call site, where format is set to
 * 'time_series' and responseListLength is forwarded).
 */
export interface TransformerOptions {
  // Desired output format, e.g. 'time_series'.
  format: string;
  // Template for series legend names.
  legendFormat: string;
  // Query resolution step.
  step: number;
  // Start of the queried time range; unit not visible here — confirm.
  start: number;
  // End of the queried time range; same unit caveat as `start`.
  end: number;
  // The LogQL expression that produced the response.
  query: string;
  // Number of responses in the overall result list (used when labelling series).
  responseListLength: number;
  // Grafana query identifier of the originating query.
  refId: string;
  // When true, the refId is appended to the value/series name.
  valueWithRefId?: boolean;
}
|
||||
|
@ -6,6 +6,7 @@ import { setStore } from './store';
|
||||
import { StoreState } from 'app/types/store';
|
||||
import { toggleLogActionsMiddleware } from 'app/core/middlewares/application';
|
||||
import { addReducer, createRootReducer } from '../core/reducers/root';
|
||||
import { ActionOf } from 'app/core/redux';
|
||||
|
||||
export function addRootReducer(reducers: any) {
|
||||
// this is ok now because we add reducers before configureStore is called
|
||||
@ -27,7 +28,11 @@ export function configureStore() {
|
||||
? applyMiddleware(toggleLogActionsMiddleware, thunk, logger)
|
||||
: applyMiddleware(thunk);
|
||||
|
||||
const store: any = createStore(createRootReducer(), {}, composeEnhancers(storeEnhancers));
|
||||
const store = createStore<StoreState, ActionOf<any>, any, any>(
|
||||
createRootReducer(),
|
||||
{},
|
||||
composeEnhancers(storeEnhancers)
|
||||
);
|
||||
setStore(store);
|
||||
return store;
|
||||
}
|
||||
|
@ -1,5 +1,8 @@
|
||||
export let store: any;
|
||||
import { StoreState } from 'app/types';
|
||||
import { Store } from 'redux';
|
||||
|
||||
export function setStore(newStore: any) {
|
||||
// Module-level handle to the Redux store, typed against the app's StoreState.
// Exported as a mutable binding so non-React code can reach the store after
// it has been created.
export let store: Store<StoreState>;

// Registers the configured store in this module; called from configureStore()
// during app bootstrap.
export function setStore(newStore: Store<StoreState>) {
  store = newStore;
}
|
||||
|
@ -228,6 +228,7 @@ export interface QueryOptions {
|
||||
liveStreaming?: boolean;
|
||||
showingGraph?: boolean;
|
||||
showingTable?: boolean;
|
||||
mode?: ExploreMode;
|
||||
}
|
||||
|
||||
export interface QueryTransaction {
|
||||
|
Loading…
Reference in New Issue
Block a user