Mirror of https://github.com/grafana/grafana.git (synced 2025-02-25 18:55:37 -06:00)
Elasticsearch: Visualize logs in Explore (#17605)
* explore: try to use existing mode when switching datasource
* elasticsearch: initial explore logs support
* Elasticsearch: Adds ElasticsearchOptions type; updates tests accordingly
* Elasticsearch: Adds typing to query method
* Elasticsearch: Makes maxConcurrentShardRequests optional
* Explore: Allows empty query for elasticsearch datasource
* Elasticsearch: Unifies ElasticsearchQuery interface definition; removes check for context === 'explore'
* Elasticsearch: Removes context property from ElasticsearchQuery interface; adds field property; removes metricAggs property; adds typing to metrics property
* Elasticsearch: Runs default 'empty' query when 'clear all' button is pressed
* Elasticsearch: Removes index property from ElasticsearchOptions interface
* Elasticsearch: Removes commented code from ElasticsearchQueryField.tsx
* Elasticsearch: Adds comment warning about usage of for...in to elastic_response.ts
* Elasticsearch: Adds tests related to log queries
parent 2fb45eeec8
commit eecd8d1064
Changed paths:
  devenv
  pkg/api
  public/app/features/explore/state
  public/app/plugins/datasource/elasticsearch
devenv (Elasticsearch datasource provisioning)
@@ -153,6 +153,9 @@ datasources:
     interval: Daily
     timeField: "@timestamp"
     esVersion: 70
     timeInterval: "10s"
+    logMessageField: message
+    logLevelField: fields.level

   - name: gdev-elasticsearch-v7-metricbeat
     type: elasticsearch
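The two new provisioning keys above land in the datasource's jsonData and are typed by the ElasticsearchOptions interface added in types.ts at the end of this diff. A minimal sketch of that mapping, assuming Grafana's 'app/' import alias; the variable name and values are illustrative, not part of the diff:

import { ElasticsearchOptions } from 'app/plugins/datasource/elasticsearch/types';

// Illustrative only: jsonData as the Elasticsearch datasource would receive it
// for the gdev provisioning entry above.
const provisionedJsonData: ElasticsearchOptions = {
  timeField: '@timestamp',
  esVersion: 70,
  interval: 'Daily',
  timeInterval: '10s',
  logMessageField: 'message',    // new in this commit: field rendered as the log line
  logLevelField: 'fields.level', // new in this commit: field mapped to the log level
};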
pkg/api (frontend settings)
@@ -115,11 +115,7 @@ func (hs *HTTPServer) getFrontendSettingsMap(c *m.ReqContext) (map[string]interf
 		}
 	}

-	if ds.Type == m.DS_ES {
-		dsMap["index"] = ds.Database
-	}
-
-	if ds.Type == m.DS_INFLUXDB {
+	if (ds.Type == m.DS_INFLUXDB) || (ds.Type == m.DS_ES) {
 		dsMap["database"] = ds.Database
 	}
public/app/features/explore/state (item reducer)
@@ -240,7 +240,7 @@ export const itemReducer = reducerFactory<ExploreItemState>({} as ExploreItemSta
       const supportsGraph = datasourceInstance.meta.metrics;
       const supportsLogs = datasourceInstance.meta.logs;

-      let mode = ExploreMode.Metrics;
+      let mode = state.mode || ExploreMode.Metrics;
       const supportedModes: ExploreMode[] = [];

       if (supportsGraph) {
public/app/plugins/datasource/elasticsearch/components/ElasticsearchQueryField.tsx (new file)
@@ -0,0 +1,91 @@
+import _ from 'lodash';
+import React from 'react';
+// @ts-ignore
+import PluginPrism from 'slate-prism';
+// @ts-ignore
+import Prism from 'prismjs';
+
+// dom also includes Element polyfills
+import QueryField from 'app/features/explore/QueryField';
+import { ExploreQueryFieldProps } from '@grafana/ui';
+import { ElasticDatasource } from '../datasource';
+import { ElasticsearchOptions, ElasticsearchQuery } from '../types';
+
+interface Props extends ExploreQueryFieldProps<ElasticDatasource, ElasticsearchQuery, ElasticsearchOptions> {}
+
+interface State {
+  syntaxLoaded: boolean;
+}
+
+class ElasticsearchQueryField extends React.PureComponent<Props, State> {
+  plugins: any[];
+
+  constructor(props: Props, context: React.Context<any>) {
+    super(props, context);
+
+    this.plugins = [
+      PluginPrism({
+        onlyIn: (node: any) => node.type === 'code_block',
+        getSyntax: (node: any) => 'lucene',
+      }),
+    ];
+
+    this.state = {
+      syntaxLoaded: false,
+    };
+  }
+
+  componentDidMount() {
+    this.onChangeQuery('', true);
+  }
+
+  componentWillUnmount() {}
+
+  componentDidUpdate(prevProps: Props) {
+    // if query changed from the outside (i.e. cleared via explore toolbar)
+    if (!this.props.query.isLogsQuery) {
+      this.onChangeQuery('', true);
+    }
+  }
+
+  onChangeQuery = (value: string, override?: boolean) => {
+    // Send text change to parent
+    const { query, onChange, onRunQuery } = this.props;
+    if (onChange) {
+      const nextQuery: ElasticsearchQuery = { ...query, query: value, isLogsQuery: true };
+      onChange(nextQuery);
+
+      if (override && onRunQuery) {
+        onRunQuery();
+      }
+    }
+  };
+
+  render() {
+    const { queryResponse, query } = this.props;
+    const { syntaxLoaded } = this.state;
+
+    return (
+      <>
+        <div className="gf-form-inline gf-form-inline--nowrap">
+          <div className="gf-form gf-form--grow flex-shrink-1">
+            <QueryField
+              additionalPlugins={this.plugins}
+              initialQuery={query.query}
+              onChange={this.onChangeQuery}
+              onRunQuery={this.props.onRunQuery}
+              placeholder="Enter a Lucene query"
+              portalOrigin="elasticsearch"
+              syntaxLoaded={syntaxLoaded}
+            />
+          </div>
+        </div>
+        {queryResponse && queryResponse.error ? (
+          <div className="prom-query-field-info text-error">{queryResponse.error.message}</div>
+        ) : null}
+      </>
+    );
+  }
+}
+
+export default ElasticsearchQueryField;
public/app/plugins/datasource/elasticsearch/config_ctrl.ts
@@ -11,6 +11,8 @@ export class ElasticConfigCtrl {
     const defaultMaxConcurrentShardRequests = this.current.jsonData.esVersion >= 70 ? 5 : 256;
     this.current.jsonData.maxConcurrentShardRequests =
       this.current.jsonData.maxConcurrentShardRequests || defaultMaxConcurrentShardRequests;
+    this.current.jsonData.logMessageField = this.current.jsonData.logMessageField || '';
+    this.current.jsonData.logLevelField = this.current.jsonData.logLevelField || '';
   }

   indexPatternTypes = [
public/app/plugins/datasource/elasticsearch/datasource.ts
@@ -1,11 +1,17 @@
-import angular from 'angular';
+import angular, { IQService } from 'angular';
 import _ from 'lodash';
+import { DataSourceApi, DataSourceInstanceSettings, DataQueryRequest, DataQueryResponse } from '@grafana/ui';
 import { ElasticResponse } from './elastic_response';
 import { IndexPattern } from './index_pattern';
 import { ElasticQueryBuilder } from './query_builder';
 import { toUtc } from '@grafana/ui/src/utils/moment_wrapper';
 import * as queryDef from './query_def';
+import { BackendSrv } from 'app/core/services/backend_srv';
+import { TemplateSrv } from 'app/features/templating/template_srv';
+import { TimeSrv } from 'app/features/dashboard/services/TimeSrv';
+import { ElasticsearchOptions, ElasticsearchQuery } from './types';

-export class ElasticDatasource {
+export class ElasticDatasource extends DataSourceApi<ElasticsearchQuery, ElasticsearchOptions> {
   basicAuth: string;
   withCredentials: boolean;
   url: string;
@@ -17,23 +23,44 @@ export class ElasticDatasource {
   maxConcurrentShardRequests: number;
   queryBuilder: ElasticQueryBuilder;
   indexPattern: IndexPattern;
+  logMessageField?: string;
+  logLevelField?: string;

   /** @ngInject */
-  constructor(instanceSettings, private $q, private backendSrv, private templateSrv, private timeSrv) {
+  constructor(
+    instanceSettings: DataSourceInstanceSettings<ElasticsearchOptions>,
+    private $q: IQService,
+    private backendSrv: BackendSrv,
+    private templateSrv: TemplateSrv,
+    private timeSrv: TimeSrv
+  ) {
+    super(instanceSettings);
     this.basicAuth = instanceSettings.basicAuth;
     this.withCredentials = instanceSettings.withCredentials;
     this.url = instanceSettings.url;
     this.name = instanceSettings.name;
-    this.index = instanceSettings.index;
-    this.timeField = instanceSettings.jsonData.timeField;
-    this.esVersion = instanceSettings.jsonData.esVersion;
-    this.indexPattern = new IndexPattern(instanceSettings.index, instanceSettings.jsonData.interval);
-    this.interval = instanceSettings.jsonData.timeInterval;
-    this.maxConcurrentShardRequests = instanceSettings.jsonData.maxConcurrentShardRequests;
+    this.index = instanceSettings.database;
+    const settingsData = instanceSettings.jsonData || ({} as ElasticsearchOptions);
+
+    this.timeField = settingsData.timeField;
+    this.esVersion = settingsData.esVersion;
+    this.indexPattern = new IndexPattern(this.index, settingsData.interval);
+    this.interval = settingsData.timeInterval;
+    this.maxConcurrentShardRequests = settingsData.maxConcurrentShardRequests;
     this.queryBuilder = new ElasticQueryBuilder({
       timeField: this.timeField,
       esVersion: this.esVersion,
     });
+    this.logMessageField = settingsData.logMessageField || '';
+    this.logLevelField = settingsData.logLevelField || '';
+
+    if (this.logMessageField === '') {
+      this.logMessageField = null;
+    }
+
+    if (this.logLevelField === '') {
+      this.logLevelField = null;
+    }
   }

   private request(method, url, data?) {
@@ -200,7 +227,6 @@ export class ElasticDatasource {
   }

   testDatasource() {
-    this.timeSrv.setTime({ from: 'now-1m', to: 'now' }, true);
     // validate that the index exist and has date field
     return this.getFields({ type: 'date' }).then(
       dateFields => {
@@ -240,10 +266,10 @@ export class ElasticDatasource {
     return angular.toJson(queryHeader);
   }

-  query(options) {
+  query(options: DataQueryRequest<ElasticsearchQuery>): Promise<DataQueryResponse> {
     let payload = '';
     const targets = _.cloneDeep(options.targets);
-    const sentTargets = [];
+    const sentTargets: ElasticsearchQuery[] = [];

     // add global adhoc filters to timeFilter
     const adhocFilters = this.templateSrv.getAdhocFilters(this.name);
@@ -253,16 +279,25 @@ export class ElasticDatasource {
         continue;
       }

-      if (target.alias) {
-        target.alias = this.templateSrv.replace(target.alias, options.scopedVars, 'lucene');
-      }
-
       let queryString = this.templateSrv.replace(target.query, options.scopedVars, 'lucene');
       // Elasticsearch queryString should always be '*' if empty string
       if (!queryString || queryString === '') {
         queryString = '*';
       }
-      const queryObj = this.queryBuilder.build(target, adhocFilters, queryString);
+
+      let queryObj;
+      if (target.isLogsQuery) {
+        target.bucketAggs = [queryDef.defaultBucketAgg()];
+        target.metrics = [queryDef.defaultMetricAgg()];
+        queryObj = this.queryBuilder.getLogsQuery(target, queryString);
+      } else {
+        if (target.alias) {
+          target.alias = this.templateSrv.replace(target.alias, options.scopedVars, 'lucene');
+        }
+
+        queryObj = this.queryBuilder.build(target, adhocFilters, queryString);
+      }

       const esQuery = angular.toJson(queryObj);

       const searchType = queryObj.size === 0 && this.esVersion < 5 ? 'count' : 'query_then_fetch';
@@ -270,21 +305,27 @@ export class ElasticDatasource {
       payload += header + '\n';

       payload += esQuery + '\n';

       sentTargets.push(target);
     }

     if (sentTargets.length === 0) {
-      return this.$q.when([]);
+      return Promise.resolve({ data: [] });
     }

-    payload = payload.replace(/\$timeFrom/g, options.range.from.valueOf());
-    payload = payload.replace(/\$timeTo/g, options.range.to.valueOf());
+    payload = payload.replace(/\$timeFrom/g, options.range.from.valueOf().toString());
+    payload = payload.replace(/\$timeTo/g, options.range.to.valueOf().toString());
     payload = this.templateSrv.replace(payload, options.scopedVars);

     const url = this.getMultiSearchUrl();

     return this.post(url, payload).then(res => {
-      return new ElasticResponse(sentTargets, res).getTimeSeries();
+      const er = new ElasticResponse(sentTargets, res);
+      if (sentTargets.some(target => target.isLogsQuery)) {
+        return er.getLogs(this.logMessageField, this.logLevelField);
+      }
+
+      return er.getTimeSeries();
     });
   }
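The hunks above give query() a typed signature and a logs branch: when a target carries isLogsQuery, the request body is built with getLogsQuery() and the response is converted with getLogs() instead of getTimeSeries(). A rough usage sketch, assuming an already constructed ElasticDatasource instance and the import paths used in this diff; all literal values are illustrative, not part of the diff:

import { DataQueryRequest } from '@grafana/ui';
import { toUtc } from '@grafana/ui/src/utils/moment_wrapper';
import { ElasticDatasource } from 'app/plugins/datasource/elasticsearch/datasource';
import { ElasticsearchQuery } from 'app/plugins/datasource/elasticsearch/types';

// Assume an instance constructed as in the tests below.
declare const ds: ElasticDatasource;

// Illustrative logs target: isLogsQuery routes it through getLogsQuery()/getLogs().
const logsTarget: ElasticsearchQuery = {
  refId: 'A',
  query: 'level:error',
  isLogsQuery: true,
};

// Only the fields query() actually reads are filled in; the cast keeps the sketch short.
const request = {
  range: { from: toUtc('2019-06-24T00:00:00Z'), to: toUtc('2019-06-25T00:00:00Z') },
  targets: [logsTarget],
  scopedVars: {},
} as DataQueryRequest<ElasticsearchQuery>;

// Because one target is a logs query, the resolved value comes from
// ElasticResponse.getLogs(logMessageField, logLevelField).
ds.query(request).then(result => console.log(result.data));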
@@ -380,8 +421,8 @@ export class ElasticDatasource {
     const header = this.getQueryHeader(searchType, range.from, range.to);
     let esQuery = angular.toJson(this.queryBuilder.getTermsQuery(queryDef));

-    esQuery = esQuery.replace(/\$timeFrom/g, range.from.valueOf());
-    esQuery = esQuery.replace(/\$timeTo/g, range.to.valueOf());
+    esQuery = esQuery.replace(/\$timeFrom/g, range.from.valueOf().toString());
+    esQuery = esQuery.replace(/\$timeTo/g, range.to.valueOf().toString());
     esQuery = header + '\n' + esQuery + '\n';

     const url = this.getMultiSearchUrl();
public/app/plugins/datasource/elasticsearch/elastic_response.ts
@@ -1,6 +1,8 @@
 import _ from 'lodash';
+import flatten from 'app/core/utils/flatten';
 import * as queryDef from './query_def';
 import TableModel from 'app/core/table_model';
+import { SeriesData, DataQueryResponse, toSeriesData, FieldType } from '@grafana/ui';

 export class ElasticResponse {
   constructor(private targets, private response) {
@@ -410,4 +412,142 @@ export class ElasticResponse {

     return { data: seriesList };
   }
+
+  getLogs(logMessageField?: string, logLevelField?: string): DataQueryResponse {
+    const seriesData: SeriesData[] = [];
+    const docs: any[] = [];
+
+    for (let n = 0; n < this.response.responses.length; n++) {
+      const response = this.response.responses[n];
+      if (response.error) {
+        throw this.getErrorFromElasticResponse(this.response, response.error);
+      }
+
+      const hits = response.hits;
+      let propNames: string[] = [];
+      let propName, hit, doc, i;
+
+      for (i = 0; i < hits.hits.length; i++) {
+        hit = hits.hits[i];
+        const flattened = hit._source ? flatten(hit._source, null) : {};
+        doc = {};
+        doc[this.targets[0].timeField] = null;
+        doc = {
+          ...doc,
+          _id: hit._id,
+          _type: hit._type,
+          _index: hit._index,
+          ...flattened,
+        };
+
+        // Note: the order of for...in is arbitrary and implementation-dependent
+        // and should probably not be relied upon.
+        for (propName in hit.fields) {
+          if (propNames.indexOf(propName) === -1) {
+            propNames.push(propName);
+          }
+          doc[propName] = hit.fields[propName];
+        }
+
+        for (propName in doc) {
+          if (propNames.indexOf(propName) === -1) {
+            propNames.push(propName);
+          }
+        }
+
+        doc._source = { ...flattened };
+
+        docs.push(doc);
+      }
+
+      if (docs.length > 0) {
+        propNames = propNames.sort();
+        const series: SeriesData = {
+          fields: [
+            {
+              name: this.targets[0].timeField,
+              type: FieldType.time,
+            },
+          ],
+          rows: [],
+        };
+
+        if (logMessageField) {
+          series.fields.push({
+            name: logMessageField,
+            type: FieldType.string,
+          });
+        } else {
+          series.fields.push({
+            name: '_source',
+            type: FieldType.string,
+          });
+        }
+
+        if (logLevelField) {
+          series.fields.push({
+            name: 'level',
+            type: FieldType.string,
+          });
+        }
+
+        for (const propName of propNames) {
+          if (propName === this.targets[0].timeField || propName === '_source') {
+            continue;
+          }
+
+          series.fields.push({
+            name: propName,
+            type: FieldType.string,
+          });
+        }
+
+        for (const doc of docs) {
+          const row: any[] = [];
+          row.push(doc[this.targets[0].timeField][0]);
+
+          if (logMessageField) {
+            row.push(doc[logMessageField] || '');
+          } else {
+            row.push(JSON.stringify(doc._source, null, 2));
+          }
+
+          if (logLevelField) {
+            row.push(doc[logLevelField] || '');
+          }
+
+          for (const propName of propNames) {
+            if (doc.hasOwnProperty(propName)) {
+              row.push(doc[propName]);
+            } else {
+              row.push(null);
+            }
+          }
+
+          series.rows.push(row);
+        }
+
+        seriesData.push(series);
+      }
+
+      if (response.aggregations) {
+        const aggregations = response.aggregations;
+        const target = this.targets[n];
+        const tmpSeriesList = [];
+        const table = new TableModel();
+
+        this.processBuckets(aggregations, target, tmpSeriesList, table, {}, 0);
+        this.trimDatapoints(tmpSeriesList, target);
+        this.nameSeries(tmpSeriesList, target);
+
+        for (let y = 0; y < tmpSeriesList.length; y++) {
+          const series = toSeriesData(tmpSeriesList[y]);
+          series.labels = {};
+          seriesData.push(series);
+        }
+      }
+    }
+
+    return { data: seriesData };
+  }
 }
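As a reading aid (not part of the diff): for the mocked response used in the tests further down, getLogs() produces one SeriesData holding the documents plus one produced from the count aggregation via toSeriesData(). A hand-written, abbreviated sketch of the document series shape; field order and the extra per-property string columns (_id, _index, _type, and the other flattened properties) are trimmed here:

import { SeriesData, FieldType } from '@grafana/ui';

// Abbreviated illustration of the docs series built by getLogs() when
// logMessageField is 'message'; the real output also carries one string field
// per remaining flattened property of each hit.
const docsSeriesSketch: SeriesData = {
  fields: [
    { name: '@timestamp', type: FieldType.time }, // targets[0].timeField
    { name: 'message', type: FieldType.string },  // logMessageField, or '_source' JSON when unset
    { name: 'host', type: FieldType.string },     // one of the flattened _source properties
  ],
  rows: [['2019-06-24T09:51:19.765Z', 'hello, i am a message', 'djisaodjsoad']],
};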
public/app/plugins/datasource/elasticsearch (metric aggregation controller)
@@ -1,11 +1,12 @@
 import coreModule from 'app/core/core_module';
 import _ from 'lodash';
 import * as queryDef from './query_def';
+import { ElasticsearchAggregation } from './types';

 export class ElasticMetricAggCtrl {
   /** @ngInject */
   constructor($scope, uiSegmentSrv, $q, $rootScope) {
-    const metricAggs = $scope.target.metrics;
+    const metricAggs: ElasticsearchAggregation[] = $scope.target.metrics;
     $scope.metricAggTypes = queryDef.getMetricAggTypes($scope.esVersion);
     $scope.extendedStats = queryDef.extendedStats;
     $scope.pipelineAggOptions = [];
public/app/plugins/datasource/elasticsearch/module.ts
@@ -1,14 +1,15 @@
+import { DataSourcePlugin } from '@grafana/ui';
 import { ElasticDatasource } from './datasource';
 import { ElasticQueryCtrl } from './query_ctrl';
 import { ElasticConfigCtrl } from './config_ctrl';
+import ElasticsearchQueryField from './components/ElasticsearchQueryField';

 class ElasticAnnotationsQueryCtrl {
   static templateUrl = 'partials/annotations.editor.html';
 }

-export {
-  ElasticDatasource as Datasource,
-  ElasticQueryCtrl as QueryCtrl,
-  ElasticConfigCtrl as ConfigCtrl,
-  ElasticAnnotationsQueryCtrl as AnnotationsQueryCtrl,
-};
+export const plugin = new DataSourcePlugin(ElasticDatasource)
+  .setQueryCtrl(ElasticQueryCtrl)
+  .setConfigCtrl(ElasticConfigCtrl)
+  .setExploreLogsQueryField(ElasticsearchQueryField)
+  .setAnnotationQueryCtrl(ElasticAnnotationsQueryCtrl);
public/app/plugins/datasource/elasticsearch/partials (datasource config editor template)
@@ -51,3 +51,16 @@
 		</div>
 	</div>
 </div>
+
+<b>Logs</b>
+
+<div class="gf-form-group">
+  <div class="gf-form max-width-30">
+    <span class="gf-form-label width-11">Message field name</span>
+    <input class="gf-form-input" type="text" ng-model='ctrl.current.jsonData.logMessageField' placeholder="_source" />
+  </div>
+  <div class="gf-form max-width-30">
+    <span class="gf-form-label width-11">Level field name</span>
+    <input class="gf-form-input" type="text" ng-model='ctrl.current.jsonData.logLevelField' placeholder="" />
+  </div>
+</div>
public/app/plugins/datasource/elasticsearch/plugin.json
@@ -21,6 +21,7 @@
   "alerting": true,
   "annotations": true,
   "metrics": true,
+  "logs": true,

   "queryOptions": {
     "minInterval": true
public/app/plugins/datasource/elasticsearch/query_builder.ts
@@ -367,4 +367,31 @@ export class ElasticQueryBuilder {

     return query;
   }
+
+  getLogsQuery(target, querystring) {
+    let query: any = {
+      size: 0,
+      query: {
+        bool: {
+          filter: [{ range: this.getRangeFilter() }],
+        },
+      },
+    };
+
+    if (target.query) {
+      query.query.bool.filter.push({
+        query_string: {
+          analyze_wildcard: true,
+          query: target.query,
+        },
+      });
+    }
+
+    query = this.documentQuery(query, 500);
+
+    return {
+      ...query,
+      aggs: this.build(target, null, querystring).aggs,
+    };
+  }
 }
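To make the hunk above easier to follow, here is a hand-written approximation (not generated output and not part of the diff) of the search body getLogsQuery() assembles for a target whose query is 'level:error'. The range filter is assumed to come from getRangeFilter(), the query_string filter is only added when the target has a query, documentQuery(query, 500) is assumed to turn the size-0 query into a 500-document fetch, and the aggs are reused from build():

// Approximate shape only; exact keys depend on getRangeFilter(), documentQuery() and build().
const approximateLogsBody = {
  size: 500, // assumed effect of documentQuery(query, 500)
  query: {
    bool: {
      filter: [
        { range: { '@timestamp': { gte: '$timeFrom', lte: '$timeTo', format: 'epoch_millis' } } },
        { query_string: { analyze_wildcard: true, query: 'level:error' } },
      ],
    },
  },
  // ...plus whatever sorting/script fields documentQuery() adds, and:
  aggs: {
    '2': { date_histogram: { field: '@timestamp' /* interval etc. simplified */ } },
  },
};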
public/app/plugins/datasource/elasticsearch/query_ctrl.ts
@@ -6,6 +6,7 @@ import angular from 'angular';
 import _ from 'lodash';
 import * as queryDef from './query_def';
 import { QueryCtrl } from 'app/plugins/sdk';
+import { ElasticsearchAggregation } from './types';

 export class ElasticQueryCtrl extends QueryCtrl {
   static templateUrl = 'partials/query.editor.html';
@@ -53,7 +54,7 @@ export class ElasticQueryCtrl extends QueryCtrl {
   }

   getCollapsedText() {
-    const metricAggs = this.target.metrics;
+    const metricAggs: ElasticsearchAggregation[] = this.target.metrics;
     const bucketAggs = this.target.bucketAggs;
     const metricAggTypes = queryDef.getMetricAggTypes(this.esVersion);
     const bucketAggTypes = queryDef.bucketAggTypes;
public/app/plugins/datasource/elasticsearch (ElasticDatasource tests)
@@ -1,20 +1,25 @@
-import angular from 'angular';
+import angular, { IQService } from 'angular';
 import * as dateMath from '@grafana/ui/src/utils/datemath';
 import _ from 'lodash';
 import { ElasticDatasource } from '../datasource';
 import { toUtc, dateTime } from '@grafana/ui/src/utils/moment_wrapper';
+import { BackendSrv } from 'app/core/services/backend_srv';
+import { TimeSrv } from 'app/features/dashboard/services/TimeSrv';
+import { TemplateSrv } from 'app/features/templating/template_srv';
+import { DataSourceInstanceSettings } from '@grafana/ui';
+import { ElasticsearchOptions } from '../types';

 describe('ElasticDatasource', function(this: any) {
-  const backendSrv = {
+  const backendSrv: any = {
     datasourceRequest: jest.fn(),
   };

-  const $rootScope = {
+  const $rootScope: any = {
     $on: jest.fn(),
     appEvent: jest.fn(),
   };

-  const templateSrv = {
+  const templateSrv: any = {
     replace: jest.fn(text => {
       if (text.startsWith('$')) {
         return `resolvedVariable`;
@@ -25,12 +30,12 @@ describe('ElasticDatasource', function(this: any) {
     getAdhocFilters: jest.fn(() => []),
   };

-  const timeSrv = {
+  const timeSrv: any = {
     time: { from: 'now-1h', to: 'now' },
     timeRange: jest.fn(() => {
       return {
-        from: dateMath.parse(this.time.from, false),
-        to: dateMath.parse(this.time.to, true),
+        from: dateMath.parse(timeSrv.time.from, false),
+        to: dateMath.parse(timeSrv.time.to, true),
       };
     }),
     setTime: jest.fn(time => {
@@ -43,18 +48,24 @@ describe('ElasticDatasource', function(this: any) {
     backendSrv,
   } as any;

-  function createDatasource(instanceSettings) {
-    instanceSettings.jsonData = instanceSettings.jsonData || {};
-    ctx.ds = new ElasticDatasource(instanceSettings, {}, backendSrv, templateSrv, timeSrv);
+  function createDatasource(instanceSettings: DataSourceInstanceSettings<ElasticsearchOptions>) {
+    instanceSettings.jsonData = instanceSettings.jsonData || ({} as ElasticsearchOptions);
+    ctx.ds = new ElasticDatasource(
+      instanceSettings,
+      {} as IQService,
+      backendSrv as BackendSrv,
+      templateSrv as TemplateSrv,
+      timeSrv as TimeSrv
+    );
   }

   describe('When testing datasource with index pattern', () => {
     beforeEach(() => {
       createDatasource({
         url: 'http://es.com',
-        index: '[asd-]YYYY.MM.DD',
-        jsonData: { interval: 'Daily', esVersion: '2' },
-      });
+        database: '[asd-]YYYY.MM.DD',
+        jsonData: { interval: 'Daily', esVersion: 2 } as ElasticsearchOptions,
+      } as DataSourceInstanceSettings<ElasticsearchOptions>);
     });

     it('should translate index pattern to current day', () => {
@@ -77,9 +88,9 @@ describe('ElasticDatasource', function(this: any) {
     beforeEach(async () => {
       createDatasource({
         url: 'http://es.com',
-        index: '[asd-]YYYY.MM.DD',
-        jsonData: { interval: 'Daily', esVersion: '2' },
-      });
+        database: '[asd-]YYYY.MM.DD',
+        jsonData: { interval: 'Daily', esVersion: 2 } as ElasticsearchOptions,
+      } as DataSourceInstanceSettings<ElasticsearchOptions>);

       ctx.backendSrv.datasourceRequest = jest.fn(options => {
         requestOptions = options;
@@ -142,15 +153,110 @@ describe('ElasticDatasource', function(this: any) {
     });
   });

+  describe('When issuing logs query with interval pattern', () => {
+    let query, queryBuilderSpy;
+
+    beforeEach(async () => {
+      createDatasource({
+        url: 'http://es.com',
+        database: 'mock-index',
+        jsonData: { interval: 'Daily', esVersion: 2, timeField: '@timestamp' } as ElasticsearchOptions,
+      } as DataSourceInstanceSettings<ElasticsearchOptions>);
+
+      ctx.backendSrv.datasourceRequest = jest.fn(options => {
+        return Promise.resolve({
+          data: {
+            responses: [
+              {
+                aggregations: {
+                  '2': {
+                    buckets: [
+                      {
+                        doc_count: 10,
+                        key: 1000,
+                      },
+                      {
+                        doc_count: 15,
+                        key: 2000,
+                      },
+                    ],
+                  },
+                },
+                hits: {
+                  hits: [
+                    {
+                      '@timestamp': ['2019-06-24T09:51:19.765Z'],
+                      _id: 'fdsfs',
+                      _type: '_doc',
+                      _index: 'mock-index',
+                      _source: {
+                        '@timestamp': '2019-06-24T09:51:19.765Z',
+                        host: 'djisaodjsoad',
+                        message: 'hello, i am a message',
+                      },
+                      fields: {
+                        '@timestamp': ['2019-06-24T09:51:19.765Z'],
+                      },
+                    },
+                    {
+                      '@timestamp': ['2019-06-24T09:52:19.765Z'],
+                      _id: 'kdospaidopa',
+                      _type: '_doc',
+                      _index: 'mock-index',
+                      _source: {
+                        '@timestamp': '2019-06-24T09:52:19.765Z',
+                        host: 'dsalkdakdop',
+                        message: 'hello, i am also message',
+                      },
+                      fields: {
+                        '@timestamp': ['2019-06-24T09:52:19.765Z'],
+                      },
+                    },
+                  ],
+                },
+              },
+            ],
+          },
+        });
+      });
+
+      query = {
+        range: {
+          from: toUtc([2015, 4, 30, 10]),
+          to: toUtc([2019, 7, 1, 10]),
+        },
+        targets: [
+          {
+            alias: '$varAlias',
+            refId: 'A',
+            bucketAggs: [{ type: 'date_histogram', settings: { interval: 'auto' }, id: '2' }],
+            metrics: [{ type: 'count', id: '1' }],
+            query: 'escape\\:test',
+            interval: '10s',
+            isLogsQuery: true,
+            timeField: '@timestamp',
+          },
+        ],
+      };
+
+      queryBuilderSpy = jest.spyOn(ctx.ds.queryBuilder, 'getLogsQuery');
+      await ctx.ds.query(query);
+    });
+
+    it('should call getLogsQuery()', () => {
+      expect(queryBuilderSpy).toHaveBeenCalled();
+    });
+  });
+
   describe('When issuing document query', () => {
     let requestOptions, parts, header;

     beforeEach(() => {
       createDatasource({
         url: 'http://es.com',
-        index: 'test',
-        jsonData: { esVersion: '2' },
-      });
+        database: 'test',
+        jsonData: { esVersion: 2 } as ElasticsearchOptions,
+      } as DataSourceInstanceSettings<ElasticsearchOptions>);

       ctx.backendSrv.datasourceRequest = jest.fn(options => {
         requestOptions = options;
@@ -187,7 +293,11 @@ describe('ElasticDatasource', function(this: any) {

   describe('When getting fields', () => {
     beforeEach(() => {
-      createDatasource({ url: 'http://es.com', index: 'metricbeat', jsonData: { esVersion: 50 } });
+      createDatasource({
+        url: 'http://es.com',
+        database: 'metricbeat',
+        jsonData: { esVersion: 50 } as ElasticsearchOptions,
+      } as DataSourceInstanceSettings<ElasticsearchOptions>);

       ctx.backendSrv.datasourceRequest = jest.fn(options => {
         return Promise.resolve({
@@ -279,7 +389,11 @@ describe('ElasticDatasource', function(this: any) {

   describe('When getting fields from ES 7.0', () => {
     beforeEach(() => {
-      createDatasource({ url: 'http://es.com', index: 'genuine.es7._mapping.response', jsonData: { esVersion: 70 } });
+      createDatasource({
+        url: 'http://es.com',
+        database: 'genuine.es7._mapping.response',
+        jsonData: { esVersion: 70 } as ElasticsearchOptions,
+      } as DataSourceInstanceSettings<ElasticsearchOptions>);

       ctx.backendSrv.datasourceRequest = jest.fn(options => {
         return Promise.resolve({
@@ -430,9 +544,9 @@ describe('ElasticDatasource', function(this: any) {
     beforeEach(() => {
       createDatasource({
         url: 'http://es.com',
-        index: 'test',
-        jsonData: { esVersion: '5' },
-      });
+        database: 'test',
+        jsonData: { esVersion: 5 } as ElasticsearchOptions,
+      } as DataSourceInstanceSettings<ElasticsearchOptions>);

       ctx.backendSrv.datasourceRequest = jest.fn(options => {
         requestOptions = options;
@@ -473,9 +587,9 @@ describe('ElasticDatasource', function(this: any) {
     beforeEach(() => {
       createDatasource({
         url: 'http://es.com',
-        index: 'test',
-        jsonData: { esVersion: '5' },
-      });
+        database: 'test',
+        jsonData: { esVersion: 5 } as ElasticsearchOptions,
+      } as DataSourceInstanceSettings<ElasticsearchOptions>);

       ctx.backendSrv.datasourceRequest = jest.fn(options => {
         requestOptions = options;
public/app/plugins/datasource/elasticsearch (ElasticResponse tests)
@@ -784,4 +784,94 @@ describe('ElasticResponse', () => {
       expect(result.data[2].datapoints[1][0]).toBe(12);
     });
   });
+
+  describe('simple logs query and count', () => {
+    beforeEach(() => {
+      targets = [
+        {
+          refId: 'A',
+          metrics: [{ type: 'count', id: '1' }],
+          bucketAggs: [{ type: 'date_histogram', settings: { interval: 'auto' }, id: '2' }],
+          context: 'explore',
+          interval: '10s',
+          isLogsQuery: true,
+          key: 'Q-1561369883389-0.7611823271062786-0',
+          live: false,
+          maxDataPoints: 1620,
+          query: '',
+          timeField: '@timestamp',
+        },
+      ];
+      response = {
+        responses: [
+          {
+            aggregations: {
+              '2': {
+                buckets: [
+                  {
+                    doc_count: 10,
+                    key: 1000,
+                  },
+                  {
+                    doc_count: 15,
+                    key: 2000,
+                  },
+                ],
+              },
+            },
+            hits: {
+              hits: [
+                {
+                  _id: 'fdsfs',
+                  _type: '_doc',
+                  _index: 'mock-index',
+                  _source: {
+                    '@timestamp': '2019-06-24T09:51:19.765Z',
+                    host: 'djisaodjsoad',
+                    message: 'hello, i am a message',
+                  },
+                  fields: {
+                    '@timestamp': ['2019-06-24T09:51:19.765Z'],
+                  },
+                },
+                {
+                  _id: 'kdospaidopa',
+                  _type: '_doc',
+                  _index: 'mock-index',
+                  _source: {
+                    '@timestamp': '2019-06-24T09:52:19.765Z',
+                    host: 'dsalkdakdop',
+                    message: 'hello, i am also message',
+                  },
+                  fields: {
+                    '@timestamp': ['2019-06-24T09:52:19.765Z'],
+                  },
+                },
+              ],
+            },
+          },
+        ],
+      };
+
+      result = new ElasticResponse(targets, response).getLogs();
+    });
+
+    it('should return histogram aggregation and documents', () => {
+      expect(result.data.length).toBe(2);
+      expect(result.data[0].fields).toContainEqual({ name: '@timestamp', type: 'time' });
+      expect(result.data[0].fields).toContainEqual({ name: 'host', type: 'string' });
+      expect(result.data[0].fields).toContainEqual({ name: 'message', type: 'string' });
+      result.data[0].rows.forEach((row, i) => {
+        expect(row).toContain(response.responses[0].hits.hits[i]._id);
+        expect(row).toContain(response.responses[0].hits.hits[i]._type);
+        expect(row).toContain(response.responses[0].hits.hits[i]._index);
+        expect(row).toContain(JSON.stringify(response.responses[0].hits.hits[i]._source, undefined, 2));
+      });
+
+      expect(result.data[1]).toHaveProperty('name', 'Count');
+      response.responses[0].aggregations['2'].buckets.forEach(bucket => {
+        expect(result.data[1].rows).toContainEqual([bucket.doc_count, bucket.key]);
+      });
+    });
+  });
 });
public/app/plugins/datasource/elasticsearch (ElasticQueryBuilder tests)
@@ -490,4 +490,10 @@ describe('ElasticQueryBuilder', () => {
     const query = builder6x.getTermsQuery({});
     expect(query.aggs['1'].terms.order._key).toBe('asc');
   });
+
+  it('getLogsQuery should request documents and date histogram', () => {
+    const query = builder.getLogsQuery({});
+    expect(query).toHaveProperty('query.bool.filter');
+    expect(query.aggs['2']).toHaveProperty('date_histogram');
+  });
 });
public/app/plugins/datasource/elasticsearch/types.ts (new file, 26 lines)
@@ -0,0 +1,26 @@
+import { DataQuery, DataSourceJsonData } from '@grafana/ui';
+
+export interface ElasticsearchOptions extends DataSourceJsonData {
+  timeField: string;
+  esVersion: number;
+  interval: string;
+  timeInterval: string;
+  maxConcurrentShardRequests?: number;
+  logMessageField?: string;
+  logLevelField?: string;
+}
+
+export interface ElasticsearchAggregation {
+  id: string;
+  type: string;
+  settings?: any;
+  field?: string;
+}
+
+export interface ElasticsearchQuery extends DataQuery {
+  isLogsQuery: boolean;
+  alias?: string;
+  query?: string;
+  bucketAggs?: ElasticsearchAggregation[];
+  metrics?: ElasticsearchAggregation[];
+}
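Finally, a small illustration (values invented, not part of the diff) of how ElasticsearchQuery discriminates the two code paths added in this commit: a metrics-style target is rendered through getTimeSeries(), while a logs-style target is routed through getLogsQuery()/getLogs():

import { ElasticsearchQuery } from 'app/plugins/datasource/elasticsearch/types';

// A metrics-style target: aggregations drive the response (getTimeSeries()).
const metricsTarget: ElasticsearchQuery = {
  refId: 'A',
  isLogsQuery: false,
  query: 'host:web-*',
  metrics: [{ id: '1', type: 'count' }],
  bucketAggs: [{ id: '2', type: 'date_histogram', settings: { interval: 'auto' } }],
};

// A logs-style target: query() substitutes default aggs and renders via getLogs().
const logsTarget: ElasticsearchQuery = {
  refId: 'B',
  isLogsQuery: true,
  query: 'level:error',
};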