Elasticsearch: Add Top Metrics Aggregation and X-Pack support (#33041)

* Elasticsearch: Add Top Metrics Aggregation

* Adding support for non-timeseries visualizations

* removing console.logs

* restoring loadOptions type

* Honor xpack setting

* Adding test for elastic_response

* adding test for query builder

* Adding support of alerting

* Fixing separator spelling

* Fixing linting issues

* attempting to reduce cyclomatic complexity

* Adding elastic77 Docker block

* Update public/app/plugins/datasource/elasticsearch/components/QueryEditor/MetricAggregationsEditor/MetricEditor.test.tsx

Co-authored-by: Giordano Ricci <grdnricci@gmail.com>

* refactoring MetricsEditor tests

* Fixing typo

* Change getFields type & move TopMetrics to a separate component

* Fix SegmentAsync styles in TopMetrics Settings

* Fix field types for TopMetrics

* WIP

* Refactoring client side to support multiple top metrics

* Adding tests and finishing go implimentation

* removing fmt lib from debugging

* fixing tests

* reducing the cyclomatic complexity

* Update public/app/plugins/datasource/elasticsearch/elastic_response.ts

Co-authored-by: Giordano Ricci <grdnricci@gmail.com>

* Update public/app/plugins/datasource/elasticsearch/hooks/useFields.ts

Co-authored-by: Giordano Ricci <grdnricci@gmail.com>

* Checking for possible nil value

* Fixing types

* fix fake-data-gen param

* fix useFields hook

* Removing aggregateBy and size

* Fixing go tests

* Fixing TS tests

* fixing tests

* Fixes

* Remove date from top_metrics fields

* Restore previous formatting

* Update pkg/tsdb/elasticsearch/client/models.go

Co-authored-by: Dimitris Sotirakis <dimitrios.sotirakis@grafana.com>

* Update pkg/tsdb/elasticsearch/client/models.go

Co-authored-by: Dimitris Sotirakis <dimitrios.sotirakis@grafana.com>

* Fix code review comments on processTopMetricValue

* Remove underscore from variable names

* Remove intermediate array definition

* Refactor test to use testify

Co-authored-by: Giordano Ricci <grdnricci@gmail.com>
Co-authored-by: Elfo404 <me@giordanoricci.com>
Co-authored-by: Dimitris Sotirakis <dimitrios.sotirakis@grafana.com>
This commit is contained in:
Chris Cowan 2021-06-04 03:07:59 -07:00 committed by GitHub
parent f683a497eb
commit f580c9149c
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
30 changed files with 2718 additions and 111 deletions

View File

@ -0,0 +1,44 @@
# You need to run 'sysctl -w vm.max_map_count=262144' on the host machine
# Docker block for Elasticsearch 7.7 with X-Pack (basic license) plus beats and
# Kibana, used to exercise the top_metrics aggregation in the query editor.
elasticsearch77:
  image: docker.elastic.co/elasticsearch/elasticsearch:7.7.1
  command: elasticsearch
  environment:
    - "discovery.type=single-node"
  ports:
    # Non-default host ports so this block can run next to other ES blocks.
    - "13200:9200"
    - "13300:9300"

# Seeds the elasticsearch7-style fake data used by the datasource tests/demos.
fake-elastic77-data:
  image: grafana/fake-data-gen
  links:
    - elasticsearch77
  environment:
    FD_SERVER: elasticsearch77
    FD_DATASOURCE: elasticsearch7
    FD_PORT: 9200

filebeat77:
  image: docker.elastic.co/beats/filebeat:7.7.1
  # -strict.perms=false: skip config-file ownership/permission checks (the
  # config is bind-mounted from the host).
  command: filebeat -e -strict.perms=false
  volumes:
    - ./docker/blocks/elastic77/filebeat.yml:/usr/share/filebeat/filebeat.yml:ro
    - /var/log:/var/log:ro
    - ../data/log:/var/log/grafana:ro

metricbeat77:
  image: docker.elastic.co/beats/metricbeat:7.7.1
  command: metricbeat -e -strict.perms=false
  user: root
  volumes:
    - ./docker/blocks/elastic77/metricbeat.yml:/usr/share/metricbeat/metricbeat.yml:ro
    # Needed so the docker module/autodiscover can talk to the Docker daemon.
    - /var/run/docker.sock:/var/run/docker.sock:ro

kibana77:
  image: docker.elastic.co/kibana/kibana:7.7.1
  ports:
    - "5601:5601"
  links:
    - elasticsearch77
  environment:
    ELASTICSEARCH_HOSTS: http://elasticsearch77:9200

View File

@ -0,0 +1,3 @@
script.inline: on
script.indexed: on
# Self-generate a free Basic license so X-Pack-only features (e.g. the
# top_metrics aggregation) are available in this dev block.
xpack.license.self_generated.type: basic

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,38 @@
# Metricbeat configuration for the elastic77 docker block: collects Docker
# container metrics and ships them to the block's Elasticsearch instance.
metricbeat.config:
  modules:
    path: ${path.config}/modules.d/*.yml
    # Reload module configs as they change:
    reload.enabled: false

metricbeat.autodiscover:
  providers:
    - type: docker
      hints.enabled: true

metricbeat.modules:
  - module: docker
    metricsets:
      - "container"
      - "cpu"
      - "diskio"
      - "healthcheck"
      - "info"
      #- "image"
      - "memory"
      - "network"
    # Talk to the Docker daemon through the bind-mounted socket.
    hosts: ["unix:///var/run/docker.sock"]
    period: 10s
    enabled: true

processors:
  - add_cloud_metadata: ~

output.elasticsearch:
  hosts: ["elasticsearch77:9200"]
  index: "metricbeat-%{+yyyy.MM.dd}"

setup.template.name: "metricbeat"
setup.template.pattern: "metricbeat-*"
setup.template.settings:
  index.number_of_shards: 1
  index.number_of_replicas: 1

View File

@ -57,7 +57,7 @@ a time pattern for the index name or a wildcard.
### Elasticsearch version
Select the version of your Elasticsearch data source from the version selection dropdown. Different query compositions and functionalities are available in the query editor for different versions.
Available Elasticsearch versions are `2.x`, `5.x`, `5.6+`, `6.0+`, and `7.0+`. Select the option that best matches your data source version.
Available Elasticsearch versions are `2.x`, `5.x`, `5.6+`, `6.0+`, `7.0+` and `7.7+`. Select the option that best matches your data source version.
Grafana assumes that you are running the lowest possible version for a specified range. This ensures that new features or breaking changes in a future Elasticsearch release will not affect your configuration.

View File

@ -53,6 +53,8 @@ func coerceVersion(v *simplejson.Json) (*semver.Version, error) {
return nil, err
}
// Legacy version numbers (before Grafana 8)
// valid values were 2,5,56,60,70
switch versionNumber {
case 2:
return semver.NewVersion("2.0.0")

View File

@ -274,12 +274,41 @@ type GeoHashGridAggregation struct {
// MetricAggregation represents a metric aggregation
type MetricAggregation struct {
	Type     string                 // aggregation type, e.g. "avg" or "top_metrics"
	Field    string                 // field the aggregation operates on; may be empty for types that don't need one
	Settings map[string]interface{} // type-specific settings taken from the query model
}
// MarshalJSON returns the JSON encoding of the metric aggregation
func (a *MetricAggregation) MarshalJSON() ([]byte, error) {
if a.Type == "top_metrics" {
root := map[string]interface{}{}
var rootMetrics []map[string]string
order, hasOrder := a.Settings["order"]
orderBy, hasOrderBy := a.Settings["orderBy"]
root["size"] = "1"
metrics, hasMetrics := a.Settings["metrics"].([]interface{})
if hasMetrics {
for _, v := range metrics {
metricValue := map[string]string{"field": v.(string)}
rootMetrics = append(rootMetrics, metricValue)
}
root["metrics"] = rootMetrics
}
if hasOrderBy && hasOrder {
root["sort"] = []map[string]interface{}{
{
orderBy.(string): order,
},
}
}
return json.Marshal(root)
}
root := map[string]interface{}{}
if a.Field != "" {

View File

@ -419,9 +419,11 @@ func (b *aggBuilderImpl) GeoHashGrid(key, field string, fn func(a *GeoHashGridAg
func (b *aggBuilderImpl) Metric(key, metricType, field string, fn func(a *MetricAggregation)) AggBuilder {
innerAgg := &MetricAggregation{
Type: metricType,
Field: field,
Settings: make(map[string]interface{}),
}
aggDef := newAggDef(key, &aggContainer{
Type: metricType,
Aggregation: innerAgg,

View File

@ -43,6 +43,7 @@ var metricAggType = map[string]string{
"min": "Min",
"extended_stats": "Extended Stats",
"percentiles": "Percentiles",
"top_metrics": "Top Metrics",
"cardinality": "Unique Count",
"moving_avg": "Moving Average",
"moving_fn": "Moving Function",

View File

@ -18,6 +18,7 @@ const (
countType = "count"
percentilesType = "percentiles"
extendedStatsType = "extended_stats"
topMetricsType = "top_metrics"
// Bucket types
dateHistType = "date_histogram"
histogramType = "histogram"
@ -225,6 +226,10 @@ func (rp *responseParser) processMetrics(esAgg *simplejson.Json, target *Query,
}
*series = append(*series, newSeries)
}
case topMetricsType:
topMetricSeries := processTopMetrics(metric, esAgg, props)
*series = append(*series, topMetricSeries...)
case extendedStatsType:
buckets := esAgg.Get("buckets").MustArray()
@ -592,3 +597,47 @@ func getErrorFromElasticResponse(response *es.SearchResponse) plugins.DataQueryR
return result
}
// processTopMetricValues extracts the value of the given metric field from the
// "top" hits array of a top_metrics aggregation bucket. It returns the first
// non-nil numeric value found, or an invalid (null) float when the field is
// absent from every hit.
//
// Comma-ok type assertions are used throughout so that an unexpected JSON
// shape (a non-object hit, a non-object "metrics" entry, or a non-numeric
// metric value) is skipped instead of panicking the response parser.
func processTopMetricValues(stats *simplejson.Json, field string) null.Float {
	for _, stat := range stats.MustArray() {
		statMap, ok := stat.(map[string]interface{})
		if !ok {
			continue
		}
		metrics, hasMetrics := statMap["metrics"].(map[string]interface{})
		if !hasMetrics {
			continue
		}
		// A nil value also fails the float64 assertion, covering the
		// "possible nil value" case without a separate check.
		if value, ok := metrics[field].(float64); ok {
			return null.FloatFrom(value)
		}
	}
	return null.NewFloat(0, false)
}
// processTopMetrics converts the buckets of a top_metrics aggregation into one
// time series per configured metric field. Each bucket contributes one point
// whose value is taken from the bucket's "top" hits and whose timestamp is the
// bucket key. Returns nil when no metric fields are configured.
func processTopMetrics(metric *MetricAgg, esAgg *simplejson.Json, props map[string]string) plugins.DataTimeSeriesSlice {
	var result plugins.DataTimeSeriesSlice

	metricFields, ok := metric.Settings.MustMap()["metrics"].([]interface{})
	if !ok {
		return result
	}

	for _, rawField := range metricFields {
		fieldName := rawField.(string)
		current := plugins.DataTimeSeries{
			Tags: make(map[string]string),
		}

		for _, rawBucket := range esAgg.Get("buckets").MustArray() {
			bucket := simplejson.NewFromAny(rawBucket)
			topStats := bucket.GetPath(metric.ID, "top")
			point := plugins.DataTimePoint{
				processTopMetricValues(topStats, fieldName),
				castToNullFloat(bucket.Get("key")),
			}
			current.Points = append(current.Points, point)
		}

		// Propagate the bucket-path props, then tag the series with its
		// aggregation type and field so it can be named downstream.
		for tagKey, tagValue := range props {
			current.Tags[tagKey] = tagValue
		}
		current.Tags["metric"] = "top_metrics"
		current.Tags["field"] = fieldName

		result = append(result, current)
	}

	return result
}

View File

@ -10,6 +10,7 @@ import (
"github.com/grafana/grafana/pkg/components/simplejson"
"github.com/grafana/grafana/pkg/plugins"
es "github.com/grafana/grafana/pkg/tsdb/elasticsearch/client"
"github.com/stretchr/testify/assert"
. "github.com/smartystreets/goconvey/convey"
)
@ -992,6 +993,80 @@ func TestResponseParser(t *testing.T) {
// So(rows[0][2].(null.Float).Float64, ShouldEqual, 3000)
// })
})
t.Run("With top_metrics", func(t *testing.T) {
targets := map[string]string{
"A": `{
"timeField": "@timestamp",
"metrics": [
{
"type": "top_metrics",
"settings": {
"order": "desc",
"orderBy": "@timestamp",
"metrics": ["@value", "@anotherValue"]
},
"id": "1"
}
],
"bucketAggs": [{ "type": "date_histogram", "field": "@timestamp", "id": "3" }]
}`,
}
response := `{
"responses": [{
"aggregations": {
"3": {
"buckets": [
{
"key": 1609459200000,
"key_as_string": "2021-01-01T00:00:00.000Z",
"1": {
"top": [
{ "sort": ["2021-01-01T00:00:00.000Z"], "metrics": { "@value": 1, "@anotherValue": 2 } }
]
}
},
{
"key": 1609459210000,
"key_as_string": "2021-01-01T00:00:10.000Z",
"1": {
"top": [
{ "sort": ["2021-01-01T00:00:10.000Z"], "metrics": { "@value": 1, "@anotherValue": 2 } }
]
}
}
]
}
}
}]
}`
rp, err := newResponseParserForTest(targets, response)
assert.Nil(t, err)
result, err := rp.getTimeSeries()
assert.Nil(t, err)
assert.Len(t, result.Results, 1)
queryRes := result.Results["A"]
assert.NotNil(t, queryRes)
assert.Len(t, queryRes.Series, 2)
seriesOne := queryRes.Series[0]
assert.Equal(t, seriesOne.Name, "Top Metrics @value")
assert.Len(t, seriesOne.Points, 2)
assert.Equal(t, seriesOne.Points[0][0].Float64, 1.)
assert.Equal(t, seriesOne.Points[0][1].Float64, 1609459200000.)
assert.Equal(t, seriesOne.Points[1][0].Float64, 1.)
assert.Equal(t, seriesOne.Points[1][1].Float64, 1609459210000.)
seriesTwo := queryRes.Series[1]
assert.Equal(t, seriesTwo.Name, "Top Metrics @anotherValue")
assert.Len(t, seriesTwo.Points, 2)
assert.Equal(t, seriesTwo.Points[0][0].Float64, 2.)
assert.Equal(t, seriesTwo.Points[0][1].Float64, 1609459200000.)
assert.Equal(t, seriesTwo.Points[1][0].Float64, 2.)
assert.Equal(t, seriesTwo.Points[1][1].Float64, 1609459210000.)
})
}
func newResponseParserForTest(tsdbQueries map[string]string, responseBody string) (*responseParser, error) {

View File

@ -66,3 +66,14 @@ export type BucketAggregation = DateHistogram | Histogram | Terms | Filters | Ge
export const isBucketAggregationWithField = (
bucketAgg: BucketAggregation | BucketAggregationWithField
): bucketAgg is BucketAggregationWithField => bucketAggregationConfig[bucketAgg.type].requiresField;
export const BUCKET_AGGREGATION_TYPES: BucketAggregationType[] = [
'date_histogram',
'histogram',
'terms',
'filters',
'geohash_grid',
];
export const isBucketAggregationType = (s: BucketAggregationType | string): s is BucketAggregationType =>
BUCKET_AGGREGATION_TYPES.includes(s as BucketAggregationType);

View File

@ -126,18 +126,22 @@ function createOrderByOptionsForPercentiles(metric: Percentiles): OrderByOption[
});
}
const INCOMPATIBLE_ORDER_BY_AGGS = ['top_metrics'];
/**
* This creates all the valid order by options based on the metrics
*/
export const createOrderByOptionsFromMetrics = (metrics: MetricAggregation[] = []): OrderByOption[] => {
const metricOptions = metrics.flatMap((metric) => {
if (metric.type === 'extended_stats') {
return createOrderByOptionsForExtendedStats(metric);
} else if (metric.type === 'percentiles') {
return createOrderByOptionsForPercentiles(metric);
} else {
return { label: describeMetric(metric), value: metric.id };
}
});
const metricOptions = metrics
.filter((metric) => !INCOMPATIBLE_ORDER_BY_AGGS.includes(metric.type))
.flatMap((metric) => {
if (metric.type === 'extended_stats') {
return createOrderByOptionsForExtendedStats(metric);
} else if (metric.type === 'percentiles') {
return createOrderByOptionsForPercentiles(metric);
} else {
return { label: describeMetric(metric), value: metric.id };
}
});
return [...orderByOptions, ...metricOptions];
};

View File

@ -1,12 +1,12 @@
import { act, fireEvent, render, screen } from '@testing-library/react';
import { ElasticsearchProvider } from '../ElasticsearchQueryContext';
import { MetricEditor } from './MetricEditor';
import React, { PropsWithChildren } from 'react';
import React, { ReactNode, PropsWithChildren } from 'react';
import { ElasticDatasource } from '../../../datasource';
import { getDefaultTimeRange } from '@grafana/data';
import { ElasticsearchQuery } from '../../../types';
import { Average, UniqueCount } from './aggregations';
import { defaultBucketAgg } from '../../../query_def';
import { defaultBucketAgg, defaultMetricAgg } from '../../../query_def';
import { from } from 'rxjs';
describe('Metric Editor', () => {
@ -82,4 +82,50 @@ describe('Metric Editor', () => {
expect(await screen.findByText('No options found')).toBeInTheDocument();
expect(screen.queryByText('None')).not.toBeInTheDocument();
});
describe('Top Metrics Aggregation', () => {
const setupTopMetricsScreen = (esVersion: string, xpack: boolean) => {
const query: ElasticsearchQuery = {
refId: 'A',
query: '',
metrics: [defaultMetricAgg('1')],
bucketAggs: [defaultBucketAgg('2')],
};
const getFields: ElasticDatasource['getFields'] = jest.fn(() => from([[]]));
const wrapper = ({ children }: { children: ReactNode }) => (
<ElasticsearchProvider
datasource={{ getFields, esVersion, xpack } as ElasticDatasource}
query={query}
range={getDefaultTimeRange()}
onChange={() => {}}
onRunQuery={() => {}}
>
{children}
</ElasticsearchProvider>
);
render(<MetricEditor value={defaultMetricAgg('1')} />, { wrapper });
act(() => {
fireEvent.click(screen.getByText('Count'));
});
};
it('Should include top metrics aggregation when esVersion is 77 and X-Pack is enabled', () => {
setupTopMetricsScreen('7.7.0', true);
expect(screen.getByText('Top Metrics')).toBeInTheDocument();
});
it('Should NOT include top metrics aggregation where esVersion is 77 and X-Pack is disabled', () => {
setupTopMetricsScreen('7.7.0', false);
expect(screen.queryByText('Top Metrics')).toBe(null);
});
it('Should NOT include top metrics aggregation when esVersion is 70 and X-Pack is enabled', () => {
setupTopMetricsScreen('7.0.0', true);
expect(screen.queryByText('Top Metrics')).toBe(null);
});
});
});

View File

@ -40,7 +40,8 @@ const isBasicAggregation = (metric: MetricAggregation) => !metricAggregationConf
const getTypeOptions = (
previousMetrics: MetricAggregation[],
esVersion: string
esVersion: string,
xpack = false
): Array<SelectableValue<MetricAggregationType>> => {
// we'll include Pipeline Aggregations only if at least one previous metric is a "Basic" one
const includePipelineAggregations = previousMetrics.some(isBasicAggregation);
@ -51,6 +52,8 @@ const getTypeOptions = (
.filter(([_, { versionRange = '*' }]) => satisfies(esVersion, versionRange))
// Filtering out Pipeline Aggregations if there's no basic metric selected before
.filter(([_, config]) => includePipelineAggregations || !config.isPipelineAgg)
// Filtering out X-Pack plugins if X-Pack is disabled
.filter(([_, config]) => (config.xpack ? xpack : true))
.map(([key, { label }]) => ({
label,
value: key as MetricAggregationType,
@ -86,7 +89,7 @@ export const MetricEditor = ({ value }: Props) => {
<InlineSegmentGroup>
<Segment
className={cx(styles.color, segmentStyles)}
options={getTypeOptions(previousMetrics, datasource.esVersion)}
options={getTypeOptions(previousMetrics, datasource.esVersion, datasource.xpack)}
onChange={(e) => dispatch(changeMetricType(value.id, e.value!))}
value={toOption(value)}
/>

View File

@ -0,0 +1,69 @@
import { AsyncMultiSelect, InlineField, SegmentAsync, Select } from '@grafana/ui';
import React, { FunctionComponent } from 'react';
import { useDispatch } from '../../../../hooks/useStatelessReducer';
import { useFields } from '../../../../hooks/useFields';
import { TopMetrics } from '../aggregations';
import { changeMetricSetting } from '../state/actions';
import { orderOptions } from '../../BucketAggregationsEditor/utils';
import { css } from '@emotion/css';
import { SelectableValue } from '@grafana/data';
interface Props {
metric: TopMetrics;
}
const toMultiSelectValue = (value: string): SelectableValue<string> => ({ value, label: value });
/**
 * Settings editor for the `top_metrics` metric aggregation: lets the user pick
 * the metric fields to fetch, the sort direction, and the field to sort by.
 * All changes are dispatched as `changeMetricSetting` actions.
 */
export const TopMetricsSettingsEditor: FunctionComponent<Props> = ({ metric }) => {
  const dispatch = useDispatch();
  // "Order By" may sort on numeric or date fields.
  const getOrderByOptions = useFields(['number', 'date']);
  // Metric field options are filtered by the aggregation type.
  const getMetricsOptions = useFields(metric.type);
  return (
    <>
      <InlineField label="Metrics" labelWidth={16}>
        <AsyncMultiSelect
          onChange={(e) =>
            dispatch(
              changeMetricSetting(
                metric,
                'metrics',
                // Store only the raw field names, not the SelectableValue wrappers.
                e.map((v) => v.value!)
              )
            )
          }
          loadOptions={getMetricsOptions}
          value={metric.settings?.metrics?.map(toMultiSelectValue)}
          closeMenuOnSelect={false}
          defaultOptions
        />
      </InlineField>
      <InlineField label="Order" labelWidth={16}>
        <Select
          onChange={(e) => dispatch(changeMetricSetting(metric, 'order', e.value))}
          options={orderOptions}
          value={metric.settings?.order}
        />
      </InlineField>
      <InlineField
        label="Order By"
        labelWidth={16}
        className={css`
          & > div {
            width: 100%;
          }
        `}
      >
        <SegmentAsync
          className={css`
            margin-right: 0;
          `}
          loadOptions={getOrderByOptions}
          onChange={(e) => dispatch(changeMetricSetting(metric, 'orderBy', e.value))}
          placeholder="Select Field"
          value={metric.settings?.orderBy}
        />
      </InlineField>
    </>
  );
};

View File

@ -14,6 +14,7 @@ import { SettingField } from './SettingField';
import { SettingsEditorContainer } from '../../SettingsEditorContainer';
import { useDescription } from './useDescription';
import { MovingAverageSettingsEditor } from './MovingAverageSettingsEditor';
import { TopMetricsSettingsEditor } from './TopMetricsSettingsEditor';
import { uniqueId } from 'lodash';
import { metricAggregationConfig } from '../utils';
import { useQuery } from '../../ElasticsearchQueryContext';
@ -51,6 +52,8 @@ export const SettingsEditor = ({ metric, previousMetrics }: Props) => {
</>
)}
{metric.type === 'top_metrics' && <TopMetricsSettingsEditor metric={metric} />}
{metric.type === 'bucket_script' && (
<BucketScriptSettingsEditor value={metric} previousMetrics={previousMetrics} />
)}

View File

@ -20,6 +20,7 @@ export type MetricAggregationType =
| 'raw_document'
| 'raw_data'
| 'logs'
| 'top_metrics'
| PipelineMetricAggregationType;
interface BaseMetricAggregation {
@ -282,6 +283,15 @@ export interface BucketScript extends PipelineMetricAggregationWithMultipleBucke
};
}
/** Model for the X-Pack `top_metrics` metric aggregation. */
export interface TopMetrics extends BaseMetricAggregation {
  type: 'top_metrics';
  settings?: {
    // Sort direction for the generated `sort` clause.
    order?: string;
    // Field name to sort by.
    orderBy?: string;
    // Field names whose top values should be returned.
    metrics?: string[];
  };
}
type PipelineMetricAggregation = MovingAverage | Derivative | CumulativeSum | BucketScript;
export type MetricAggregationWithSettings =
@ -300,7 +310,8 @@ export type MetricAggregationWithSettings =
| Average
| MovingAverage
| MovingFunction
| Logs;
| Logs
| TopMetrics;
export type MetricAggregationWithMeta = ExtendedStats;
@ -344,7 +355,7 @@ export const isMetricAggregationWithInlineScript = (
metric: BaseMetricAggregation | MetricAggregationWithInlineScript
): metric is MetricAggregationWithInlineScript => metricAggregationConfig[metric.type].supportsInlineScript;
export const METRIC_AGGREGATION_TYPES = [
export const METRIC_AGGREGATION_TYPES: MetricAggregationType[] = [
'count',
'avg',
'sum',
@ -362,7 +373,8 @@ export const METRIC_AGGREGATION_TYPES = [
'serial_diff',
'cumulative_sum',
'bucket_script',
'top_metrics',
];
export const isMetricAggregationType = (s: MetricAggregationType | string): s is MetricAggregationType =>
METRIC_AGGREGATION_TYPES.includes(s);
METRIC_AGGREGATION_TYPES.includes(s as MetricAggregationType);

View File

@ -240,6 +240,23 @@ export const metricAggregationConfig: MetricsConfiguration = {
},
},
},
top_metrics: {
label: 'Top Metrics',
xpack: true,
requiresField: false,
isPipelineAgg: false,
supportsMissing: false,
supportsMultipleBucketPaths: false,
hasSettings: true,
supportsInlineScript: false,
versionRange: '>=7.7.0',
hasMeta: false,
defaults: {
settings: {
order: 'desc',
},
},
},
};
interface PipelineOption {

View File

@ -7,6 +7,7 @@ import { segmentStyles } from './styles';
const getStyles = stylesFactory((theme: GrafanaTheme, hidden: boolean) => {
return {
wrapper: css`
max-width: 500px;
display: flex;
flex-direction: column;
`,

View File

@ -1,6 +1,6 @@
import React from 'react';
import { EventsWithValidation, regexValidation, LegacyForms } from '@grafana/ui';
const { Select, Input, FormField } = LegacyForms;
const { Switch, Select, Input, FormField } = LegacyForms;
import { ElasticsearchOptions, Interval } from '../types';
import { DataSourceSettings, SelectableValue } from '@grafana/data';
import { gte, lt } from 'semver';
@ -20,6 +20,7 @@ const esVersions = [
{ label: '5.6+', value: '5.6.0' },
{ label: '6.0+', value: '6.0.0' },
{ label: '7.0+', value: '7.0.0' },
{ label: '7.7+', value: '7.7.0' },
];
type Props = {
@ -141,6 +142,14 @@ export const ElasticDetails = ({ value, onChange }: Props) => {
/>
</div>
</div>
<div className="gf-form-inline">
<Switch
label="X-Pack Enabled"
labelClass="width-13"
checked={value.jsonData.xpack || false}
onChange={jsonDataSwitchChangeHandler('xpack', value, onChange)}
/>
</div>
</div>
</>
);
@ -169,6 +178,20 @@ const jsonDataChangeHandler = (key: keyof ElasticsearchOptions, value: Props['va
});
};
/**
 * Builds an onChange handler for a LegacyForms Switch that writes the
 * checkbox state into `jsonData[key]` of the datasource settings.
 */
const jsonDataSwitchChangeHandler = (
  key: keyof ElasticsearchOptions,
  value: Props['value'],
  onChange: Props['onChange']
) => (event: React.SyntheticEvent<HTMLInputElement>) => {
  const jsonData = {
    ...value.jsonData,
    [key]: event.currentTarget.checked,
  };
  onChange({ ...value, jsonData });
};
const intervalHandler = (value: Props['value'], onChange: Props['onChange']) => (
option: SelectableValue<Interval | 'none'>
) => {

View File

@ -478,7 +478,7 @@ describe('ElasticDatasource', function (this: any) {
it('should return number fields', async () => {
const { ds } = getTestContext({ data, jsonData: { esVersion: 50 }, database: 'metricbeat' });
await expect(ds.getFields('number')).toEmitValuesWith((received) => {
await expect(ds.getFields(['number'])).toEmitValuesWith((received) => {
expect(received.length).toBe(1);
const fieldObjects = received[0];
const fields = map(fieldObjects, 'text');
@ -490,7 +490,7 @@ describe('ElasticDatasource', function (this: any) {
it('should return date fields', async () => {
const { ds } = getTestContext({ data, jsonData: { esVersion: 50 }, database: 'metricbeat' });
await expect(ds.getFields('date')).toEmitValuesWith((received) => {
await expect(ds.getFields(['date'])).toEmitValuesWith((received) => {
expect(received.length).toBe(1);
const fieldObjects = received[0];
const fields = map(fieldObjects, 'text');
@ -686,6 +686,16 @@ describe('ElasticDatasource', function (this: any) {
},
};
const dateFields = ['@timestamp_millis'];
const numberFields = [
'justification_blob.overall_vote_score',
'justification_blob.shallow.jsi.sdb.dsel2.bootlegged-gille.botness',
'justification_blob.shallow.jsi.sdb.dsel2.bootlegged-gille.general_algorithm_score',
'justification_blob.shallow.jsi.sdb.dsel2.uncombed-boris.botness',
'justification_blob.shallow.jsi.sdb.dsel2.uncombed-boris.general_algorithm_score',
'overall_vote_score',
];
it('should return nested fields', async () => {
const { ds } = getTestContext({ data, database: 'genuine.es7._mapping.response', jsonData: { esVersion: 70 } });
@ -716,31 +726,24 @@ describe('ElasticDatasource', function (this: any) {
it('should return number fields', async () => {
const { ds } = getTestContext({ data, database: 'genuine.es7._mapping.response', jsonData: { esVersion: 70 } });
await expect(ds.getFields('number')).toEmitValuesWith((received) => {
await expect(ds.getFields(['number'])).toEmitValuesWith((received) => {
expect(received.length).toBe(1);
const fieldObjects = received[0];
const fields = map(fieldObjects, 'text');
expect(fields).toEqual([
'justification_blob.overall_vote_score',
'justification_blob.shallow.jsi.sdb.dsel2.bootlegged-gille.botness',
'justification_blob.shallow.jsi.sdb.dsel2.bootlegged-gille.general_algorithm_score',
'justification_blob.shallow.jsi.sdb.dsel2.uncombed-boris.botness',
'justification_blob.shallow.jsi.sdb.dsel2.uncombed-boris.general_algorithm_score',
'overall_vote_score',
]);
expect(fields).toEqual(numberFields);
});
});
it('should return date fields', async () => {
const { ds } = getTestContext({ data, database: 'genuine.es7._mapping.response', jsonData: { esVersion: 70 } });
await expect(ds.getFields('date')).toEmitValuesWith((received) => {
await expect(ds.getFields(['date'])).toEmitValuesWith((received) => {
expect(received.length).toBe(1);
const fieldObjects = received[0];
const fields = map(fieldObjects, 'text');
expect(fields).toEqual(['@timestamp_millis']);
expect(fields).toEqual(dateFields);
});
});
});

View File

@ -64,6 +64,7 @@ export class ElasticDatasource extends DataSourceApi<ElasticsearchQuery, Elastic
index: string;
timeField: string;
esVersion: string;
xpack: boolean;
interval: string;
maxConcurrentShardRequests?: number;
queryBuilder: ElasticQueryBuilder;
@ -87,6 +88,7 @@ export class ElasticDatasource extends DataSourceApi<ElasticsearchQuery, Elastic
this.timeField = settingsData.timeField;
this.esVersion = coerceESVersion(settingsData.esVersion);
this.xpack = Boolean(settingsData.xpack);
this.indexPattern = new IndexPattern(this.index, settingsData.interval);
this.interval = settingsData.timeInterval;
this.maxConcurrentShardRequests = settingsData.maxConcurrentShardRequests;
@ -393,7 +395,7 @@ export class ElasticDatasource extends DataSourceApi<ElasticsearchQuery, Elastic
testDatasource() {
// validate that the index exist and has date field
return this.getFields('date')
return this.getFields(['date'])
.pipe(
mergeMap((dateFields) => {
const timeField: any = find(dateFields, { text: this.timeField });
@ -642,34 +644,33 @@ export class ElasticDatasource extends DataSourceApi<ElasticsearchQuery, Elastic
// TODO: instead of being a string, this could be a custom type representing all the elastic types
// FIXME: This doesn't seem to return actual MetricFindValues, we should either change the return type
// or fix the implementation.
getFields(type?: string, range?: TimeRange): Observable<MetricFindValue[]> {
getFields(type?: string[], range?: TimeRange): Observable<MetricFindValue[]> {
const typeMap: Record<string, string> = {
float: 'number',
double: 'number',
integer: 'number',
long: 'number',
date: 'date',
date_nanos: 'date',
string: 'string',
text: 'string',
scaled_float: 'number',
nested: 'nested',
histogram: 'number',
};
return this.get('/_mapping', range).pipe(
map((result) => {
const typeMap: any = {
float: 'number',
double: 'number',
integer: 'number',
long: 'number',
date: 'date',
date_nanos: 'date',
string: 'string',
text: 'string',
scaled_float: 'number',
nested: 'nested',
histogram: 'number',
};
const shouldAddField = (obj: any, key: string) => {
if (this.isMetadataField(key)) {
return false;
}
if (!type) {
if (!type || type.length === 0) {
return true;
}
// equal query type filter, or via typemap translation
return type === obj.type || type === typeMap[obj.type];
return type.includes(obj.type) || type.includes(typeMap[obj.type]);
};
// Store subfield names: [system, process, cpu, total] -> system.process.cpu.total

View File

@ -14,11 +14,18 @@ import { ElasticsearchAggregation, ElasticsearchQuery } from './types';
import {
ExtendedStatMetaType,
isMetricAggregationWithField,
TopMetrics,
} from './components/QueryEditor/MetricAggregationsEditor/aggregations';
import { describeMetric, getScriptValue } from './utils';
import { metricAggregationConfig } from './components/QueryEditor/MetricAggregationsEditor/utils';
const HIGHLIGHT_TAGS_EXP = `${queryDef.highlightTags.pre}([^@]+)${queryDef.highlightTags.post}`;
type TopMetricMetric = Record<string, number>;
interface TopMetricBucket {
top: Array<{
metrics: TopMetricMetric;
}>;
}
export class ElasticResponse {
constructor(private targets: ElasticsearchQuery[], private response: any) {
@ -103,6 +110,33 @@ export class ElasticResponse {
break;
}
case 'top_metrics': {
if (metric.settings?.metrics?.length) {
for (const metricField of metric.settings?.metrics) {
newSeries = {
datapoints: [],
metric: metric.type,
props: props,
refId: target.refId,
field: metricField,
};
for (let i = 0; i < esAgg.buckets.length; i++) {
const bucket = esAgg.buckets[i];
const stats = bucket[metric.id] as TopMetricBucket;
const values = stats.top.map((hit) => {
if (hit.metrics[metricField]) {
return hit.metrics[metricField];
}
return null;
});
const point = [values[values.length - 1], bucket.key];
newSeries.datapoints.push(point);
}
seriesList.push(newSeries);
}
}
break;
}
default: {
newSeries = {
datapoints: [],
@ -195,6 +229,23 @@ export class ElasticResponse {
}
break;
}
case 'top_metrics': {
const baseName = this.getMetricName(metric.type);
if (metric.settings?.metrics) {
for (const metricField of metric.settings.metrics) {
// If we selected more than one metric we also add each metric name
const metricName = metric.settings.metrics.length > 1 ? `${baseName} ${metricField}` : baseName;
const stats = bucket[metric.id] as TopMetricBucket;
// Size of top_metrics is fixed to 1.
addMetricValue(values, metricName, stats.top[0].metrics[metricField]);
}
}
break;
}
default: {
let metricName = this.getMetricName(metric.type);
const otherMetrics = filter(target.metrics, { type: metric.type });
@ -276,7 +327,7 @@ export class ElasticResponse {
return metric;
}
private getSeriesName(series: any, target: ElasticsearchQuery, metricTypeCount: any) {
private getSeriesName(series: any, target: ElasticsearchQuery, dedup: boolean) {
let metricName = this.getMetricName(series.metric);
if (target.alias) {
@ -339,19 +390,22 @@ export class ElasticResponse {
name += series.props[propName] + ' ';
}
if (metricTypeCount === 1) {
return name.trim();
if (dedup) {
return name.trim() + ' ' + metricName;
}
return name.trim() + ' ' + metricName;
return name.trim();
}
nameSeries(seriesList: any, target: ElasticsearchQuery) {
const metricTypeCount = uniq(map(seriesList, 'metric')).length;
const hasTopMetricWithMultipleMetrics = (target.metrics?.filter(
(m) => m.type === 'top_metrics'
) as TopMetrics[]).some((m) => (m?.settings?.metrics?.length || 0) > 1);
for (let i = 0; i < seriesList.length; i++) {
const series = seriesList[i];
series.target = this.getSeriesName(series, target, metricTypeCount);
series.target = this.getSeriesName(series, target, metricTypeCount > 1 || hasTopMetricWithMultipleMetrics);
}
}

View File

@ -45,12 +45,12 @@ describe('useFields hook', () => {
{ wrapper, initialProps: 'cardinality' }
);
result.current();
expect(getFields).toHaveBeenLastCalledWith(undefined, timeRange);
expect(getFields).toHaveBeenLastCalledWith([], timeRange);
// All other metric aggregations only work on numbers
rerender('avg');
result.current();
expect(getFields).toHaveBeenLastCalledWith('number', timeRange);
expect(getFields).toHaveBeenLastCalledWith(['number'], timeRange);
//
// BUCKET AGGREGATIONS
@ -58,16 +58,21 @@ describe('useFields hook', () => {
// Date Histogram only works on dates
rerender('date_histogram');
result.current();
expect(getFields).toHaveBeenLastCalledWith('date', timeRange);
expect(getFields).toHaveBeenLastCalledWith(['date'], timeRange);
// Geohash Grid only works on geo_point data
rerender('geohash_grid');
result.current();
expect(getFields).toHaveBeenLastCalledWith('geo_point', timeRange);
expect(getFields).toHaveBeenLastCalledWith(['geo_point'], timeRange);
// All other bucket aggregation work on any kind of data
rerender('terms');
result.current();
expect(getFields).toHaveBeenLastCalledWith(undefined, timeRange);
expect(getFields).toHaveBeenLastCalledWith([], timeRange);
// top_metrics only works on numeric data in 7.7
rerender('top_metrics');
result.current();
expect(getFields).toHaveBeenLastCalledWith(['number'], timeRange);
});
});

View File

@ -1,5 +1,8 @@
import { MetricFindValue, SelectableValue } from '@grafana/data';
import { BucketAggregationType } from '../components/QueryEditor/BucketAggregationsEditor/aggregations';
import {
BucketAggregationType,
isBucketAggregationType,
} from '../components/QueryEditor/BucketAggregationsEditor/aggregations';
import { useDatasource, useRange } from '../components/QueryEditor/ElasticsearchQueryContext';
import {
isMetricAggregationType,
@ -8,27 +11,34 @@ import {
type AggregationType = BucketAggregationType | MetricAggregationType;
const getFilter = (aggregationType: AggregationType) => {
// For all metric types we want only numbers, except for cardinality
// TODO: To have a more configuration-driven editor, it would be nice to move this logic in
// metricAggregationConfig and bucketAggregationConfig so that each aggregation type can specify on
// which kind of data it operates.
if (isMetricAggregationType(aggregationType)) {
if (aggregationType !== 'cardinality') {
return 'number';
const getFilter = (type: AggregationType) => {
if (isMetricAggregationType(type)) {
switch (type) {
case 'cardinality':
return [];
case 'top_metrics':
// top_metrics was introduced in 7.7 where `metrics` only supported number:
// https://www.elastic.co/guide/en/elasticsearch/reference/7.7/search-aggregations-metrics-top-metrics.html#_metrics
// TODO: starting from 7.11 it supports ips and keywords as well:
// https://www.elastic.co/guide/en/elasticsearch/reference/7.11/search-aggregations-metrics-top-metrics.html#_metrics
return ['number'];
default:
return ['number'];
}
return void 0;
}
switch (aggregationType) {
case 'date_histogram':
return 'date';
case 'geohash_grid':
return 'geo_point';
default:
return void 0;
if (isBucketAggregationType(type)) {
switch (type) {
case 'date_histogram':
return ['date'];
case 'geohash_grid':
return ['geo_point'];
default:
return [];
}
}
return [];
};
const toSelectableValue = ({ text }: MetricFindValue): SelectableValue<string> => ({
@ -37,17 +47,24 @@ const toSelectableValue = ({ text }: MetricFindValue): SelectableValue<string> =
});
/**
* Returns a function to query the configured datasource for autocomplete values for the specified aggregation type.
* Returns a function to query the configured datasource for autocomplete values for the specified aggregation type or data types.
* Each aggregation can be run on different types, for example avg only operates on numeric fields, geohash_grid only on geo_point fields.
* If an aggregation type is provided, the promise will resolve with all fields suitable to be used as a field for the given aggregation.
* If an array of types is provided, the promise will resolve with all the fields matching the provided types.
* @param type the type of aggregation (or array of field data types) to get fields for
*/
export const useFields = (aggregationType: AggregationType) => {
export const useFields = (type: AggregationType | string[]) => {
const datasource = useDatasource();
const range = useRange();
const filter = getFilter(aggregationType);
const filter = Array.isArray(type) ? type : getFilter(type);
let rawFields: MetricFindValue[];
return async () => {
const rawFields = await datasource.getFields(filter, range).toPromise();
return rawFields.map(toSelectableValue);
return async (q?: string) => {
// _mapping doesn't support filtering, we avoid sending a request everytime q changes
if (!rawFields) {
rawFields = await datasource.getFields(filter, range).toPromise();
}
return rawFields.filter(({ text }) => q === undefined || text.includes(q)).map(toSelectableValue);
};
};

View File

@ -298,7 +298,7 @@ export class ElasticQueryBuilder {
}
const aggField: any = {};
let metricAgg: any = null;
let metricAgg: any = {};
if (isPipelineAggregation(metric)) {
if (isPipelineAggregationWithMultipleBucketPaths(metric)) {
@ -353,32 +353,47 @@ export class ElasticQueryBuilder {
// Elasticsearch isn't generally too picky about the data types in the request body,
// however some fields are required to be numeric.
// Users might have already created some of those before, when the values were numbers.
if (metric.type === 'moving_avg') {
metricAgg = {
...metricAgg,
...(metricAgg?.window !== undefined && { window: this.toNumber(metricAgg.window) }),
...(metricAgg?.predict !== undefined && { predict: this.toNumber(metricAgg.predict) }),
...(isMovingAverageWithModelSettings(metric) && {
settings: {
...metricAgg.settings,
...Object.fromEntries(
Object.entries(metricAgg.settings || {})
// Only format properties that are required to be numbers
.filter(([settingName]) => ['alpha', 'beta', 'gamma', 'period'].includes(settingName))
// omitting undefined
.filter(([_, stringValue]) => stringValue !== undefined)
.map(([_, stringValue]) => [_, this.toNumber(stringValue)])
),
},
}),
};
} else if (metric.type === 'serial_diff') {
metricAgg = {
...metricAgg,
...(metricAgg.lag !== undefined && {
lag: this.toNumber(metricAgg.lag),
}),
};
switch (metric.type) {
case 'moving_avg':
metricAgg = {
...metricAgg,
...(metricAgg?.window !== undefined && { window: this.toNumber(metricAgg.window) }),
...(metricAgg?.predict !== undefined && { predict: this.toNumber(metricAgg.predict) }),
...(isMovingAverageWithModelSettings(metric) && {
settings: {
...metricAgg.settings,
...Object.fromEntries(
Object.entries(metricAgg.settings || {})
// Only format properties that are required to be numbers
.filter(([settingName]) => ['alpha', 'beta', 'gamma', 'period'].includes(settingName))
// omitting undefined
.filter(([_, stringValue]) => stringValue !== undefined)
.map(([_, stringValue]) => [_, this.toNumber(stringValue)])
),
},
}),
};
break;
case 'serial_diff':
metricAgg = {
...metricAgg,
...(metricAgg.lag !== undefined && {
lag: this.toNumber(metricAgg.lag),
}),
};
break;
case 'top_metrics':
metricAgg = {
metrics: metric.settings?.metrics?.map((field) => ({ field })),
size: 1,
};
if (metric.settings?.orderBy) {
metricAgg.sort = [{ [metric.settings?.orderBy]: metric.settings?.order }];
}
break;
}
}

View File

@ -603,6 +603,75 @@ describe('ElasticResponse', () => {
});
});
describe('with top_metrics', () => {
// Shared fixtures: a single top_metrics aggregation selecting two fields,
// grouped under a date_histogram bucket aggregation.
beforeEach(() => {
targets = [
{
refId: 'A',
metrics: [
{
type: 'top_metrics',
settings: {
order: 'top',
orderBy: '@timestamp',
metrics: ['@value', '@anotherValue'],
},
id: '1',
},
],
bucketAggs: [{ type: 'date_histogram', id: '2' }],
},
];
// Mimics the raw Elasticsearch response: one bucket per timestamp, each
// carrying a single top_metrics hit (size is fixed to 1) with both fields.
response = {
responses: [
{
aggregations: {
'2': {
buckets: [
{
key: new Date('2021-01-01T00:00:00.000Z').valueOf(),
key_as_string: '2021-01-01T00:00:00.000Z',
'1': {
top: [{ sort: ['2021-01-01T00:00:00.000Z'], metrics: { '@value': 1, '@anotherValue': 2 } }],
},
},
{
key: new Date('2021-01-01T00:00:10.000Z').valueOf(),
key_as_string: '2021-01-01T00:00:10.000Z',
'1': {
top: [{ sort: ['2021-01-01T00:00:10.000Z'], metrics: { '@value': 1, '@anotherValue': 2 } }],
},
},
],
},
},
},
],
};
});
// Each field selected in top_metrics should produce its own series, named
// "Top Metrics <field>", with one datapoint per bucket.
it('should return 2 series', () => {
const result = new ElasticResponse(targets, response).getTimeSeries();
expect(result.data.length).toBe(2);
const firstSeries = result.data[0];
expect(firstSeries.target).toBe('Top Metrics @value');
expect(firstSeries.datapoints.length).toBe(2);
expect(firstSeries.datapoints).toEqual([
[1, new Date('2021-01-01T00:00:00.000Z').valueOf()],
[1, new Date('2021-01-01T00:00:10.000Z').valueOf()],
]);
const secondSeries = result.data[1];
expect(secondSeries.target).toBe('Top Metrics @anotherValue');
expect(secondSeries.datapoints.length).toBe(2);
expect(secondSeries.datapoints).toEqual([
[2, new Date('2021-01-01T00:00:00.000Z').valueOf()],
[2, new Date('2021-01-01T00:00:10.000Z').valueOf()],
]);
});
});
describe('single group by with alias pattern', () => {
let result: any;

View File

@ -8,8 +8,9 @@ describe('ElasticQueryBuilder', () => {
const builder56 = new ElasticQueryBuilder({ timeField: '@timestamp', esVersion: '5.6.0' });
const builder6x = new ElasticQueryBuilder({ timeField: '@timestamp', esVersion: '6.0.0' });
const builder7x = new ElasticQueryBuilder({ timeField: '@timestamp', esVersion: '7.0.0' });
const builder77 = new ElasticQueryBuilder({ timeField: '@timestamp', esVersion: '7.7.0' });
const allBuilders = [builder, builder5x, builder56, builder6x, builder7x];
const allBuilders = [builder, builder5x, builder56, builder6x, builder7x, builder77];
allBuilders.forEach((builder) => {
describe(`version ${builder.esVersion}`, () => {
@ -433,6 +434,36 @@ describe('ElasticQueryBuilder', () => {
expect(firstLevel.aggs['4']).toBe(undefined);
});
// top_metrics must be serialized with a fixed `size` of 1, a `metrics` array
// of `{ field }` entries, and a `sort` built from the orderBy/order settings.
it('with top_metrics', () => {
const query = builder.build({
refId: 'A',
metrics: [
{
id: '2',
type: 'top_metrics',
settings: {
order: 'desc',
orderBy: '@timestamp',
metrics: ['@value'],
},
},
],
bucketAggs: [{ type: 'date_histogram', field: '@timestamp', id: '3' }],
});
const firstLevel = query.aggs['3'];
expect(firstLevel.aggs['2']).not.toBe(undefined);
expect(firstLevel.aggs['2'].top_metrics).not.toBe(undefined);
expect(firstLevel.aggs['2'].top_metrics.metrics).not.toBe(undefined);
expect(firstLevel.aggs['2'].top_metrics.size).not.toBe(undefined);
expect(firstLevel.aggs['2'].top_metrics.sort).not.toBe(undefined);
expect(firstLevel.aggs['2'].top_metrics.metrics.length).toBe(1);
expect(firstLevel.aggs['2'].top_metrics.metrics).toEqual([{ field: '@value' }]);
expect(firstLevel.aggs['2'].top_metrics.sort).toEqual([{ '@timestamp': 'desc' }]);
expect(firstLevel.aggs['2'].top_metrics.size).toBe(1);
});
it('with derivative', () => {
const query = builder.build({
refId: 'A',

View File

@ -13,6 +13,7 @@ export type Interval = 'Hourly' | 'Daily' | 'Weekly' | 'Monthly' | 'Yearly';
export interface ElasticsearchOptions extends DataSourceJsonData {
timeField: string;
esVersion: string;
xpack?: boolean;
interval?: Interval;
timeInterval: string;
maxConcurrentShardRequests?: number;
@ -27,6 +28,7 @@ interface MetricConfiguration<T extends MetricAggregationType> {
supportsInlineScript: boolean;
supportsMissing: boolean;
isPipelineAgg: boolean;
xpack?: boolean;
/**
* A valid semver range for which the metric is known to be available.
* If omitted defaults to '*'.