Elasticsearch: Remove enableElasticsearchBackendQuerying feature toggle (#88668)
* wip
* Fix newly introduced any
* Remove toggle
* Remove unused code in LegacyQueryRunner
This commit is contained in:
parent 8dcbd74803
commit 023857625a
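
In essence, the commit deletes every `config.featureToggles.enableElasticsearchBackendQuerying` check and keeps only the backend code path. A minimal TypeScript sketch of the pattern, with `getResource`, `legacyRequest`, and `featureToggles` as hypothetical stand-ins for the real datasource members:

```typescript
import { Observable, from, of } from 'rxjs';
import { catchError } from 'rxjs/operators';

// Hypothetical stand-ins for the datasource's backend resource call and the legacy runner.
declare const getResource: (path: string) => Promise<unknown>;
declare const legacyRequest: (method: string, path: string) => Observable<unknown>;
declare const featureToggles: { enableElasticsearchBackendQuerying?: boolean };

// Before: call sites branched on the toggle.
function fetchIndexBefore(path: string): Observable<unknown> {
  const request$ = featureToggles.enableElasticsearchBackendQuerying
    ? from(getResource(path)) // backend-driven path
    : legacyRequest('GET', path); // legacy frontend path
  return request$.pipe(catchError((err) => of({ err })));
}

// After: the toggle is GA and always on, so the branch collapses to the backend path.
function fetchIndexAfter(path: string): Observable<unknown> {
  return from(getResource(path)).pipe(catchError((err) => of({ err })));
}
```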
@@ -6289,11 +6289,6 @@ exports[`better eslint`] = {
       [0, 0, 0, "Unexpected any. Specify a different type.", "2"],
       [0, 0, 0, "Unexpected any. Specify a different type.", "3"]
     ],
-    "public/app/plugins/datasource/elasticsearch/LegacyQueryRunner.ts:5381": [
-      [0, 0, 0, "Unexpected any. Specify a different type.", "0"],
-      [0, 0, 0, "Do not use any type assertions.", "1"],
-      [0, 0, 0, "Unexpected any. Specify a different type.", "2"]
-    ],
     "public/app/plugins/datasource/elasticsearch/QueryBuilder.ts:5381": [
       [0, 0, 0, "Unexpected any. Specify a different type.", "0"],
       [0, 0, 0, "Unexpected any. Specify a different type.", "1"],
@@ -21,50 +21,49 @@ For more information about feature release stages, refer to [Release life cycle
 
 Most [generally available](https://grafana.com/docs/release-life-cycle/#general-availability) features are enabled by default. You can disable these features by setting the feature flag to "false" in the configuration.
 
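Grafana's frontend reads these flags at runtime from `config.featureToggles`; a minimal TypeScript sketch of such a check, using `nestedFolders` from the table below purely as an example toggle:

```typescript
import { config } from '@grafana/runtime';

// Every toggle is an optional boolean on config.featureToggles.
// GA toggles such as `nestedFolders` are on unless disabled in configuration.
if (config.featureToggles.nestedFolders) {
  // feature-gated code path
}
```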
 | Feature toggle name | Description | Enabled by default |
 | ------------------- | ----------- | ------------------ |
 | `disableEnvelopeEncryption` | Disable envelope encryption (emergency only) | |
 | `publicDashboards` | [Deprecated] Public dashboards are now enabled by default; to disable them, use the configuration setting. This feature toggle will be removed in the next major version. | Yes |
 | `featureHighlights` | Highlight Grafana Enterprise features | |
 | `correlations` | Correlations page | Yes |
 | `exploreContentOutline` | Content outline sidebar | Yes |
 | `cloudWatchCrossAccountQuerying` | Enables cross-account querying in CloudWatch datasources | Yes |
 | `nestedFolders` | Enable folder nesting | Yes |
 | `logsContextDatasourceUi` | Allow datasource to provide custom UI for context view | Yes |
 | `lokiQuerySplitting` | Split large interval queries into subqueries with smaller time intervals | Yes |
 | `prometheusMetricEncyclopedia` | Adds the metrics explorer component to the Prometheus query builder as an option in metric select | Yes |
 | `influxdbBackendMigration` | Query InfluxDB InfluxQL without the proxy | Yes |
 | `prometheusDataplane` | Changes responses from Prometheus to be compliant with the dataplane specification. In particular, when this feature toggle is active, the numeric `Field.Name` is set from 'Value' to the value of the `__name__` label. | Yes |
 | `lokiMetricDataplane` | Changes metric responses from Loki to be compliant with the dataplane specification. | Yes |
 | `dataplaneFrontendFallback` | Support dataplane contract field name change for transformations and field name matchers where the name is different | Yes |
-| `enableElasticsearchBackendQuerying` | Enable the processing of queries and responses in the Elasticsearch data source through backend | Yes |
 | `recordedQueriesMulti` | Enables writing multiple items from a single query within Recorded Queries | Yes |
 | `logsExploreTableVisualisation` | A table visualisation for logs in Explore | Yes |
 | `transformationsRedesign` | Enables the transformations redesign | Yes |
 | `traceQLStreaming` | Enables response streaming of TraceQL queries of the Tempo data source | |
 | `awsAsyncQueryCaching` | Enable caching for async queries for Redshift and Athena. Requires that the datasource has caching and async query support enabled | Yes |
 | `prometheusConfigOverhaulAuth` | Update the Prometheus configuration page with the new auth component | Yes |
 | `alertingNoDataErrorExecution` | Changes how Alerting state manager handles execution of NoData/Error | Yes |
 | `angularDeprecationUI` | Display Angular warnings in dashboards and panels | Yes |
 | `dashgpt` | Enable AI powered features in dashboards | Yes |
 | `alertingInsights` | Show the new alerting insights landing page | Yes |
 | `panelMonitoring` | Enables panel monitoring through logs and measurements | Yes |
 | `kubernetesPlaylists` | Use the kubernetes API in the frontend for playlists, and route /api/playlist requests to k8s | Yes |
 | `recoveryThreshold` | Enables feature recovery threshold (aka hysteresis) for threshold server-side expression | Yes |
 | `lokiStructuredMetadata` | Enables the loki data source to request structured metadata from the Loki server | Yes |
 | `awsDatasourcesNewFormStyling` | Applies new form styling for configuration and query editors in AWS plugins | Yes |
 | `managedPluginsInstall` | Install managed plugins directly from plugins catalog | Yes |
 | `annotationPermissionUpdate` | Change the way annotation permissions work by scoping them to folders and dashboards. | Yes |
 | `ssoSettingsApi` | Enables the SSO settings API and the OAuth configuration UIs in Grafana | Yes |
 | `logsInfiniteScrolling` | Enables infinite scrolling for the Logs panel in Explore and Dashboards | Yes |
 | `exploreMetrics` | Enables the new Explore Metrics core app | Yes |
 | `alertingSimplifiedRouting` | Enables users to easily configure alert notifications by specifying a contact point directly when editing or creating an alert rule | Yes |
 | `logRowsPopoverMenu` | Enable filtering menu displayed when text of a log line is selected | Yes |
 | `lokiQueryHints` | Enables query hints for Loki | Yes |
 | `alertingQueryOptimization` | Optimizes eligible queries in order to reduce load on datasources | |
 | `betterPageScrolling` | Removes CustomScrollbar from the UI, relying on native browser scrollbars | Yes |
 | `cloudWatchNewLabelParsing` | Updates CloudWatch label parsing to be more accurate | Yes |
 | `pluginProxyPreserveTrailingSlash` | Preserve plugin proxy trailing slash. | Yes |
 
 ## Public preview feature toggles
@@ -73,7 +73,6 @@ export interface FeatureToggles {
   unifiedRequestLog?: boolean;
   renderAuthJWT?: boolean;
   refactorVariablesTimeRange?: boolean;
-  enableElasticsearchBackendQuerying?: boolean;
   faroDatasourceSelector?: boolean;
   enableDatagridEditing?: boolean;
   extraThemes?: boolean;
@@ -413,14 +413,6 @@ var (
 			Owner:             grafanaDashboardsSquad,
 			HideFromAdminPage: true, // Non-feature, used to test out a bug fix that impacts the performance of template variables.
 		},
-		{
-			Name:           "enableElasticsearchBackendQuerying",
-			Description:    "Enable the processing of queries and responses in the Elasticsearch data source through backend",
-			Stage:          FeatureStageGeneralAvailability,
-			Owner:          grafanaObservabilityLogsSquad,
-			Expression:     "true", // enabled by default
-			AllowSelfServe: true,
-		},
 		{
 			Name:        "faroDatasourceSelector",
 			Description: "Enable the data source selector within the Frontend Apps section of the Frontend Observability",
@@ -54,7 +54,6 @@ alertStateHistoryLokiOnly,experimental,@grafana/alerting-squad,false,false,false
 unifiedRequestLog,experimental,@grafana/grafana-backend-group,false,false,false
 renderAuthJWT,preview,@grafana/grafana-as-code,false,false,false
 refactorVariablesTimeRange,preview,@grafana/dashboards-squad,false,false,false
-enableElasticsearchBackendQuerying,GA,@grafana/observability-logs,false,false,false
 faroDatasourceSelector,preview,@grafana/app-o11y,false,false,true
 enableDatagridEditing,preview,@grafana/dataviz-squad,false,false,true
 extraThemes,experimental,@grafana/grafana-frontend-platform,false,false,true
@@ -227,10 +227,6 @@ const (
 	// Refactor time range variables flow to reduce number of API calls made when query variables are chained
 	FlagRefactorVariablesTimeRange = "refactorVariablesTimeRange"
 
-	// FlagEnableElasticsearchBackendQuerying
-	// Enable the processing of queries and responses in the Elasticsearch data source through backend
-	FlagEnableElasticsearchBackendQuerying = "enableElasticsearchBackendQuerying"
-
 	// FlagFaroDatasourceSelector
 	// Enable the data source selector within the Frontend Apps section of the Frontend Observability
 	FlagFaroDatasourceSelector = "faroDatasourceSelector"
@@ -2179,7 +2179,8 @@
     "metadata": {
       "name": "enableElasticsearchBackendQuerying",
       "resourceVersion": "1717578796182",
-      "creationTimestamp": "2024-06-05T09:13:16Z"
+      "creationTimestamp": "2024-06-05T09:13:16Z",
+      "deletionTimestamp": "2024-06-05T14:07:54Z"
     },
     "spec": {
       "description": "Enable the processing of queries and responses in the Elasticsearch data source through backend",
@@ -90,6 +90,10 @@ func TestIndexPattern(t *testing.T) {
 		require.Equal(t, indices[0], "data-2018-moredata-05.15")
 	})
 
+	indexPatternScenario(t, noInterval, "", timeRange, func(indices []string) {
+		require.Len(t, indices, 0)
+	})
+
 	t.Run("Should return 01 week", func(t *testing.T) {
 		from = time.Date(2018, 1, 15, 17, 50, 0, 0, time.UTC)
 		to = time.Date(2018, 1, 15, 17, 55, 0, 0, time.UTC)
@@ -1,267 +0,0 @@
import { first as _first, cloneDeep } from 'lodash';
import { lastValueFrom, Observable, of, throwError } from 'rxjs';
import { catchError, map, tap } from 'rxjs/operators';

import {
  DataFrame,
  DataQueryRequest,
  DataQueryResponse,
  dateTime,
  ensureTimeField,
  Field,
  LogRowContextOptions,
  LogRowContextQueryDirection,
  LogRowModel,
} from '@grafana/data';
import { BackendSrvRequest, getBackendSrv, TemplateSrv } from '@grafana/runtime';

import { ElasticResponse } from './ElasticResponse';
import { ElasticDatasource, enhanceDataFrameWithDataLinks } from './datasource';
import { defaultBucketAgg, hasMetricOfType } from './queryDef';
import { trackQuery } from './tracking';
import { DataLinkConfig, ElasticsearchQuery, Logs } from './types';

export class LegacyQueryRunner {
  datasource: ElasticDatasource;
  templateSrv: TemplateSrv;

  constructor(datasource: ElasticDatasource, templateSrv: TemplateSrv) {
    this.datasource = datasource;
    this.templateSrv = templateSrv;
  }

  request(method: string, url: string, data?: BackendSrvRequest['data'], headers?: BackendSrvRequest['headers']) {
    if (!this.datasource.isProxyAccess) {
      const error = new Error(
        'Browser access mode in the Elasticsearch datasource is no longer available. Switch to server access mode.'
      );
      return throwError(() => error);
    }

    const options: BackendSrvRequest = {
      url: this.datasource.url + '/' + url,
      method,
      data,
      headers,
    };

    if (method === 'POST') {
      options.headers = options.headers ?? {};
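      // Elasticsearch's _msearch endpoint expects newline-delimited JSON (NDJSON) bodies,
      // which is how the payloads built below are formatted.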
      options.headers['Content-Type'] = 'application/x-ndjson';
    }

    if (this.datasource.basicAuth || this.datasource.withCredentials) {
      options.withCredentials = true;
    }
    if (this.datasource.basicAuth) {
      options.headers = {
        Authorization: this.datasource.basicAuth,
      };
    }

    return getBackendSrv()
      .fetch<any>(options)
      .pipe(
        map((results) => {
          results.data.$$config = results.config;
          return results.data;
        }),
        catchError((err) => {
          if (err.data) {
            const message = err.data.error?.reason ?? err.data.message ?? 'Unknown error';

            return throwError({
              message,
              error: err.data.error,
            });
          }

          return throwError(err);
        })
      );
  }

  async logContextQuery(row: LogRowModel, options?: LogRowContextOptions): Promise<{ data: DataFrame[] }> {
    const sortField = row.dataFrame.fields.find((f) => f.name === 'sort');
    const searchAfter = sortField?.values[row.rowIndex] || [row.timeEpochMs];
    const sort = options?.direction === LogRowContextQueryDirection.Forward ? 'asc' : 'desc';

    const header =
      options?.direction === LogRowContextQueryDirection.Forward
        ? this.datasource.getQueryHeader('query_then_fetch', dateTime(row.timeEpochMs))
        : this.datasource.getQueryHeader('query_then_fetch', undefined, dateTime(row.timeEpochMs));

    const limit = options?.limit ?? 10;
    const esQuery = JSON.stringify({
      size: limit,
      query: {
        bool: {
          filter: [
            {
              range: {
                [this.datasource.timeField]: {
                  [options?.direction === LogRowContextQueryDirection.Forward ? 'gte' : 'lte']: row.timeEpochMs,
                  format: 'epoch_millis',
                },
              },
            },
          ],
        },
      },
      sort: [{ [this.datasource.timeField]: sort }, { _doc: sort }],
      search_after: searchAfter,
    });
    const payload = [header, esQuery].join('\n') + '\n';
    const url = this.datasource.getMultiSearchUrl();
    const response = await lastValueFrom(this.request('POST', url, payload));
    const targets: ElasticsearchQuery[] = [{ refId: `${row.dataFrame.refId}`, metrics: [{ type: 'logs', id: '1' }] }];
    const elasticResponse = new ElasticResponse(targets, transformHitsBasedOnDirection(response, sort));
    const logResponse = elasticResponse.getLogs(this.datasource.logMessageField, this.datasource.logLevelField);
    const dataFrame = _first(logResponse.data);
    if (!dataFrame) {
      return { data: [] };
    }
    /**
     * The LogRowContext requires there is a field in the dataFrame.fields
     * named `ts` for timestamp and `line` for the actual log line to display.
     * Unfortunately these fields are hardcoded and are required for the lines to
     * be properly displayed. This code just copies the fields based on this.timeField
     * and this.logMessageField and recreates the dataFrame so it works.
     */
    const timestampField = dataFrame.fields.find((f: Field) => f.name === this.datasource.timeField);
    const lineField = dataFrame.fields.find((f: Field) => f.name === this.datasource.logMessageField);
    const otherFields = dataFrame.fields.filter((f: Field) => f !== timestampField && f !== lineField);
    if (timestampField && lineField) {
      return {
        data: [
          {
            ...dataFrame,
            fields: [ensureTimeField(timestampField), lineField, ...otherFields],
          },
        ],
      };
    }
    return logResponse;
  }

  query(request: DataQueryRequest<ElasticsearchQuery>): Observable<DataQueryResponse> {
    let payload = '';
    const targets = this.datasource.interpolateVariablesInQueries(
      cloneDeep(request.targets),
      request.scopedVars,
      request.filters
    );
    const sentTargets: ElasticsearchQuery[] = [];
    let targetsContainsLogsQuery = targets.some((target) => hasMetricOfType(target, 'logs'));

    const logLimits: Array<number | undefined> = [];

    for (const target of targets) {
      if (target.hide) {
        continue;
      }

      let queryObj;
      if (hasMetricOfType(target, 'logs')) {
        // FIXME: All this logic here should be in the query builder.
        // When moving to the BE-only implementation we should remove this and let the BE
        // handle this.
        // TODO: defaultBucketAgg creates a date_histogram aggregation without a field, so it falls back to
        // the configured timeField. We should allow people to use a different time field here.
        target.bucketAggs = [defaultBucketAgg()];

        const log = target.metrics?.find((m) => m.type === 'logs') as Logs;
        const limit = log.settings?.limit ? parseInt(log.settings?.limit, 10) : 500;
        logLimits.push(limit);

        target.metrics = [];
        // Setting this for metrics queries that are typed as logs
        queryObj = this.datasource.queryBuilder.getLogsQuery(target, limit);
      } else {
        logLimits.push();
        if (target.alias) {
          target.alias = this.datasource.interpolateLuceneQuery(target.alias, request.scopedVars);
        }

        queryObj = this.datasource.queryBuilder.build(target);
      }

      const esQuery = JSON.stringify(queryObj);

      const searchType = 'query_then_fetch';
      const header = this.datasource.getQueryHeader(searchType, request.range.from, request.range.to);
      payload += header + '\n';

      payload += esQuery + '\n';

      sentTargets.push(target);
    }

    if (sentTargets.length === 0) {
      return of({ data: [] });
    }

    // We replace the range here for actual values. We need to replace it together with enclosing "" so that we replace
    // it as an integer not as string with digits. This is because elastic will convert the string only if the time
    // field is specified as type date (which probably should) but can also be specified as integer (millisecond epoch)
    // and then sending string will error out.
    payload = payload.replace(/"\$timeFrom"/g, request.range.from.valueOf().toString());
    payload = payload.replace(/"\$timeTo"/g, request.range.to.valueOf().toString());
    payload = this.templateSrv.replace(payload, request.scopedVars);

    const url = this.datasource.getMultiSearchUrl();

    const start = new Date();
    return this.request('POST', url, payload).pipe(
      map((res) => {
        const er = new ElasticResponse(sentTargets, res);

        // TODO: This needs to be revisited, it seems wrong to process ALL the sent queries as logs if only one of them was a log query
        if (targetsContainsLogsQuery) {
          const response = er.getLogs(this.datasource.logMessageField, this.datasource.logLevelField);

          response.data.forEach((dataFrame, index) => {
            enhanceDataFrame(dataFrame, this.datasource.dataLinks, logLimits[index]);
          });
          return response;
        }

        return er.getTimeSeries();
      }),
      tap((response) => trackQuery(response, request, start))
    );
  }
}

function transformHitsBasedOnDirection(response: any, direction: 'asc' | 'desc') {
  if (direction === 'desc') {
    return response;
  }
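  // 'asc' means a forward context query: reverse the ascending hits so both
  // directions come back in the same (descending) time order.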
  const actualResponse = response.responses[0];
  return {
    ...response,
    responses: [
      {
        ...actualResponse,
        hits: {
          ...actualResponse.hits,
          hits: actualResponse.hits.hits.reverse(),
        },
      },
    ],
  };
}

/**
 * Modifies dataFrame and adds dataLinks from the config.
 * Exported for tests.
 */
export function enhanceDataFrame(dataFrame: DataFrame, dataLinks: DataLinkConfig[], limit?: number) {
  if (limit) {
    dataFrame.meta = {
      ...dataFrame.meta,
      limit,
    };
  }
  enhanceDataFrameWithDataLinks(dataFrame, dataLinks);
}
File diff suppressed because it is too large
@@ -43,7 +43,6 @@ import {
 import {
   DataSourceWithBackend,
   getDataSourceSrv,
-  config,
   BackendSrvRequest,
   TemplateSrv,
   getTemplateSrv,
@@ -51,7 +50,6 @@ import {
 
 import { IndexPattern, intervalMap } from './IndexPattern';
 import LanguageProvider from './LanguageProvider';
-import { LegacyQueryRunner } from './LegacyQueryRunner';
 import { ElasticQueryBuilder } from './QueryBuilder';
 import { ElasticsearchAnnotationsQueryEditor } from './components/QueryEditor/AnnotationQueryEditor';
 import { isBucketAggregationWithField } from './components/QueryEditor/BucketAggregationsEditor/aggregations';
@@ -80,6 +78,9 @@ import {
   Interval,
   ElasticsearchAnnotationQuery,
   RangeMap,
+  isElasticsearchResponseWithAggregations,
+  isElasticsearchResponseWithHits,
+  ElasticsearchHits,
 } from './types';
 import { getScriptValue, isSupportedVersion, isTimeSeriesQuery, unsupportedVersionMessage } from './utils';
 
@@ -127,7 +128,6 @@ export class ElasticDatasource
   includeFrozen: boolean;
   isProxyAccess: boolean;
   databaseVersion: SemVer | null;
-  legacyQueryRunner: LegacyQueryRunner;
 
   constructor(
     instanceSettings: DataSourceInstanceSettings<ElasticsearchOptions>,
@@ -167,7 +167,6 @@ export class ElasticDatasource
       this.logLevelField = undefined;
     }
     this.languageProvider = new LanguageProvider(this);
-    this.legacyQueryRunner = new LegacyQueryRunner(this, this.templateSrv);
   }
 
   getResourceRequest(path: string, params?: BackendSrvRequest['params'], options?: Partial<BackendSrvRequest>) {
@@ -221,11 +220,7 @@ export class ElasticDatasource
       mergeMap((index) => {
         // catch all errors and emit an object with an err property to simplify checks later in the pipeline
         const path = indexUrlList[listLen - index - 1];
-        const requestObservable = config.featureToggles.enableElasticsearchBackendQuerying
-          ? from(this.getResource(path))
-          : this.legacyQueryRunner.request('GET', path);
-
-        return requestObservable.pipe(catchError((err) => of({ err })));
+        return from(this.getResource(path)).pipe(catchError((err) => of({ err })));
       }),
       skipWhile((resp) => resp?.err?.status === 404), // skip all requests that fail because missing Elastic index
       throwIfEmpty(() => 'Could not find an available index for this time range.'), // when i === Math.min(listLen, maxTraversals) generate will complete but without emitting any values which means we didn't find a valid index
@@ -243,17 +238,17 @@ export class ElasticDatasource
   annotationQuery(options: any): Promise<AnnotationEvent[]> {
     const payload = this.prepareAnnotationRequest(options);
     trackAnnotationQuery(options.annotation);
-    const annotationObservable = config.featureToggles.enableElasticsearchBackendQuerying
-      ? // TODO: We should migrate this to use query and not resource call
-        // The plan is to look at this when we start to work on raw query editor for ES
-        // as we will have to explore how to handle any query
-        from(this.postResourceRequest('_msearch', payload))
-      : this.legacyQueryRunner.request('POST', '_msearch', payload);
-
+    // TODO: We should migrate this to use query and not resource call
+    // The plan is to look at this when we start to work on raw query editor for ES
+    // as we will have to explore how to handle any query
+    const annotationObservable = from(this.postResourceRequest('_msearch', payload));
     return lastValueFrom(
       annotationObservable.pipe(
-        map((res) => {
-          const hits = res.responses[0].hits.hits;
+        map((res: unknown) => {
+          if (!isElasticsearchResponseWithHits(res)) {
+            return [];
+          }
+          const hits = res?.responses[0].hits?.hits ?? [];
           return this.processHitsToAnnotationEvents(options.annotation, hits);
         })
       )
@@ -353,10 +348,7 @@ export class ElasticDatasource
     return payload;
   }
 
-  private processHitsToAnnotationEvents(
-    annotation: ElasticsearchAnnotationQuery,
-    hits: Array<Record<string, string | number | Record<string | number, string | number>>>
-  ) {
+  private processHitsToAnnotationEvents(annotation: ElasticsearchAnnotationQuery, hits: ElasticsearchHits) {
     const timeField = annotation.timeField || '@timestamp';
     const timeEndField = annotation.timeEndField || null;
     const textField = annotation.textField || 'tags';
@@ -526,25 +518,19 @@ export class ElasticDatasource
   }
 
   getLogRowContext = async (row: LogRowModel, options?: LogRowContextOptions): Promise<{ data: DataFrame[] }> => {
-    const { enableElasticsearchBackendQuerying } = config.featureToggles;
-    if (enableElasticsearchBackendQuerying) {
-      const contextRequest = this.makeLogContextDataRequest(row, options);
-
-      return lastValueFrom(
-        this.query(contextRequest).pipe(
-          catchError((err) => {
-            const error: DataQueryError = {
-              message: 'Error during context query. Please check JS console logs.',
-              status: err.status,
-              statusText: err.statusText,
-            };
-            throw error;
-          })
-        )
-      );
-    } else {
-      return this.legacyQueryRunner.logContextQuery(row, options);
-    }
+    const contextRequest = this.makeLogContextDataRequest(row, options);
+    return lastValueFrom(
+      this.query(contextRequest).pipe(
+        catchError((err) => {
+          const error: DataQueryError = {
+            message: 'Error during context query. Please check JS console logs.',
+            status: err.status,
+            statusText: err.statusText,
+          };
+          throw error;
+        })
+      )
+    );
   };
 
   /**
@@ -674,20 +660,16 @@ export class ElasticDatasource
   }
 
   query(request: DataQueryRequest<ElasticsearchQuery>): Observable<DataQueryResponse> {
-    const { enableElasticsearchBackendQuerying } = config.featureToggles;
-    if (enableElasticsearchBackendQuerying) {
-      const start = new Date();
-      return super.query(request).pipe(
-        tap((response) => trackQuery(response, request, start)),
-        map((response) => {
-          response.data.forEach((dataFrame) => {
-            enhanceDataFrameWithDataLinks(dataFrame, this.dataLinks);
-          });
-          return response;
-        })
-      );
-    }
-    return this.legacyQueryRunner.query(request);
+    const start = new Date();
+    return super.query(request).pipe(
+      tap((response) => trackQuery(response, request, start)),
+      map((response) => {
+        response.data.forEach((dataFrame) => {
+          enhanceDataFrameWithDataLinks(dataFrame, this.dataLinks);
+        });
+        return response;
+      })
+    );
   }
 
   filterQuery(query: ElasticsearchQuery): boolean {
@@ -796,14 +778,12 @@ export class ElasticDatasource
 
     const url = this.getMultiSearchUrl();
 
-    const termsObservable = config.featureToggles.enableElasticsearchBackendQuerying
-      ? // TODO: This is run through resource call, but maybe should run through query
-        from(this.postResourceRequest(url, esQuery))
-      : this.legacyQueryRunner.request('POST', url, esQuery);
-
-    return termsObservable.pipe(
-      map((res) => {
-        if (!res.responses[0].aggregations) {
+    return from(this.postResourceRequest(url, esQuery)).pipe(
+      map((res: unknown) => {
+        if (!isElasticsearchResponseWithAggregations(res)) {
+          return [];
+        }
+        if (!res || !res.responses[0].aggregations) {
           return [];
         }
 
@@ -1028,10 +1008,7 @@ export class ElasticDatasource
 
   private getDatabaseVersionUncached(): Promise<SemVer | null> {
     // we want this function to never fail
-    const getDbVersionObservable = config.featureToggles.enableElasticsearchBackendQuerying
-      ? from(this.getResourceRequest(''))
-      : this.legacyQueryRunner.request('GET', '/');
-
+    const getDbVersionObservable = from(this.getResourceRequest(''));
     return lastValueFrom(getDbVersionObservable).then(
       (data) => {
         const versionNumber = data?.version?.number;
@@ -1,10 +1,12 @@
-import { DataSourceInstanceSettings, PluginType } from '@grafana/data';
+import { CoreApp, DataQueryRequest, DataSourceInstanceSettings, FieldType, PluginType, dateTime } from '@grafana/data';
 import { TemplateSrv } from '@grafana/runtime';
 
 import { ElasticDatasource } from './datasource';
-import { ElasticsearchOptions } from './types';
+import { ElasticsearchOptions, ElasticsearchQuery } from './types';
 
-export function createElasticDatasource(settings: Partial<DataSourceInstanceSettings<ElasticsearchOptions>> = {}) {
+export function createElasticDatasource(
+  settings: Partial<DataSourceInstanceSettings<Partial<ElasticsearchOptions>>> = {}
+) {
   const { jsonData, ...rest } = settings;
 
   const instanceSettings: DataSourceInstanceSettings<ElasticsearchOptions> = {
@@ -35,7 +37,7 @@ export function createElasticDatasource(settings: Partial<DataSourceInstanceSett
     type: 'type',
     uid: 'uid',
     access: 'proxy',
-    url: '',
+    url: 'http://elasticsearch.local',
     jsonData: {
       timeField: '',
       timeInterval: '',
@@ -60,3 +62,49 @@
 
   return new ElasticDatasource(instanceSettings, templateSrv);
 }
+
+export const createElasticQuery = (): DataQueryRequest<ElasticsearchQuery> => {
+  return {
+    requestId: '',
+    interval: '',
+    panelId: 0,
+    intervalMs: 1,
+    scopedVars: {},
+    timezone: '',
+    app: CoreApp.Dashboard,
+    startTime: 0,
+    range: {
+      from: dateTime([2015, 4, 30, 10]),
+      to: dateTime([2015, 5, 1, 10]),
+      raw: {
+        from: '',
+        to: '',
+      },
+    },
+    targets: [
+      {
+        refId: 'A',
+        bucketAggs: [{ type: 'date_histogram', field: '@timestamp', id: '2' }],
+        metrics: [{ type: 'count', id: '' }],
+        query: 'test',
+      },
+    ],
+  };
+};
+
+export const mockResponseFrames = [
+  {
+    schema: {
+      fields: [
+        { name: '@timestamp', type: FieldType.time },
+        { name: 'Value', type: FieldType.number },
+      ],
+    },
+    data: {
+      values: [
+        [100, 200, 300],
+        [1, 2, 3],
+      ],
+    },
+  },
+];
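A sketch of how these fixtures might be combined in a Jest test; the import path and the mocked-transport assumption are illustrative, not taken from this commit:

```typescript
import { lastValueFrom } from 'rxjs';

// Hypothetical import path for the fixtures defined above.
import { createElasticDatasource, createElasticQuery } from './mocks';

describe('ElasticDatasource.query', () => {
  it('returns data frames for a simple count query', async () => {
    // Assumes the backend transport is mocked elsewhere in the suite,
    // e.g. so that the datasource's query() resolves with mockResponseFrames.
    const ds = createElasticDatasource({ jsonData: { timeField: '@timestamp' } });
    const response = await lastValueFrom(ds.query(createElasticQuery()));
    expect(response.data.length).toBeGreaterThan(0);
  });
});
```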
@@ -139,3 +139,68 @@ export interface ElasticsearchAnnotationQuery {
 }
 
 export type RangeMap = Record<string, { from: number; to: number; format: string }>;
+
+export type ElasticsearchResponse = ElasticsearchResponseWithHits | ElasticsearchResponseWithAggregations;
+
+export type ElasticsearchResponseWithHits = {
+  responses: Array<{
+    hits: {
+      hits: ElasticsearchHits;
+    };
+  }>;
+};
+export type ElasticsearchHits = Array<Record<string, string | number | Record<string | number, string | number>>>;
+
+export type ElasticsearchResponseWithAggregations = {
+  responses: Array<{
+    aggregations: {
+      [key: string]: {
+        buckets: Array<{
+          key_as_string?: string;
+          key: string;
+          doc_count: number;
+          [key: string]: string | number | undefined;
+        }>;
+      };
+    };
+  }>;
+};
+
+export const isElasticsearchResponseWithHits = (res: unknown): res is ElasticsearchResponseWithHits => {
+  return (
+    res &&
+    typeof res === 'object' &&
+    'responses' in res &&
+    Array.isArray(res['responses']) &&
+    res['responses'].find((response: unknown) => {
+      return (
+        typeof response === 'object' &&
+        response !== null &&
+        'hits' in response &&
+        typeof response['hits'] === 'object' &&
+        response['hits'] !== null &&
+        'hits' in response['hits'] &&
+        Array.isArray(response['hits']['hits'])
+      );
+    })
+  );
+};
+
+export const isElasticsearchResponseWithAggregations = (res: unknown): res is ElasticsearchResponseWithAggregations => {
+  return (
+    res &&
+    typeof res === 'object' &&
+    'responses' in res &&
+    Array.isArray(res['responses']) &&
+    res['responses'].find((response: unknown) => {
+      return (
+        typeof response === 'object' &&
+        response !== null &&
+        'aggregations' in response &&
+        typeof response['aggregations'] === 'object' &&
+        response['aggregations'] !== null &&
+        Object.keys(response['aggregations']).length > 0
+      );
+    })
+  );
+};
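Because the resource calls now return `unknown`, callers narrow the payload with these guards before touching it; a minimal usage sketch (the `countHits` helper is hypothetical):

```typescript
import { isElasticsearchResponseWithAggregations, isElasticsearchResponseWithHits } from './types';

// Hypothetical helper: narrow an untyped _msearch response before reading it.
function countHits(res: unknown): number {
  if (isElasticsearchResponseWithHits(res)) {
    // res is now ElasticsearchResponseWithHits
    return res.responses[0].hits.hits.length;
  }
  if (isElasticsearchResponseWithAggregations(res)) {
    return 0; // aggregation responses carry buckets, not hits
  }
  return 0; // malformed or unexpected payload
}
```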