Chore: Upgrades rxjs to 7.3.0 (#37913)
* Chore: upgrades rxjs to 7.3.0
* Chore: replaces toPromise with lastValueFrom
* Chore: removes unused mock that caused test error
* Chore: fixes build error in PieChartPanel
* Chore: fixed build error in requestAllIndices
* Chore: fixes comment
* Chore: trying to reduce docs errors
* Chore: searches for explicit api extractor warnings
* Chore: updates rxjs-spy to 8.0.0
* Chore: reverts to main
* Chore: upgrade rxjs and rxjs-spy
This commit is contained in:
parent 646d95b8fb
commit dba15190af
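The one change repeated throughout the diffs below is mechanical: every call to Observable.toPromise(), which rxjs 7 deprecates, is replaced by wrapping the observable in the new lastValueFrom helper. A minimal sketch of that pattern, modeled on the request method in backend_srv.ts (the standalone function below is illustrative, not code from this commit):

import { lastValueFrom } from 'rxjs';
import { map } from 'rxjs/operators';
import { BackendSrvRequest, FetchResponse, getBackendSrv } from '@grafana/runtime';

// Before (rxjs 6):
//   return getBackendSrv().fetch<T>(options).pipe(map((r) => r.data)).toPromise();
// After (rxjs 7): wrap the observable instead of calling toPromise() on it.
async function request<T = any>(options: BackendSrvRequest): Promise<T> {
  return lastValueFrom(getBackendSrv().fetch<T>(options).pipe(map((r: FetchResponse<T>) => r.data)));
}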
@@ -195,7 +195,7 @@
"redux-mock-store": "1.5.4",
"regexp-replace-loader": "1.0.1",
"rimraf": "3.0.1",
"rxjs-spy": "^7.5.1",
"rxjs-spy": "8.0.0",
"sass": "1.27.0",
"sass-lint": "1.12.1",
"sass-loader": "8.0.2",
@@ -297,7 +297,7 @@
"regenerator-runtime": "0.13.3",
"reselect": "4.0.0",
"rst2html": "github:thoward/rst2html#990cb89",
"rxjs": "6.6.3",
"rxjs": "7.3.0",
"search-query-parser": "1.5.4",
"semver": "^7.1.3",
"slate": "0.47.8",

@@ -28,7 +28,7 @@
"eventemitter3": "4.0.7",
"lodash": "4.17.21",
"marked": "2.0.1",
"rxjs": "6.6.3",
"rxjs": "7.3.0",
"xss": "1.0.6"
},
"devDependencies": {

@@ -4,6 +4,11 @@
// but you still might need to select it for testing,
// in that case please add the attribute data-test-id={selector} in the component and
// prefix your selector string with 'data-test-id' so that when create the selectors we know to search for it on the right attribute
/**
* Selectors grouped/defined in Components
*
* @alpha
*/
export const Components = {
TimePicker: {
openButton: 'data-testid TimePicker Open Button',

@@ -2,7 +2,19 @@ import { Pages } from './pages';
import { Components } from './components';
import { E2ESelectors } from '../types';

/**
* Exposes selectors in package for easy use in e2e tests and in production code
*
* @alpha
*/
export const selectors: { pages: E2ESelectors<typeof Pages>; components: E2ESelectors<typeof Components> } = {
pages: Pages,
components: Components,
};

/**
* Exposes Pages, Component selectors and E2ESelectors type in package for easy use in e2e tests and in production code
*
* @alpha
*/
export { Pages, Components, E2ESelectors };

@@ -1,5 +1,10 @@
import { Components } from './components';

/**
* Selectors grouped/defined in Pages
*
* @alpha
*/
export const Pages = {
Login: {
url: '/login',

@@ -1,5 +1,23 @@
/**
* A string selector
*
* @alpha
*/

export type StringSelector = string;

/**
* A function selector with an argument
*
* @alpha
*/
export type FunctionSelector = (id: string) => string;

/**
* A function selector without argument
*
* @alpha
*/
export type CssSelector = () => string;

/**

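As a usage sketch (the import path mirrors the one used by index.ts above; all selector values except the TimePicker string are hypothetical), the three type aliases constrain how individual selectors are declared:

import { CssSelector, FunctionSelector, StringSelector } from '../types';

// A plain string selector, as used for the TimePicker open button above.
const openButton: StringSelector = 'data-testid TimePicker Open Button';
// A selector parameterized by an id or title (example value is hypothetical).
const panelTitle: FunctionSelector = (title: string) => `data-testid Panel header ${title}`;
// A selector computed without arguments (example value is hypothetical).
const loginForm: CssSelector = () => 'form[name="loginForm"]';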
@@ -147,7 +147,7 @@ export interface BackendSrv {

/**
* @deprecated Use the fetch function instead. If you prefer to work with a promise
* call the toPromise() function on the Observable returned by fetch.
* wrap the Observable returned by fetch with the lastValueFrom function.
*/
request(options: BackendSrvRequest): Promise<any>;

@@ -1,4 +1,14 @@
import { from, merge, MonoTypeOperatorFunction, Observable, of, Subject, Subscription, throwError } from 'rxjs';
import {
from,
lastValueFrom,
merge,
MonoTypeOperatorFunction,
Observable,
of,
Subject,
Subscription,
throwError,
} from 'rxjs';
import { catchError, filter, map, mergeMap, retryWhen, share, takeUntil, tap, throwIfEmpty } from 'rxjs/operators';
import { fromFetch } from 'rxjs/fetch';
import { v4 as uuidv4 } from 'uuid';
@@ -62,9 +72,7 @@ export class BackendSrv implements BackendService {
}

async request<T = any>(options: BackendSrvRequest): Promise<T> {
return this.fetch<T>(options)
.pipe(map((response: FetchResponse<T>) => response.data))
.toPromise();
return await lastValueFrom(this.fetch<T>(options).pipe(map((response: FetchResponse<T>) => response.data)));
}

fetch<T>(options: BackendSrvRequest): Observable<FetchResponse<T>> {
@@ -134,7 +142,7 @@ export class BackendSrv implements BackendService {
}

async datasourceRequest(options: BackendSrvRequest): Promise<any> {
return this.fetch(options).toPromise();
return lastValueFrom(this.fetch(options));
}

private parseRequestOptions(options: BackendSrvRequest): BackendSrvRequest {

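One behavioral difference is worth keeping in mind when reading the request and datasourceRequest changes above: toPromise() resolved to undefined when the source observable completed without emitting, whereas lastValueFrom rejects with an EmptyError unless a default value is supplied. An illustrative sketch (not from this commit):

import { EMPTY, EmptyError, lastValueFrom } from 'rxjs';

async function emptySourceBehavior() {
  // rxjs 6: EMPTY.toPromise() resolved to undefined.
  // rxjs 7: lastValueFrom(EMPTY) rejects with EmptyError instead...
  try {
    await lastValueFrom(EMPTY);
  } catch (err) {
    console.log(err instanceof EmptyError); // true
  }
  // ...unless a defaultValue is provided.
  const fallback = await lastValueFrom(EMPTY, { defaultValue: null });
  console.log(fallback); // null
}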
@@ -1,5 +1,7 @@
import { lastValueFrom } from 'rxjs';
import { urlUtil } from '@grafana/data';
import { getBackendSrv } from '@grafana/runtime';

import {
AlertmanagerAlert,
AlertManagerCortexConfig,
@@ -17,13 +19,13 @@ import { getDatasourceAPIId, GRAFANA_RULES_SOURCE_NAME } from '../utils/datasour
// "grafana" for grafana-managed, otherwise a datasource name
export async function fetchAlertManagerConfig(alertManagerSourceName: string): Promise<AlertManagerCortexConfig> {
try {
const result = await getBackendSrv()
.fetch<AlertManagerCortexConfig>({
const result = await lastValueFrom(
getBackendSrv().fetch<AlertManagerCortexConfig>({
url: `/api/alertmanager/${getDatasourceAPIId(alertManagerSourceName)}/config/api/v1/alerts`,
showErrorAlert: false,
showSuccessAlert: false,
})
.toPromise();
);
return {
template_files: result.data.template_files ?? {},
alertmanager_config: result.data.alertmanager_config ?? {},
@@ -47,36 +49,36 @@ export async function updateAlertManagerConfig(
alertManagerSourceName: string,
config: AlertManagerCortexConfig
): Promise<void> {
await getBackendSrv()
.fetch({
await lastValueFrom(
getBackendSrv().fetch({
method: 'POST',
url: `/api/alertmanager/${getDatasourceAPIId(alertManagerSourceName)}/config/api/v1/alerts`,
data: config,
showErrorAlert: false,
showSuccessAlert: false,
})
.toPromise();
);
}

export async function deleteAlertManagerConfig(alertManagerSourceName: string): Promise<void> {
await getBackendSrv()
.fetch({
await lastValueFrom(
getBackendSrv().fetch({
method: 'DELETE',
url: `/api/alertmanager/${getDatasourceAPIId(alertManagerSourceName)}/config/api/v1/alerts`,
showErrorAlert: false,
showSuccessAlert: false,
})
.toPromise();
);
}

export async function fetchSilences(alertManagerSourceName: string): Promise<Silence[]> {
const result = await getBackendSrv()
.fetch<Silence[]>({
const result = await lastValueFrom(
getBackendSrv().fetch<Silence[]>({
url: `/api/alertmanager/${getDatasourceAPIId(alertManagerSourceName)}/api/v2/silences`,
showErrorAlert: false,
showSuccessAlert: false,
})
.toPromise();
);
return result.data;
}

@@ -85,15 +87,15 @@ export async function createOrUpdateSilence(
alertmanagerSourceName: string,
payload: SilenceCreatePayload
): Promise<Silence> {
const result = await getBackendSrv()
.fetch<Silence>({
const result = await lastValueFrom(
getBackendSrv().fetch<Silence>({
url: `/api/alertmanager/${getDatasourceAPIId(alertmanagerSourceName)}/api/v2/silences`,
data: payload,
showErrorAlert: false,
showSuccessAlert: false,
method: 'POST',
})
.toPromise();
);
return result.data;
}

@@ -121,39 +123,39 @@ export async function fetchAlerts(
)
.join('&') || '';

const result = await getBackendSrv()
.fetch<AlertmanagerAlert[]>({
const result = await lastValueFrom(
getBackendSrv().fetch<AlertmanagerAlert[]>({
url:
`/api/alertmanager/${getDatasourceAPIId(alertmanagerSourceName)}/api/v2/alerts` +
(filters ? '?' + filters : ''),
showErrorAlert: false,
showSuccessAlert: false,
})
.toPromise();
);

return result.data;
}

export async function fetchAlertGroups(alertmanagerSourceName: string): Promise<AlertmanagerGroup[]> {
const result = await getBackendSrv()
.fetch<AlertmanagerGroup[]>({
const result = await lastValueFrom(
getBackendSrv().fetch<AlertmanagerGroup[]>({
url: `/api/alertmanager/${getDatasourceAPIId(alertmanagerSourceName)}/api/v2/alerts/groups`,
showErrorAlert: false,
showSuccessAlert: false,
})
.toPromise();
);

return result.data;
}

export async function fetchStatus(alertManagerSourceName: string): Promise<AlertmanagerStatus> {
const result = await getBackendSrv()
.fetch<AlertmanagerStatus>({
const result = await lastValueFrom(
getBackendSrv().fetch<AlertmanagerStatus>({
url: `/api/alertmanager/${getDatasourceAPIId(alertManagerSourceName)}/api/v2/status`,
showErrorAlert: false,
showSuccessAlert: false,
})
.toPromise();
);

return result.data;
}
@@ -162,15 +164,15 @@ export async function testReceivers(alertManagerSourceName: string, receivers: R
const data: TestReceiversPayload = {
receivers,
};
const result = await getBackendSrv()
.fetch<TestReceiversResult>({
const result = await lastValueFrom(
getBackendSrv().fetch<TestReceiversResult>({
method: 'POST',
data,
url: `/api/alertmanager/${getDatasourceAPIId(alertManagerSourceName)}/config/api/v1/receivers/test`,
showErrorAlert: false,
showSuccessAlert: false,
})
.toPromise();
);

// api returns 207 if one or more receivers has failed test. Collect errors in this case
if (result.status === 207) {

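Every function in the alertmanager API module above follows the same wrap-and-unwrap shape. Purely as an illustration of that repeated shape (this helper is not part of the commit):

import { lastValueFrom } from 'rxjs';
import { BackendSrvRequest, getBackendSrv } from '@grafana/runtime';

// Illustrative only: perform a silent fetch (no success/error toasts) and return the payload.
async function silentFetch<T>(options: BackendSrvRequest): Promise<T> {
  const response = await lastValueFrom(
    getBackendSrv().fetch<T>({ showErrorAlert: false, showSuccessAlert: false, ...options })
  );
  return response.data;
}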
@@ -1,22 +1,23 @@
import { lastValueFrom } from 'rxjs';
import { getBackendSrv } from '@grafana/runtime';

import { RuleNamespace } from 'app/types/unified-alerting';
import { PromRulesResponse } from 'app/types/unified-alerting-dto';
import { getDatasourceAPIId } from '../utils/datasource';

export async function fetchRules(dataSourceName: string): Promise<RuleNamespace[]> {
const response = await getBackendSrv()
.fetch<PromRulesResponse>({
const response = await lastValueFrom(
getBackendSrv().fetch<PromRulesResponse>({
url: `/api/prometheus/${getDatasourceAPIId(dataSourceName)}/api/v1/rules`,
showErrorAlert: false,
showSuccessAlert: false,
})
.toPromise()
.catch((e) => {
if ('status' in e && e.status === 404) {
throw new Error('404 from rule state endpoint. Perhaps ruler API is not enabled?');
}
throw e;
});
).catch((e) => {
if ('status' in e && e.status === 404) {
throw new Error('404 from rule state endpoint. Perhaps ruler API is not enabled?');
}
throw e;
});

const nsMap: { [key: string]: RuleNamespace } = {};
response.data.data.groups.forEach((group) => {

@@ -1,6 +1,8 @@
import { lastValueFrom } from 'rxjs';
import { getBackendSrv } from '@grafana/runtime';

import { PostableRulerRuleGroupDTO, RulerRuleGroupDTO, RulerRulesConfigDTO } from 'app/types/unified-alerting-dto';
import { getDatasourceAPIId } from '../utils/datasource';
import { getBackendSrv } from '@grafana/runtime';
import { RULER_NOT_SUPPORTED_MSG } from '../utils/constants';

// upsert a rule group. use this to update rules
@@ -9,15 +11,15 @@ export async function setRulerRuleGroup(
namespace: string,
group: PostableRulerRuleGroupDTO
): Promise<void> {
await await getBackendSrv()
.fetch<unknown>({
await lastValueFrom(
getBackendSrv().fetch<unknown>({
method: 'POST',
url: `/api/ruler/${getDatasourceAPIId(dataSourceName)}/api/v1/rules/${encodeURIComponent(namespace)}`,
data: group,
showErrorAlert: false,
showSuccessAlert: false,
})
.toPromise();
);
}

// fetch all ruler rule namespaces and included groups
@@ -51,8 +53,8 @@ export async function fetchRulerRulesGroup(
}

export async function deleteRulerRulesGroup(dataSourceName: string, namespace: string, groupName: string) {
return getBackendSrv()
.fetch({
return await lastValueFrom(
getBackendSrv().fetch({
url: `/api/ruler/${getDatasourceAPIId(dataSourceName)}/api/v1/rules/${encodeURIComponent(
namespace
)}/${encodeURIComponent(groupName)}`,
@@ -60,19 +62,19 @@ export async function deleteRulerRulesGroup(dataSourceName: string, namespace: s
showSuccessAlert: false,
showErrorAlert: false,
})
.toPromise();
);
}

// false in case ruler is not supported. this is weird, but we'll work on it
async function rulerGetRequest<T>(url: string, empty: T): Promise<T> {
try {
const response = await getBackendSrv()
.fetch<T>({
const response = await lastValueFrom(
getBackendSrv().fetch<T>({
url,
showErrorAlert: false,
showSuccessAlert: false,
})
.toPromise();
);
return response.data;
} catch (e) {
if (e?.status === 404) {
@ -1,11 +1,6 @@
|
||||
// Libaries
|
||||
import { cloneDeep, flattenDeep } from 'lodash';
|
||||
// Components
|
||||
import coreModule from 'app/core/core_module';
|
||||
// Utils & Services
|
||||
import { dedupAnnotations } from './events_processing';
|
||||
// Types
|
||||
import { DashboardModel } from '../dashboard/state';
|
||||
import { lastValueFrom, Observable, of } from 'rxjs';
|
||||
import { map, mergeMap } from 'rxjs/operators';
|
||||
import {
|
||||
AnnotationEvent,
|
||||
AppEvents,
|
||||
@ -16,10 +11,12 @@ import {
|
||||
ScopedVars,
|
||||
} from '@grafana/data';
|
||||
import { getBackendSrv, getDataSourceSrv } from '@grafana/runtime';
|
||||
|
||||
import coreModule from 'app/core/core_module';
|
||||
import { dedupAnnotations } from './events_processing';
|
||||
import { DashboardModel } from '../dashboard/state';
|
||||
import { appEvents } from 'app/core/core';
|
||||
import { getTimeSrv } from '../dashboard/services/TimeSrv';
|
||||
import { Observable, of } from 'rxjs';
|
||||
import { map, mergeMap } from 'rxjs/operators';
|
||||
import { AnnotationQueryOptions, AnnotationQueryResponse } from './types';
|
||||
import { standardAnnotationSupport } from './standardAnnotationSupport';
|
||||
import { runRequest } from '../query/state/runRequest';
|
||||
@ -154,11 +151,9 @@ export class AnnotationsSrv {
|
||||
});
|
||||
}
|
||||
// Note: future annotation lifecycle will use observables directly
|
||||
return executeAnnotationQuery(options, datasource, annotation)
|
||||
.toPromise()
|
||||
.then((res) => {
|
||||
return res.events ?? [];
|
||||
});
|
||||
return lastValueFrom(executeAnnotationQuery(options, datasource, annotation)).then((res) => {
|
||||
return res.events ?? [];
|
||||
});
|
||||
})
|
||||
.then((results) => {
|
||||
// store response in annotation object if this is a snapshot call
|
||||
|
@ -1,11 +1,11 @@
|
||||
import React, { PureComponent } from 'react';
|
||||
|
||||
import { lastValueFrom } from 'rxjs';
|
||||
import { css, cx } from '@emotion/css';
|
||||
import { AnnotationEventMappings, AnnotationQuery, DataQuery, DataSourceApi, LoadingState } from '@grafana/data';
|
||||
import { Button, Icon, IconName, Spinner } from '@grafana/ui';
|
||||
|
||||
import { getDashboardSrv } from 'app/features/dashboard/services/DashboardSrv';
|
||||
import { getTimeSrv } from 'app/features/dashboard/services/TimeSrv';
|
||||
import { css, cx } from '@emotion/css';
|
||||
import { standardAnnotationSupport } from '../standardAnnotationSupport';
|
||||
import { executeAnnotationQuery } from '../annotations_srv';
|
||||
import { PanelModel } from 'app/features/dashboard/state';
|
||||
@ -64,15 +64,17 @@ export default class StandardAnnotationQueryEditor extends PureComponent<Props,
|
||||
this.setState({
|
||||
running: true,
|
||||
});
|
||||
const response = await executeAnnotationQuery(
|
||||
{
|
||||
range: getTimeSrv().timeRange(),
|
||||
panel: {} as PanelModel,
|
||||
dashboard,
|
||||
},
|
||||
datasource,
|
||||
annotation
|
||||
).toPromise();
|
||||
const response = await lastValueFrom(
|
||||
executeAnnotationQuery(
|
||||
{
|
||||
range: getTimeSrv().timeRange(),
|
||||
panel: {} as PanelModel,
|
||||
dashboard,
|
||||
},
|
||||
datasource,
|
||||
annotation
|
||||
)
|
||||
);
|
||||
this.setState({
|
||||
running: false,
|
||||
response,
|
||||
|
@ -1,3 +1,4 @@
|
||||
import { lastValueFrom } from 'rxjs';
|
||||
import { DataSourcePluginMeta, DataSourceSettings, locationUtil } from '@grafana/data';
|
||||
import { DataSourceWithBackend, getDataSourceSrv, locationService } from '@grafana/runtime';
|
||||
import { updateNavIndex } from 'app/core/actions';
|
||||
@ -140,13 +141,13 @@ export function loadDataSource(uid: string): ThunkResult<void> {
|
||||
async function getDataSourceUsingUidOrId(uid: string): Promise<DataSourceSettings> {
|
||||
// Try first with uid api
|
||||
try {
|
||||
const byUid = await getBackendSrv()
|
||||
.fetch<DataSourceSettings>({
|
||||
const byUid = await lastValueFrom(
|
||||
getBackendSrv().fetch<DataSourceSettings>({
|
||||
method: 'GET',
|
||||
url: `/api/datasources/uid/${uid}`,
|
||||
showErrorAlert: false,
|
||||
})
|
||||
.toPromise();
|
||||
);
|
||||
|
||||
if (byUid.ok) {
|
||||
return byUid.data;
|
||||
@ -158,13 +159,13 @@ async function getDataSourceUsingUidOrId(uid: string): Promise<DataSourceSetting
|
||||
// try lookup by old db id
|
||||
const id = parseInt(uid, 10);
|
||||
if (!Number.isNaN(id)) {
|
||||
const response = await getBackendSrv()
|
||||
.fetch<DataSourceSettings>({
|
||||
const response = await lastValueFrom(
|
||||
getBackendSrv().fetch<DataSourceSettings>({
|
||||
method: 'GET',
|
||||
url: `/api/datasources/${id}`,
|
||||
showErrorAlert: false,
|
||||
})
|
||||
.toPromise();
|
||||
);
|
||||
|
||||
// Not ideal to do a full page reload here but so tricky to handle this
|
||||
// otherwise We can update the location using react router, but need to
|
||||
|
@ -1,10 +1,5 @@
|
||||
import { DrawStyle, StackingMode } from '@grafana/ui';
|
||||
|
||||
jest.mock('@grafana/data/src/datetime/formatter', () => ({
|
||||
dateTimeFormat: () => 'format() jest mocked',
|
||||
dateTimeFormatTimeAgo: (ts: any) => 'fromNow() jest mocked',
|
||||
}));
|
||||
|
||||
import { lastValueFrom } from 'rxjs';
|
||||
import {
|
||||
ArrayVector,
|
||||
DataFrame,
|
||||
@ -26,6 +21,11 @@ import { describe } from '../../../../test/lib/common';
|
||||
import { ExplorePanelData } from 'app/types';
|
||||
import TableModel from 'app/core/table_model';
|
||||
|
||||
jest.mock('@grafana/data/src/datetime/formatter', () => ({
|
||||
dateTimeFormat: () => 'format() jest mocked',
|
||||
dateTimeFormatTimeAgo: (ts: any) => 'fromNow() jest mocked',
|
||||
}));
|
||||
|
||||
const getTestContext = () => {
|
||||
const timeSeries = toDataFrame({
|
||||
name: 'A-series',
|
||||
@ -184,7 +184,7 @@ describe('decorateWithTableResult', () => {
|
||||
it('should process table type dataFrame', async () => {
|
||||
const { table, emptyTable } = getTestContext();
|
||||
const panelData = createExplorePanelData({ tableFrames: [table, emptyTable] });
|
||||
const panelResult = await decorateWithTableResult(panelData).toPromise();
|
||||
const panelResult = await lastValueFrom(decorateWithTableResult(panelData));
|
||||
|
||||
let theResult = panelResult.tableResult;
|
||||
|
||||
@ -241,7 +241,7 @@ describe('decorateWithTableResult', () => {
|
||||
}),
|
||||
];
|
||||
const panelData = createExplorePanelData({ tableFrames });
|
||||
const panelResult = await decorateWithTableResult(panelData).toPromise();
|
||||
const panelResult = await lastValueFrom(decorateWithTableResult(panelData));
|
||||
const result = panelResult.tableResult;
|
||||
|
||||
expect(result?.fields[0].name).toBe('Time');
|
||||
@ -264,20 +264,20 @@ describe('decorateWithTableResult', () => {
|
||||
tableFrames[0].fields[0].display = displayFunctionMock;
|
||||
|
||||
const panelData = createExplorePanelData({ tableFrames });
|
||||
const panelResult = await decorateWithTableResult(panelData).toPromise();
|
||||
const panelResult = await lastValueFrom(decorateWithTableResult(panelData));
|
||||
expect(panelResult.tableResult?.fields[0].display).toBe(displayFunctionMock);
|
||||
});
|
||||
|
||||
it('should return null when passed empty array', async () => {
|
||||
const panelData = createExplorePanelData({ tableFrames: [] });
|
||||
const panelResult = await decorateWithTableResult(panelData).toPromise();
|
||||
const panelResult = await lastValueFrom(decorateWithTableResult(panelData));
|
||||
expect(panelResult.tableResult).toBeNull();
|
||||
});
|
||||
|
||||
it('returns data if panelData has error', async () => {
|
||||
const { table, emptyTable } = getTestContext();
|
||||
const panelData = createExplorePanelData({ error: {}, tableFrames: [table, emptyTable] });
|
||||
const panelResult = await decorateWithTableResult(panelData).toPromise();
|
||||
const panelResult = await lastValueFrom(decorateWithTableResult(panelData));
|
||||
expect(panelResult.tableResult).not.toBeNull();
|
||||
});
|
||||
});
|
||||
|
@ -1,6 +1,6 @@
|
||||
import { lastValueFrom, Observable, of } from 'rxjs';
|
||||
import { DataQuery, DataQueryResponse, DataSourceApi, DataSourceInstanceSettings } from '@grafana/data';
|
||||
import { BackendSrvRequest, getBackendSrv } from '@grafana/runtime';
|
||||
import { Observable, of } from 'rxjs';
|
||||
|
||||
export type AlertManagerQuery = {
|
||||
query: string;
|
||||
@ -35,7 +35,7 @@ export class AlertManagerDatasource extends DataSourceApi<AlertManagerQuery> {
|
||||
options.headers!.Authorization = this.instanceSettings.basicAuth;
|
||||
}
|
||||
|
||||
return getBackendSrv().fetch<any>(options).toPromise();
|
||||
return lastValueFrom(getBackendSrv().fetch<any>(options));
|
||||
}
|
||||
|
||||
async testDatasource() {
|
||||
|
@ -1,4 +1,4 @@
|
||||
import { Observable, of } from 'rxjs';
|
||||
import { lastValueFrom, Observable, of } from 'rxjs';
|
||||
import { catchError, map } from 'rxjs/operators';
|
||||
import { SelectableValue } from '@grafana/data';
|
||||
import { FetchResponse, getBackendSrv } from '@grafana/runtime';
|
||||
@ -38,33 +38,34 @@ export default class Api {
|
||||
return Promise.resolve(this.cache[path]);
|
||||
}
|
||||
|
||||
return getBackendSrv()
|
||||
.fetch<Record<string, any>>({
|
||||
url: baseUrl + path,
|
||||
method: 'GET',
|
||||
})
|
||||
.pipe(
|
||||
map((response) => {
|
||||
const responsePropName = path.match(/([^\/]*)\/*$/)![1].split('?')[0];
|
||||
let res = [];
|
||||
if (response && response.data && response.data[responsePropName]) {
|
||||
res = response.data[responsePropName].map(responseMap);
|
||||
}
|
||||
|
||||
if (useCache) {
|
||||
this.cache[path] = res;
|
||||
}
|
||||
|
||||
return res;
|
||||
}),
|
||||
catchError((error) => {
|
||||
appEvents.emit(CoreEvents.dsRequestError, {
|
||||
error: { data: { error: formatCloudMonitoringError(error) } },
|
||||
});
|
||||
return of([]);
|
||||
return lastValueFrom(
|
||||
getBackendSrv()
|
||||
.fetch<Record<string, any>>({
|
||||
url: baseUrl + path,
|
||||
method: 'GET',
|
||||
})
|
||||
)
|
||||
.toPromise();
|
||||
.pipe(
|
||||
map((response) => {
|
||||
const responsePropName = path.match(/([^\/]*)\/*$/)![1].split('?')[0];
|
||||
let res = [];
|
||||
if (response && response.data && response.data[responsePropName]) {
|
||||
res = response.data[responsePropName].map(responseMap);
|
||||
}
|
||||
|
||||
if (useCache) {
|
||||
this.cache[path] = res;
|
||||
}
|
||||
|
||||
return res;
|
||||
}),
|
||||
catchError((error) => {
|
||||
appEvents.emit(CoreEvents.dsRequestError, {
|
||||
error: { data: { error: formatCloudMonitoringError(error) } },
|
||||
});
|
||||
return of([]);
|
||||
})
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
post(data: Record<string, any>): Observable<FetchResponse<PostResponse>> {
|
||||
@ -76,11 +77,11 @@ export default class Api {
|
||||
}
|
||||
|
||||
test(projectName: string) {
|
||||
return getBackendSrv()
|
||||
.fetch<any>({
|
||||
return lastValueFrom(
|
||||
getBackendSrv().fetch<any>({
|
||||
url: `${this.baseUrl}${projectName}/metricDescriptors`,
|
||||
method: 'GET',
|
||||
})
|
||||
.toPromise();
|
||||
);
|
||||
}
|
||||
}
|
||||
|
@ -1,21 +1,20 @@
|
||||
import { chunk, flatten, isString } from 'lodash';
|
||||
|
||||
import { from, lastValueFrom, Observable, of, throwError } from 'rxjs';
|
||||
import { catchError, map, mergeMap } from 'rxjs/operators';
|
||||
import {
|
||||
DataQueryRequest,
|
||||
DataQueryResponse,
|
||||
DataSourceInstanceSettings,
|
||||
ScopedVars,
|
||||
SelectableValue,
|
||||
DataQueryResponse,
|
||||
} from '@grafana/data';
|
||||
import { DataSourceWithBackend, toDataQueryResponse } from '@grafana/runtime';
|
||||
|
||||
import { getTemplateSrv, TemplateSrv } from 'app/features/templating/template_srv';
|
||||
import { getTimeSrv, TimeSrv } from 'app/features/dashboard/services/TimeSrv';
|
||||
|
||||
import { CloudMonitoringOptions, CloudMonitoringQuery, Filter, MetricDescriptor, QueryType, EditorMode } from './types';
|
||||
import { CloudMonitoringOptions, CloudMonitoringQuery, EditorMode, Filter, MetricDescriptor, QueryType } from './types';
|
||||
import API from './api';
|
||||
import { DataSourceWithBackend, toDataQueryResponse } from '@grafana/runtime';
|
||||
import { CloudMonitoringVariableSupport } from './variables';
|
||||
import { catchError, map, mergeMap } from 'rxjs/operators';
|
||||
import { from, Observable, of, throwError } from 'rxjs';
|
||||
|
||||
export default class CloudMonitoringDatasource extends DataSourceWithBackend<
|
||||
CloudMonitoringQuery,
|
||||
@ -72,35 +71,36 @@ export default class CloudMonitoringDatasource extends DataSourceWithBackend<
|
||||
},
|
||||
];
|
||||
|
||||
return this.api
|
||||
.post({
|
||||
from: options.range.from.valueOf().toString(),
|
||||
to: options.range.to.valueOf().toString(),
|
||||
queries,
|
||||
})
|
||||
.pipe(
|
||||
map(({ data }) => {
|
||||
const dataQueryResponse = toDataQueryResponse({
|
||||
data: data,
|
||||
});
|
||||
const df: any = [];
|
||||
if (dataQueryResponse.data.length !== 0) {
|
||||
for (let i = 0; i < dataQueryResponse.data.length; i++) {
|
||||
for (let j = 0; j < dataQueryResponse.data[i].fields[0].values.length; j++) {
|
||||
df.push({
|
||||
annotation: annotation,
|
||||
time: Date.parse(dataQueryResponse.data[i].fields[0].values.get(j)),
|
||||
title: dataQueryResponse.data[i].fields[1].values.get(j),
|
||||
tags: [],
|
||||
text: dataQueryResponse.data[i].fields[3].values.get(j),
|
||||
});
|
||||
return lastValueFrom(
|
||||
this.api
|
||||
.post({
|
||||
from: options.range.from.valueOf().toString(),
|
||||
to: options.range.to.valueOf().toString(),
|
||||
queries,
|
||||
})
|
||||
.pipe(
|
||||
map(({ data }) => {
|
||||
const dataQueryResponse = toDataQueryResponse({
|
||||
data: data,
|
||||
});
|
||||
const df: any = [];
|
||||
if (dataQueryResponse.data.length !== 0) {
|
||||
for (let i = 0; i < dataQueryResponse.data.length; i++) {
|
||||
for (let j = 0; j < dataQueryResponse.data[i].fields[0].values.length; j++) {
|
||||
df.push({
|
||||
annotation: annotation,
|
||||
time: Date.parse(dataQueryResponse.data[i].fields[0].values.get(j)),
|
||||
title: dataQueryResponse.data[i].fields[1].values.get(j),
|
||||
tags: [],
|
||||
text: dataQueryResponse.data[i].fields[3].values.get(j),
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return df;
|
||||
})
|
||||
)
|
||||
.toPromise();
|
||||
return df;
|
||||
})
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
applyTemplateVariables(
|
||||
@ -150,11 +150,11 @@ export default class CloudMonitoringDatasource extends DataSourceWithBackend<
|
||||
const queries = options.targets;
|
||||
|
||||
if (!queries.length) {
|
||||
return of({ results: [] }).toPromise();
|
||||
return lastValueFrom(of({ results: [] }));
|
||||
}
|
||||
|
||||
return from(this.ensureGCEDefaultProject())
|
||||
.pipe(
|
||||
return lastValueFrom(
|
||||
from(this.ensureGCEDefaultProject()).pipe(
|
||||
mergeMap(() => {
|
||||
return this.api.post({
|
||||
from: options.range.from.valueOf().toString(),
|
||||
@ -170,7 +170,7 @@ export default class CloudMonitoringDatasource extends DataSourceWithBackend<
|
||||
return result && result.meta ? result.meta.labels : {};
|
||||
})
|
||||
)
|
||||
.toPromise();
|
||||
);
|
||||
}
|
||||
|
||||
async testDatasource() {
|
||||
@ -206,27 +206,28 @@ export default class CloudMonitoringDatasource extends DataSourceWithBackend<
|
||||
}
|
||||
|
||||
async getGCEDefaultProject() {
|
||||
return this.api
|
||||
.post({
|
||||
queries: [
|
||||
{
|
||||
refId: 'getGCEDefaultProject',
|
||||
type: 'getGCEDefaultProject',
|
||||
datasourceId: this.id,
|
||||
},
|
||||
],
|
||||
})
|
||||
.pipe(
|
||||
map(({ data }) => {
|
||||
return data && data.results && data.results.getGCEDefaultProject && data.results.getGCEDefaultProject.meta
|
||||
? data.results.getGCEDefaultProject.meta.defaultProject
|
||||
: '';
|
||||
}),
|
||||
catchError((err) => {
|
||||
return throwError(err.data.error);
|
||||
return lastValueFrom(
|
||||
this.api
|
||||
.post({
|
||||
queries: [
|
||||
{
|
||||
refId: 'getGCEDefaultProject',
|
||||
type: 'getGCEDefaultProject',
|
||||
datasourceId: this.id,
|
||||
},
|
||||
],
|
||||
})
|
||||
)
|
||||
.toPromise();
|
||||
.pipe(
|
||||
map(({ data }) => {
|
||||
return data && data.results && data.results.getGCEDefaultProject && data.results.getGCEDefaultProject.meta
|
||||
? data.results.getGCEDefaultProject.meta.defaultProject
|
||||
: '';
|
||||
}),
|
||||
catchError((err) => {
|
||||
return throwError(err.data.error);
|
||||
})
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
getDefaultProject(): string {
|
||||
|
@ -1,7 +1,7 @@
|
||||
import React from 'react';
|
||||
import angular from 'angular';
|
||||
import { find, isEmpty, isString, set } from 'lodash';
|
||||
import { merge, Observable, of, throwError, zip } from 'rxjs';
|
||||
import { lastValueFrom, merge, Observable, of, throwError, zip } from 'rxjs';
|
||||
import {
|
||||
catchError,
|
||||
concatMap,
|
||||
@ -15,7 +15,7 @@ import {
|
||||
takeWhile,
|
||||
tap,
|
||||
} from 'rxjs/operators';
|
||||
import { getBackendSrv, getGrafanaLiveSrv, toDataQueryResponse, DataSourceWithBackend } from '@grafana/runtime';
|
||||
import { DataSourceWithBackend, getBackendSrv, getGrafanaLiveSrv, toDataQueryResponse } from '@grafana/runtime';
|
||||
import { RowContextOptions } from '@grafana/ui/src/components/Logs/LogRowContextProvider';
|
||||
import {
|
||||
DataFrame,
|
||||
@ -464,14 +464,14 @@ export class CloudWatchDatasource extends DataSourceWithBackend<CloudWatchQuery,
|
||||
}
|
||||
|
||||
async describeLogGroups(params: DescribeLogGroupsRequest): Promise<string[]> {
|
||||
const dataFrames = await this.makeLogActionRequest('DescribeLogGroups', [params]).toPromise();
|
||||
const dataFrames = await lastValueFrom(this.makeLogActionRequest('DescribeLogGroups', [params]));
|
||||
|
||||
const logGroupNames = dataFrames[0]?.fields[0]?.values.toArray() ?? [];
|
||||
return logGroupNames;
|
||||
}
|
||||
|
||||
async getLogGroupFields(params: GetLogGroupFieldsRequest): Promise<GetLogGroupFieldsResponse> {
|
||||
const dataFrames = await this.makeLogActionRequest('GetLogGroupFields', [params]).toPromise();
|
||||
const dataFrames = await lastValueFrom(this.makeLogActionRequest('GetLogGroupFields', [params]));
|
||||
|
||||
const fieldNames = dataFrames[0].fields[0].values.toArray();
|
||||
const fieldPercentages = dataFrames[0].fields[1].values.toArray();
|
||||
@ -516,7 +516,7 @@ export class CloudWatchDatasource extends DataSourceWithBackend<CloudWatchQuery,
|
||||
requestParams.startTime = row.timeEpochMs;
|
||||
}
|
||||
|
||||
const dataFrames = await this.makeLogActionRequest('GetLogEvents', [requestParams]).toPromise();
|
||||
const dataFrames = await lastValueFrom(this.makeLogActionRequest('GetLogEvents', [requestParams]));
|
||||
|
||||
return {
|
||||
data: dataFrames,
|
||||
@ -591,27 +591,27 @@ export class CloudWatchDatasource extends DataSourceWithBackend<CloudWatchQuery,
|
||||
|
||||
doMetricQueryRequest(subtype: string, parameters: any): Promise<Array<{ text: any; label: any; value: any }>> {
|
||||
const range = this.timeSrv.timeRange();
|
||||
return this.awsRequest(DS_QUERY_ENDPOINT, {
|
||||
from: range.from.valueOf().toString(),
|
||||
to: range.to.valueOf().toString(),
|
||||
queries: [
|
||||
{
|
||||
refId: 'metricFindQuery',
|
||||
intervalMs: 1, // dummy
|
||||
maxDataPoints: 1, // dummy
|
||||
datasourceId: this.id,
|
||||
type: 'metricFindQuery',
|
||||
subtype: subtype,
|
||||
...parameters,
|
||||
},
|
||||
],
|
||||
})
|
||||
.pipe(
|
||||
return lastValueFrom(
|
||||
this.awsRequest(DS_QUERY_ENDPOINT, {
|
||||
from: range.from.valueOf().toString(),
|
||||
to: range.to.valueOf().toString(),
|
||||
queries: [
|
||||
{
|
||||
refId: 'metricFindQuery',
|
||||
intervalMs: 1, // dummy
|
||||
maxDataPoints: 1, // dummy
|
||||
datasourceId: this.id,
|
||||
type: 'metricFindQuery',
|
||||
subtype: subtype,
|
||||
...parameters,
|
||||
},
|
||||
],
|
||||
}).pipe(
|
||||
map((r) => {
|
||||
return this.transformSuggestDataFromDataframes(r);
|
||||
})
|
||||
)
|
||||
.toPromise();
|
||||
);
|
||||
}
|
||||
|
||||
makeLogActionRequest(
|
||||
@ -843,19 +843,19 @@ export class CloudWatchDatasource extends DataSourceWithBackend<CloudWatchQuery,
|
||||
alarmNamePrefix: annotation.alarmNamePrefix || '',
|
||||
};
|
||||
|
||||
return this.awsRequest(DS_QUERY_ENDPOINT, {
|
||||
from: options.range.from.valueOf().toString(),
|
||||
to: options.range.to.valueOf().toString(),
|
||||
queries: [
|
||||
{
|
||||
refId: 'annotationQuery',
|
||||
datasourceId: this.id,
|
||||
type: 'annotationQuery',
|
||||
...parameters,
|
||||
},
|
||||
],
|
||||
})
|
||||
.pipe(
|
||||
return lastValueFrom(
|
||||
this.awsRequest(DS_QUERY_ENDPOINT, {
|
||||
from: options.range.from.valueOf().toString(),
|
||||
to: options.range.to.valueOf().toString(),
|
||||
queries: [
|
||||
{
|
||||
refId: 'annotationQuery',
|
||||
datasourceId: this.id,
|
||||
type: 'annotationQuery',
|
||||
...parameters,
|
||||
},
|
||||
],
|
||||
}).pipe(
|
||||
map((r) => {
|
||||
const frames = toDataQueryResponse({ data: r }).data as DataFrame[];
|
||||
const table = toLegacyResponseData(frames[0]) as TableData;
|
||||
@ -868,7 +868,7 @@ export class CloudWatchDatasource extends DataSourceWithBackend<CloudWatchQuery,
|
||||
}));
|
||||
})
|
||||
)
|
||||
.toPromise();
|
||||
);
|
||||
}
|
||||
|
||||
targetContainsTemplate(target: any) {
|
||||
|
@ -1,7 +1,9 @@
|
||||
// Libraries
|
||||
import { sortedUniq } from 'lodash';
|
||||
import { lastValueFrom } from 'rxjs';
|
||||
import Prism, { Grammar } from 'prismjs';
|
||||
import { AbsoluteTimeRange, HistoryItem, LanguageProvider } from '@grafana/data';
|
||||
import { CompletionItemGroup, SearchFunctionType, Token, TypeaheadInput, TypeaheadOutput } from '@grafana/ui';
|
||||
|
||||
// Services & Utils
|
||||
import syntax, {
|
||||
AGGREGATION_FUNCTIONS_STATS,
|
||||
BOOLEAN_FUNCTIONS,
|
||||
@ -12,14 +14,8 @@ import syntax, {
|
||||
QUERY_COMMANDS,
|
||||
STRING_FUNCTIONS,
|
||||
} from './syntax';
|
||||
|
||||
// Types
|
||||
import { CloudWatchQuery, TSDBResponse } from './types';
|
||||
import { AbsoluteTimeRange, HistoryItem, LanguageProvider } from '@grafana/data';
|
||||
|
||||
import { CloudWatchDatasource } from './datasource';
|
||||
import { CompletionItemGroup, SearchFunctionType, Token, TypeaheadInput, TypeaheadOutput } from '@grafana/ui';
|
||||
import Prism, { Grammar } from 'prismjs';
|
||||
|
||||
export type CloudWatchHistoryItem = HistoryItem<CloudWatchQuery>;
|
||||
|
||||
@ -50,7 +46,7 @@ export class CloudWatchLanguageProvider extends LanguageProvider {
|
||||
}
|
||||
|
||||
request = (url: string, params?: any): Promise<TSDBResponse> => {
|
||||
return this.datasource.awsRequest(url, params).toPromise();
|
||||
return lastValueFrom(this.datasource.awsRequest(url, params));
|
||||
};
|
||||
|
||||
start = () => {
|
||||
|
@ -1,4 +1,4 @@
|
||||
import { interval, of, throwError } from 'rxjs';
|
||||
import { interval, lastValueFrom, of, throwError } from 'rxjs';
|
||||
import {
|
||||
DataFrame,
|
||||
DataQueryErrorType,
|
||||
@ -29,12 +29,6 @@ import { CustomVariableModel, initialVariableModelState, VariableHide } from '..
|
||||
import * as rxjsUtils from '../utils/rxjs/increasingInterval';
|
||||
import { createFetchResponse } from 'test/helpers/createFetchResponse';
|
||||
|
||||
jest.mock('rxjs/operators', () => {
|
||||
const operators = jest.requireActual('rxjs/operators');
|
||||
operators.delay = jest.fn(() => (s: any) => s);
|
||||
return operators;
|
||||
});
|
||||
|
||||
jest.mock('@grafana/runtime', () => ({
|
||||
...((jest.requireActual('@grafana/runtime') as unknown) as object),
|
||||
getBackendSrv: () => backendSrv,
|
||||
@ -272,7 +266,9 @@ describe('CloudWatchDatasource', () => {
|
||||
}
|
||||
});
|
||||
|
||||
const myResponse = await ds.logsQuery([{ queryId: 'fake-query-id', region: 'default', refId: 'A' }]).toPromise();
|
||||
const myResponse = await lastValueFrom(
|
||||
ds.logsQuery([{ queryId: 'fake-query-id', region: 'default', refId: 'A' }])
|
||||
);
|
||||
|
||||
const expectedData = [
|
||||
{
|
||||
@ -313,7 +309,9 @@ describe('CloudWatchDatasource', () => {
|
||||
}
|
||||
});
|
||||
|
||||
const myResponse = await ds.logsQuery([{ queryId: 'fake-query-id', region: 'default', refId: 'A' }]).toPromise();
|
||||
const myResponse = await lastValueFrom(
|
||||
ds.logsQuery([{ queryId: 'fake-query-id', region: 'default', refId: 'A' }])
|
||||
);
|
||||
expect(myResponse).toEqual({
|
||||
data: [fakeFrames[fakeFrames.length - 1]],
|
||||
key: 'test-key',
|
||||
@ -336,7 +334,9 @@ describe('CloudWatchDatasource', () => {
|
||||
}
|
||||
});
|
||||
|
||||
const myResponse = await ds.logsQuery([{ queryId: 'fake-query-id', region: 'default', refId: 'A' }]).toPromise();
|
||||
const myResponse = await lastValueFrom(
|
||||
ds.logsQuery([{ queryId: 'fake-query-id', region: 'default', refId: 'A' }])
|
||||
);
|
||||
|
||||
expect(myResponse).toEqual({
|
||||
data: [fakeFrames[2]],
|
||||
|
@ -1,4 +1,8 @@
|
||||
import { cloneDeep, find, isNumber, isObject, isString, first as _first, map as _map } from 'lodash';
|
||||
import { cloneDeep, find, first as _first, isNumber, isObject, isString, map as _map } from 'lodash';
|
||||
import { generate, lastValueFrom, Observable, of, throwError } from 'rxjs';
|
||||
import { catchError, first, map, mergeMap, skipWhile, throwIfEmpty } from 'rxjs/operators';
|
||||
import { gte, lt, satisfies } from 'semver';
|
||||
import { BackendSrvRequest, getBackendSrv, getDataSourceSrv } from '@grafana/runtime';
|
||||
import {
|
||||
DataFrame,
|
||||
DataLink,
|
||||
@ -22,7 +26,6 @@ import { ElasticResponse } from './elastic_response';
|
||||
import { IndexPattern } from './index_pattern';
|
||||
import { ElasticQueryBuilder } from './query_builder';
|
||||
import { defaultBucketAgg, hasMetricOfType } from './query_def';
|
||||
import { BackendSrvRequest, getBackendSrv, getDataSourceSrv } from '@grafana/runtime';
|
||||
import { getTemplateSrv, TemplateSrv } from 'app/features/templating/template_srv';
|
||||
import { DataLinkConfig, ElasticsearchOptions, ElasticsearchQuery } from './types';
|
||||
import { RowContextOptions } from '@grafana/ui/src/components/Logs/LogRowContextProvider';
|
||||
@ -37,10 +40,7 @@ import {
|
||||
BucketAggregation,
|
||||
isBucketAggregationWithField,
|
||||
} from './components/QueryEditor/BucketAggregationsEditor/aggregations';
|
||||
import { generate, Observable, of, throwError } from 'rxjs';
|
||||
import { catchError, first, map, mergeMap, skipWhile, throwIfEmpty } from 'rxjs/operators';
|
||||
import { coerceESVersion, getScriptValue } from './utils';
|
||||
import { gte, lt, satisfies } from 'semver';
|
||||
|
||||
// Those are metadata fields as defined in https://www.elastic.co/guide/en/elasticsearch/reference/current/mapping-fields.html#_identity_metadata_fields.
|
||||
// custom fields can start with underscores, therefore is not safe to exclude anything that starts with one.
|
||||
@ -183,16 +183,16 @@ export class ElasticDatasource extends DataSourceApi<ElasticsearchQuery, Elastic
|
||||
const maxTraversals = 7; // do not go beyond one week (for a daily pattern)
|
||||
const listLen = indexList.length;
|
||||
|
||||
return generate(
|
||||
0,
|
||||
(i) => i < Math.min(listLen, maxTraversals),
|
||||
(i) => i + 1
|
||||
).pipe(
|
||||
return generate({
|
||||
initialState: 0,
|
||||
condition: (i) => i < Math.min(listLen, maxTraversals),
|
||||
iterate: (i) => i + 1,
|
||||
}).pipe(
|
||||
mergeMap((index) => {
|
||||
// catch all errors and emit an object with an err property to simplify checks later in the pipeline
|
||||
return this.request('GET', indexList[listLen - index - 1]).pipe(catchError((err) => of({ err })));
|
||||
}),
|
||||
skipWhile((resp) => resp.err && resp.err.status === 404), // skip all requests that fail because missing Elastic index
|
||||
skipWhile((resp) => resp?.err?.status === 404), // skip all requests that fail because missing Elastic index
|
||||
throwIfEmpty(() => 'Could not find an available index for this time range.'), // when i === Math.min(listLen, maxTraversals) generate will complete but without emitting any values which means we didn't find a valid index
|
||||
first(), // take the first value that isn't skipped
|
||||
map((resp) => {
|
||||
@ -279,8 +279,8 @@ export class ElasticDatasource extends DataSourceApi<ElasticsearchQuery, Elastic
|
||||
|
||||
const payload = JSON.stringify(header) + '\n' + JSON.stringify(data) + '\n';
|
||||
|
||||
return this.post('_msearch', payload)
|
||||
.pipe(
|
||||
return lastValueFrom(
|
||||
this.post('_msearch', payload).pipe(
|
||||
map((res) => {
|
||||
const list = [];
|
||||
const hits = res.responses[0].hits.hits;
|
||||
@ -351,7 +351,7 @@ export class ElasticDatasource extends DataSourceApi<ElasticsearchQuery, Elastic
|
||||
return list;
|
||||
})
|
||||
)
|
||||
.toPromise();
|
||||
);
|
||||
}
|
||||
|
||||
private interpolateLuceneQuery(queryString: string, scopedVars: ScopedVars) {
|
||||
@ -397,8 +397,8 @@ export class ElasticDatasource extends DataSourceApi<ElasticsearchQuery, Elastic
|
||||
|
||||
testDatasource() {
|
||||
// validate that the index exist and has date field
|
||||
return this.getFields(['date'])
|
||||
.pipe(
|
||||
return lastValueFrom(
|
||||
this.getFields(['date']).pipe(
|
||||
mergeMap((dateFields) => {
|
||||
const timeField: any = find(dateFields, { text: this.timeField });
|
||||
if (!timeField) {
|
||||
@ -415,7 +415,7 @@ export class ElasticDatasource extends DataSourceApi<ElasticsearchQuery, Elastic
|
||||
}
|
||||
})
|
||||
)
|
||||
.toPromise();
|
||||
);
|
||||
}
|
||||
|
||||
getQueryHeader(searchType: any, timeFrom?: DateTime, timeTo?: DateTime): string {
|
||||
@ -524,7 +524,7 @@ export class ElasticDatasource extends DataSourceApi<ElasticsearchQuery, Elastic
|
||||
});
|
||||
const payload = [header, esQuery].join('\n') + '\n';
|
||||
const url = this.getMultiSearchUrl();
|
||||
const response = await this.post(url, payload).toPromise();
|
||||
const response = await lastValueFrom(this.post(url, payload));
|
||||
const targets: ElasticsearchQuery[] = [{ refId: `${row.dataFrame.refId}`, metrics: [{ type: 'logs', id: '1' }] }];
|
||||
const elasticResponse = new ElasticResponse(targets, transformHitsBasedOnDirection(response, sort));
|
||||
const logResponse = elasticResponse.getLogs(this.logMessageField, this.logLevelField);
|
||||
@ -782,13 +782,13 @@ export class ElasticDatasource extends DataSourceApi<ElasticsearchQuery, Elastic
|
||||
if (query) {
|
||||
if (parsedQuery.find === 'fields') {
|
||||
parsedQuery.type = this.templateSrv.replace(parsedQuery.type, {}, 'lucene');
|
||||
return this.getFields(parsedQuery.type, range).toPromise();
|
||||
return lastValueFrom(this.getFields(parsedQuery.type, range));
|
||||
}
|
||||
|
||||
if (parsedQuery.find === 'terms') {
|
||||
parsedQuery.field = this.templateSrv.replace(parsedQuery.field, {}, 'lucene');
|
||||
parsedQuery.query = this.templateSrv.replace(parsedQuery.query || '*', {}, 'lucene');
|
||||
return this.getTerms(parsedQuery, range).toPromise();
|
||||
return lastValueFrom(this.getTerms(parsedQuery, range));
|
||||
}
|
||||
}
|
||||
|
||||
@ -796,11 +796,11 @@ export class ElasticDatasource extends DataSourceApi<ElasticsearchQuery, Elastic
|
||||
}
|
||||
|
||||
getTagKeys() {
|
||||
return this.getFields().toPromise();
|
||||
return lastValueFrom(this.getFields());
|
||||
}
|
||||
|
||||
getTagValues(options: any) {
|
||||
return this.getTerms({ field: options.key, query: '*' }).toPromise();
|
||||
return lastValueFrom(this.getTerms({ field: options.key, query: '*' }));
|
||||
}
|
||||
|
||||
targetContainsTemplate(target: any) {
|
||||
|
@ -1,3 +1,4 @@
|
||||
import { lastValueFrom } from 'rxjs';
|
||||
import { MetricFindValue, SelectableValue } from '@grafana/data';
|
||||
import {
|
||||
BucketAggregationType,
|
||||
@ -62,7 +63,7 @@ export const useFields = (type: AggregationType | string[]) => {
|
||||
return async (q?: string) => {
|
||||
// _mapping doesn't support filtering, we avoid sending a request everytime q changes
|
||||
if (!rawFields) {
|
||||
rawFields = await datasource.getFields(filter, range).toPromise();
|
||||
rawFields = await lastValueFrom(datasource.getFields(filter, range));
|
||||
}
|
||||
|
||||
return rawFields.filter(({ text }) => q === undefined || text.includes(q)).map(toSelectableValue);
|
||||
|
@ -1,9 +1,10 @@
|
||||
import { lastValueFrom, of } from 'rxjs';
|
||||
import { DataFrame, getFrameDisplayName, toUtc } from '@grafana/data';
|
||||
import { setBackendSrv } from '@grafana/runtime';
|
||||
|
||||
import { TemplateSrv } from 'app/features/templating/template_srv';
|
||||
import { backendSrv } from 'app/core/services/backend_srv';
|
||||
import { setBackendSrv } from '@grafana/runtime';
|
||||
import AppInsightsDatasource from './app_insights_datasource';
|
||||
import { of } from 'rxjs';
|
||||
|
||||
const templateSrv = new TemplateSrv();
|
||||
|
||||
@ -174,17 +175,14 @@ describe('AppInsightsDatasource', () => {
|
||||
});
|
||||
|
||||
it('should return a list of datapoints', () => {
|
||||
return ctx.ds
|
||||
.query(options)
|
||||
.toPromise()
|
||||
.then((results: any) => {
|
||||
expect(results.data.length).toBe(1);
|
||||
const data = results.data[0] as DataFrame;
|
||||
expect(getFrameDisplayName(data)).toEqual('PrimaryResult');
|
||||
expect(data.fields[0].values.length).toEqual(1);
|
||||
expect(data.fields[0].values.get(0)).toEqual(1558278660000);
|
||||
expect(data.fields[1].values.get(0)).toEqual(2.2075);
|
||||
});
|
||||
return lastValueFrom(ctx.ds.query(options)).then((results: any) => {
|
||||
expect(results.data.length).toBe(1);
|
||||
const data = results.data[0] as DataFrame;
|
||||
expect(getFrameDisplayName(data)).toEqual('PrimaryResult');
|
||||
expect(data.fields[0].values.length).toEqual(1);
|
||||
expect(data.fields[0].values.get(0)).toEqual(1558278660000);
|
||||
expect(data.fields[1].values.get(0)).toEqual(2.2075);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
@ -216,17 +214,14 @@ describe('AppInsightsDatasource', () => {
|
||||
});
|
||||
|
||||
it('should return a list of datapoints', () => {
|
||||
return ctx.ds
|
||||
.query(options)
|
||||
.toPromise()
|
||||
.then((results: any) => {
|
||||
expect(results.data.length).toBe(1);
|
||||
const data = results.data[0] as DataFrame;
|
||||
expect(getFrameDisplayName(data)).toEqual('paritionA');
|
||||
expect(data.fields[0].values.length).toEqual(1);
|
||||
expect(data.fields[0].values.get(0)).toEqual(1558278660000);
|
||||
expect(data.fields[1].values.get(0)).toEqual(2.2075);
|
||||
});
|
||||
return lastValueFrom(ctx.ds.query(options)).then((results: any) => {
|
||||
expect(results.data.length).toBe(1);
|
||||
const data = results.data[0] as DataFrame;
|
||||
expect(getFrameDisplayName(data)).toEqual('paritionA');
|
||||
expect(data.fields[0].values.length).toEqual(1);
|
||||
expect(data.fields[0].values.get(0)).toEqual(1558278660000);
|
||||
expect(data.fields[1].values.get(0)).toEqual(2.2075);
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
@ -280,16 +275,13 @@ describe('AppInsightsDatasource', () => {
|
||||
});
|
||||
|
||||
it('should return a single datapoint', () => {
|
||||
return ctx.ds
|
||||
.query(options)
|
||||
.toPromise()
|
||||
.then((results: any) => {
|
||||
expect(results.data.length).toBe(1);
|
||||
const data = results.data[0] as DataFrame;
|
||||
expect(getFrameDisplayName(data)).toEqual('exceptions/server');
|
||||
expect(data.fields[0].values.get(0)).toEqual(1558278660000);
|
||||
expect(data.fields[1].values.get(0)).toEqual(2.2075);
|
||||
});
|
||||
return lastValueFrom(ctx.ds.query(options)).then((results: any) => {
|
||||
expect(results.data.length).toBe(1);
|
||||
const data = results.data[0] as DataFrame;
|
||||
expect(getFrameDisplayName(data)).toEqual('exceptions/server');
|
||||
expect(data.fields[0].values.get(0)).toEqual(1558278660000);
|
||||
expect(data.fields[1].values.get(0)).toEqual(2.2075);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
@ -326,19 +318,16 @@ describe('AppInsightsDatasource', () => {
|
||||
});
|
||||
|
||||
it('should return a list of datapoints', () => {
|
||||
return ctx.ds
|
||||
.query(options)
|
||||
.toPromise()
|
||||
.then((results: any) => {
|
||||
expect(results.data.length).toBe(1);
|
||||
const data = results.data[0] as DataFrame;
|
||||
expect(getFrameDisplayName(data)).toEqual('exceptions/server');
|
||||
expect(data.fields[0].values.length).toEqual(2);
|
||||
expect(data.fields[0].values.get(0)).toEqual(1504108800000);
|
||||
expect(data.fields[1].values.get(0)).toEqual(3);
|
||||
expect(data.fields[0].values.get(1)).toEqual(1504112400000);
|
||||
expect(data.fields[1].values.get(1)).toEqual(6);
|
||||
});
|
||||
return lastValueFrom(ctx.ds.query(options)).then((results: any) => {
|
||||
expect(results.data.length).toBe(1);
|
||||
const data = results.data[0] as DataFrame;
|
||||
expect(getFrameDisplayName(data)).toEqual('exceptions/server');
|
||||
expect(data.fields[0].values.length).toEqual(2);
|
||||
expect(data.fields[0].values.get(0)).toEqual(1504108800000);
|
||||
expect(data.fields[1].values.get(0)).toEqual(3);
|
||||
expect(data.fields[0].values.get(1)).toEqual(1504112400000);
|
||||
expect(data.fields[1].values.get(1)).toEqual(6);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
@ -383,26 +372,23 @@ describe('AppInsightsDatasource', () => {
|
||||
});
|
||||
|
||||
it('should return a list of datapoints', () => {
|
||||
return ctx.ds
|
||||
.query(options)
|
||||
.toPromise()
|
||||
.then((results: any) => {
|
||||
expect(results.data.length).toBe(2);
|
||||
let data = results.data[0] as DataFrame;
|
||||
expect(getFrameDisplayName(data)).toEqual('exceptions/server{client/city="Miami"}');
|
||||
expect(data.fields[1].values.length).toEqual(2);
|
||||
expect(data.fields[0].values.get(0)).toEqual(1504108800000);
|
||||
expect(data.fields[1].values.get(0)).toEqual(10);
|
||||
expect(data.fields[0].values.get(1)).toEqual(1504112400000);
|
||||
expect(data.fields[1].values.get(1)).toEqual(20);
|
||||
data = results.data[1] as DataFrame;
|
||||
expect(getFrameDisplayName(data)).toEqual('exceptions/server{client/city="San Antonio"}');
|
||||
expect(data.fields[1].values.length).toEqual(2);
|
||||
expect(data.fields[0].values.get(0)).toEqual(1504108800000);
|
||||
expect(data.fields[1].values.get(0)).toEqual(1);
|
||||
expect(data.fields[0].values.get(1)).toEqual(1504112400000);
|
||||
expect(data.fields[1].values.get(1)).toEqual(2);
|
||||
});
|
||||
return lastValueFrom(ctx.ds.query(options)).then((results: any) => {
|
||||
expect(results.data.length).toBe(2);
|
||||
let data = results.data[0] as DataFrame;
|
||||
expect(getFrameDisplayName(data)).toEqual('exceptions/server{client/city="Miami"}');
|
||||
expect(data.fields[1].values.length).toEqual(2);
|
||||
expect(data.fields[0].values.get(0)).toEqual(1504108800000);
|
||||
expect(data.fields[1].values.get(0)).toEqual(10);
|
||||
expect(data.fields[0].values.get(1)).toEqual(1504112400000);
|
||||
expect(data.fields[1].values.get(1)).toEqual(20);
|
||||
data = results.data[1] as DataFrame;
|
||||
expect(getFrameDisplayName(data)).toEqual('exceptions/server{client/city="San Antonio"}');
|
||||
expect(data.fields[1].values.length).toEqual(2);
|
||||
expect(data.fields[0].values.get(0)).toEqual(1504108800000);
|
||||
expect(data.fields[1].values.get(0)).toEqual(1);
|
||||
expect(data.fields[0].values.get(1)).toEqual(1504112400000);
|
||||
expect(data.fields[1].values.get(1)).toEqual(2);
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
|
@ -1,4 +1,7 @@
|
||||
import { each, indexOf, isArray, isString, map as _map } from 'lodash';
|
||||
import { lastValueFrom, Observable, of, OperatorFunction, pipe, throwError } from 'rxjs';
|
||||
import { catchError, map } from 'rxjs/operators';
|
||||
import { getBackendSrv } from '@grafana/runtime';
|
||||
import {
|
||||
DataFrame,
|
||||
DataQueryRequest,
|
||||
@ -11,9 +14,9 @@ import {
|
||||
TimeRange,
|
||||
toDataFrame,
|
||||
} from '@grafana/data';
|
||||
|
||||
import { isVersionGtOrEq, SemVersion } from 'app/core/utils/version';
|
||||
import gfunc, { FuncDefs, FuncInstance } from './gfunc';
|
||||
import { getBackendSrv } from '@grafana/runtime';
|
||||
import { getTemplateSrv, TemplateSrv } from 'app/features/templating/template_srv';
|
||||
// Types
|
||||
import {
|
||||
@ -26,8 +29,6 @@ import {
|
||||
} from './types';
|
||||
import { getRollupNotice, getRuntimeConsolidationNotice } from 'app/plugins/datasource/graphite/meta';
|
||||
import { getSearchFilterScopedVar } from '../../../features/variables/utils';
|
||||
import { Observable, of, OperatorFunction, pipe, throwError } from 'rxjs';
|
||||
import { catchError, map } from 'rxjs/operators';
|
||||
import { DEFAULT_GRAPHITE_VERSION } from './versions';
|
||||
import { reduceError } from './utils';
|
||||
|
||||
@ -250,8 +251,8 @@ export class GraphiteDatasource extends DataSourceApi<
|
||||
maxDataPoints: 100,
|
||||
} as unknown) as DataQueryRequest<GraphiteQuery>;
|
||||
|
||||
return this.query(graphiteQuery)
|
||||
.pipe(
|
||||
return lastValueFrom(
|
||||
this.query(graphiteQuery).pipe(
|
||||
map((result: any) => {
|
||||
const list = [];
|
||||
|
||||
@ -277,7 +278,7 @@ export class GraphiteDatasource extends DataSourceApi<
|
||||
return list;
|
||||
})
|
||||
)
|
||||
.toPromise();
|
||||
);
|
||||
} else {
|
||||
// Graphite event as annotation
|
||||
const tags = this.templateSrv.replace(options.annotation.tags);
|
||||
@ -315,15 +316,17 @@ export class GraphiteDatasource extends DataSourceApi<
|
||||
if (options.tags) {
|
||||
tags = '&tags=' + options.tags;
|
||||
}
|
||||
return this.doGraphiteRequest({
|
||||
method: 'GET',
|
||||
url:
|
||||
'/events/get_data?from=' +
|
||||
this.translateTime(options.range.raw.from, false, options.timezone) +
|
||||
'&until=' +
|
||||
this.translateTime(options.range.raw.to, true, options.timezone) +
|
||||
tags,
|
||||
}).toPromise();
|
||||
return lastValueFrom(
|
||||
this.doGraphiteRequest({
|
||||
method: 'GET',
|
||||
url:
|
||||
'/events/get_data?from=' +
|
||||
this.translateTime(options.range.raw.from, false, options.timezone) +
|
||||
'&until=' +
|
||||
this.translateTime(options.range.raw.to, true, options.timezone) +
|
||||
tags,
|
||||
})
|
||||
);
|
||||
} catch (err) {
|
||||
return Promise.reject(err);
|
||||
}
|
||||
@ -444,8 +447,8 @@ export class GraphiteDatasource extends DataSourceApi<
|
||||
httpOptions.params.until = range.until;
|
||||
}
|
||||
|
||||
return this.doGraphiteRequest(httpOptions)
|
||||
.pipe(
|
||||
return lastValueFrom(
|
||||
this.doGraphiteRequest(httpOptions).pipe(
|
||||
map((results: any) => {
|
||||
return _map(results.data, (metric) => {
|
||||
return {
|
||||
@ -455,7 +458,7 @@ export class GraphiteDatasource extends DataSourceApi<
|
||||
});
|
||||
})
|
||||
)
|
||||
.toPromise();
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
@ -484,8 +487,8 @@ export class GraphiteDatasource extends DataSourceApi<
|
||||
httpOptions.params.until = range.until;
|
||||
}
|
||||
|
||||
return this.doGraphiteRequest(httpOptions)
|
||||
.pipe(
|
||||
return lastValueFrom(
|
||||
this.doGraphiteRequest(httpOptions).pipe(
|
||||
map((results: any) => {
|
||||
return _map(results.data.results, (metric) => {
|
||||
return {
|
||||
@ -495,7 +498,7 @@ export class GraphiteDatasource extends DataSourceApi<
|
||||
});
|
||||
})
|
||||
)
|
||||
.toPromise();
|
||||
);
|
||||
}
|
||||
|
||||
getTags(optionalOptions: any) {
|
||||
@ -513,8 +516,8 @@ export class GraphiteDatasource extends DataSourceApi<
|
||||
httpOptions.params.until = this.translateTime(options.range.to, true, options.timezone);
|
||||
}
|
||||
|
||||
return this.doGraphiteRequest(httpOptions)
|
||||
.pipe(
|
||||
return lastValueFrom(
|
||||
this.doGraphiteRequest(httpOptions).pipe(
|
||||
map((results: any) => {
|
||||
return _map(results.data, (tag) => {
|
||||
return {
|
||||
@ -524,7 +527,7 @@ export class GraphiteDatasource extends DataSourceApi<
|
||||
});
|
||||
})
|
||||
)
|
||||
.toPromise();
|
||||
);
|
||||
}
|
||||
|
||||
getTagValues(options: any = {}) {
|
||||
@ -540,8 +543,8 @@ export class GraphiteDatasource extends DataSourceApi<
|
||||
httpOptions.params.until = this.translateTime(options.range.to, true, options.timezone);
|
||||
}
|
||||
|
||||
return this.doGraphiteRequest(httpOptions)
|
||||
.pipe(
|
||||
return lastValueFrom(
|
||||
this.doGraphiteRequest(httpOptions).pipe(
|
||||
map((results: any) => {
|
||||
if (results.data && results.data.values) {
|
||||
return _map(results.data.values, (value) => {
|
||||
@ -555,7 +558,7 @@ export class GraphiteDatasource extends DataSourceApi<
|
||||
}
|
||||
})
|
||||
)
|
||||
.toPromise();
|
||||
);
|
||||
}
|
||||
|
||||
getTagsAutoComplete(expressions: any[], tagPrefix: any, optionalOptions?: any) {
|
||||
@ -581,7 +584,7 @@ export class GraphiteDatasource extends DataSourceApi<
|
||||
httpOptions.params.from = this.translateTime(options.range.from, false, options.timezone);
|
||||
httpOptions.params.until = this.translateTime(options.range.to, true, options.timezone);
|
||||
}
|
||||
return this.doGraphiteRequest(httpOptions).pipe(mapToTags()).toPromise();
|
||||
return lastValueFrom(this.doGraphiteRequest(httpOptions).pipe(mapToTags()));
|
||||
}
|
||||
|
||||
getTagValuesAutoComplete(expressions: any[], tag: any, valuePrefix: any, optionalOptions: any) {
|
||||
@ -608,7 +611,7 @@ export class GraphiteDatasource extends DataSourceApi<
|
||||
httpOptions.params.from = this.translateTime(options.range.from, false, options.timezone);
|
||||
httpOptions.params.until = this.translateTime(options.range.to, true, options.timezone);
|
||||
}
|
||||
return this.doGraphiteRequest(httpOptions).pipe(mapToTags()).toPromise();
|
||||
return lastValueFrom(this.doGraphiteRequest(httpOptions).pipe(mapToTags()));
|
||||
}
|
||||
|
||||
getVersion(optionalOptions: any) {
|
||||
@ -620,8 +623,8 @@ export class GraphiteDatasource extends DataSourceApi<
|
||||
requestId: options.requestId,
|
||||
};
|
||||
|
||||
return this.doGraphiteRequest(httpOptions)
|
||||
.pipe(
|
||||
return lastValueFrom(
|
||||
this.doGraphiteRequest(httpOptions).pipe(
|
||||
map((results: any) => {
|
||||
if (results.data) {
|
||||
const semver = new SemVersion(results.data);
|
||||
@ -633,7 +636,7 @@ export class GraphiteDatasource extends DataSourceApi<
|
||||
return of('');
|
||||
})
|
||||
)
|
||||
.toPromise();
|
||||
);
|
||||
}
|
||||
|
||||
createFuncInstance(funcDef: any, options?: any): FuncInstance {
|
||||
@ -664,8 +667,8 @@ export class GraphiteDatasource extends DataSourceApi<
|
||||
url: '/functions',
|
||||
};
|
||||
|
||||
return this.doGraphiteRequest(httpOptions)
|
||||
.pipe(
|
||||
return lastValueFrom(
|
||||
this.doGraphiteRequest(httpOptions).pipe(
|
||||
map((results: any) => {
|
||||
if (results.status !== 200 || typeof results.data !== 'object') {
|
||||
if (typeof results.data === 'string') {
|
||||
@ -690,7 +693,7 @@ export class GraphiteDatasource extends DataSourceApi<
|
||||
return of(this.funcDefs);
|
||||
})
|
||||
)
|
||||
.toPromise();
|
||||
);
|
||||
}
|
||||
|
||||
testDatasource() {
|
||||
@ -704,9 +707,7 @@ export class GraphiteDatasource extends DataSourceApi<
|
||||
maxDataPoints: 300,
|
||||
} as unknown) as DataQueryRequest<GraphiteQuery>;
|
||||
|
||||
return this.query(query)
|
||||
.toPromise()
|
||||
.then(() => ({ status: 'success', message: 'Data source is working' }));
|
||||
return lastValueFrom(this.query(query)).then(() => ({ status: 'success', message: 'Data source is working' }));
|
||||
}
|
||||
|
||||
doGraphiteRequest(options: {
|
||||
|
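The Graphite hunks above all apply the same mechanical rewrite used throughout this commit: instead of ending an RxJS pipeline with .toPromise(), the whole pipeline is wrapped in lastValueFrom. A minimal sketch of that pattern, for orientation only (doRequest and getNames are hypothetical placeholders, not code from this diff):

import { lastValueFrom, Observable } from 'rxjs';
import { map } from 'rxjs/operators';

// Hypothetical request helper standing in for doGraphiteRequest/_request in the hunks above.
declare function doRequest(url: string): Observable<{ data: string[] }>;

// RxJS 6 style: doRequest(url).pipe(map((res) => res.data)).toPromise()
// RxJS 7 style: the whole pipeline is wrapped in lastValueFrom instead.
function getNames(url: string): Promise<string[]> {
  return lastValueFrom(doRequest(url).pipe(map((res) => res.data)));
}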
@ -1,36 +1,36 @@
|
||||
import { cloneDeep, map as _map, reduce, get, has, extend, omit, pick, isString } from 'lodash';
|
||||
|
||||
import { cloneDeep, extend, get, has, isString, map as _map, omit, pick, reduce } from 'lodash';
|
||||
import { lastValueFrom, Observable, of, throwError } from 'rxjs';
|
||||
import { catchError, map } from 'rxjs/operators';
|
||||
import { v4 as uuidv4 } from 'uuid';
|
||||
import { DataSourceWithBackend, frameToMetricFindValue, getBackendSrv } from '@grafana/runtime';
|
||||
import {
|
||||
dateMath,
|
||||
DataSourceInstanceSettings,
|
||||
ScopedVars,
|
||||
AnnotationEvent,
|
||||
AnnotationQueryRequest,
|
||||
ArrayVector,
|
||||
DataFrame,
|
||||
DataQueryError,
|
||||
DataQueryRequest,
|
||||
DataQueryResponse,
|
||||
DataSourceInstanceSettings,
|
||||
dateMath,
|
||||
dateTime,
|
||||
FieldType,
|
||||
LoadingState,
|
||||
QueryResultMeta,
|
||||
MetricFindValue,
|
||||
AnnotationQueryRequest,
|
||||
AnnotationEvent,
|
||||
DataQueryError,
|
||||
DataFrame,
|
||||
TimeSeries,
|
||||
QueryResultMeta,
|
||||
ScopedVars,
|
||||
TIME_SERIES_TIME_FIELD_NAME,
|
||||
TIME_SERIES_VALUE_FIELD_NAME,
|
||||
FieldType,
|
||||
ArrayVector,
|
||||
TimeSeries,
|
||||
} from '@grafana/data';
|
||||
import { v4 as uuidv4 } from 'uuid';
|
||||
|
||||
import InfluxSeries from './influx_series';
|
||||
import InfluxQueryModel from './influx_query_model';
|
||||
import ResponseParser from './response_parser';
|
||||
import { InfluxQueryBuilder } from './query_builder';
|
||||
import { InfluxQuery, InfluxOptions, InfluxVersion } from './types';
|
||||
import { InfluxOptions, InfluxQuery, InfluxVersion } from './types';
|
||||
import { getTemplateSrv, TemplateSrv } from 'app/features/templating/template_srv';
|
||||
import { getBackendSrv, DataSourceWithBackend, frameToMetricFindValue } from '@grafana/runtime';
|
||||
import { Observable, throwError, of } from 'rxjs';
|
||||
import { FluxQueryEditor } from './components/FluxQueryEditor';
|
||||
import { catchError, map } from 'rxjs/operators';
|
||||
import { buildRawQuery } from './queryUtils';
|
||||
|
||||
// we detect the field type based on the value-array
|
||||
@ -310,17 +310,15 @@ export default class InfluxDatasource extends DataSourceWithBackend<InfluxQuery,
|
||||
let query = options.annotation.query.replace('$timeFilter', timeFilter);
|
||||
query = this.templateSrv.replace(query, undefined, 'regex');
|
||||
|
||||
return this._seriesQuery(query, options)
|
||||
.toPromise()
|
||||
.then((data: any) => {
|
||||
if (!data || !data.results || !data.results[0]) {
|
||||
throw { message: 'No results in response from InfluxDB' };
|
||||
}
|
||||
return new InfluxSeries({
|
||||
series: data.results[0].series,
|
||||
annotation: options.annotation,
|
||||
}).getAnnotations();
|
||||
});
|
||||
return lastValueFrom(this._seriesQuery(query, options)).then((data: any) => {
|
||||
if (!data || !data.results || !data.results[0]) {
|
||||
throw { message: 'No results in response from InfluxDB' };
|
||||
}
|
||||
return new InfluxSeries({
|
||||
series: data.results[0].series,
|
||||
annotation: options.annotation,
|
||||
}).getAnnotations();
|
||||
});
|
||||
}
|
||||
|
||||
targetContainsTemplate(target: any) {
|
||||
@ -370,27 +368,24 @@ export default class InfluxDatasource extends DataSourceWithBackend<InfluxQuery,
|
||||
refId: 'metricFindQuery',
|
||||
query,
|
||||
};
|
||||
return super
|
||||
.query({
|
||||
return lastValueFrom(
|
||||
super.query({
|
||||
...options, // includes 'range'
|
||||
targets: [target],
|
||||
} as DataQueryRequest)
|
||||
.toPromise()
|
||||
.then((rsp) => {
|
||||
if (rsp.data?.length) {
|
||||
return frameToMetricFindValue(rsp.data[0]);
|
||||
}
|
||||
return [];
|
||||
});
|
||||
).then((rsp) => {
|
||||
if (rsp.data?.length) {
|
||||
return frameToMetricFindValue(rsp.data[0]);
|
||||
}
|
||||
return [];
|
||||
});
|
||||
}
|
||||
|
||||
const interpolated = this.templateSrv.replace(query, undefined, 'regex');
|
||||
|
||||
return this._seriesQuery(interpolated, options)
|
||||
.toPromise()
|
||||
.then((resp) => {
|
||||
return this.responseParser.parse(query, resp);
|
||||
});
|
||||
return lastValueFrom(this._seriesQuery(interpolated, options)).then((resp) => {
|
||||
return this.responseParser.parse(query, resp);
|
||||
});
|
||||
}
|
||||
|
||||
getTagKeys(options: any = {}) {
|
||||
@ -453,9 +448,7 @@ export default class InfluxDatasource extends DataSourceWithBackend<InfluxQuery,
|
||||
},
|
||||
} as DataQueryRequest<InfluxQuery>;
|
||||
|
||||
return super
|
||||
.query(request)
|
||||
.toPromise()
|
||||
return lastValueFrom(super.query(request))
|
||||
.then((res: DataQueryResponse) => {
|
||||
if (!res || !res.data || res.state !== LoadingState.Done) {
|
||||
console.error('InfluxDB Error', res);
|
||||
@ -477,8 +470,7 @@ export default class InfluxDatasource extends DataSourceWithBackend<InfluxQuery,
|
||||
const queryBuilder = new InfluxQueryBuilder({ measurement: '', tags: [] }, this.database);
|
||||
const query = queryBuilder.buildExploreQuery('RETENTION POLICIES');
|
||||
|
||||
return this._seriesQuery(query)
|
||||
.toPromise()
|
||||
return lastValueFrom(this._seriesQuery(query))
|
||||
.then((res: any) => {
|
||||
const error = get(res, 'results[0].error');
|
||||
if (error) {
|
||||
|
@ -1,9 +1,9 @@
|
||||
import InfluxDatasource from '../datasource';
|
||||
import { lastValueFrom, of } from 'rxjs';
|
||||
import { FetchResponse } from '@grafana/runtime';
|
||||
|
||||
import InfluxDatasource from '../datasource';
|
||||
import { TemplateSrvStub } from 'test/specs/helpers';
|
||||
import { backendSrv } from 'app/core/services/backend_srv'; // will use the version in __mocks__
|
||||
import { of } from 'rxjs';
|
||||
import { FetchResponse } from '@grafana/runtime';
|
||||
|
||||
//@ts-ignore
|
||||
const templateSrv = new TemplateSrvStub();
|
||||
@ -112,7 +112,7 @@ describe('InfluxDataSource', () => {
|
||||
});
|
||||
|
||||
try {
|
||||
await ctx.ds.query(queryOptions).toPromise();
|
||||
await lastValueFrom(ctx.ds.query(queryOptions));
|
||||
} catch (err) {
|
||||
expect(err.message).toBe('InfluxDB Error: Query timeout');
|
||||
}
|
||||
|
@ -1,6 +1,7 @@
|
||||
import { lastValueFrom, of, throwError } from 'rxjs';
|
||||
import { DataQueryRequest, DataSourceInstanceSettings, dateTime, FieldType, PluginType } from '@grafana/data';
|
||||
|
||||
import { backendSrv } from 'app/core/services/backend_srv';
|
||||
import { of, throwError } from 'rxjs';
|
||||
import { createFetchResponse } from 'test/helpers/createFetchResponse';
|
||||
import { ALL_OPERATIONS_KEY } from './components/SearchForm';
|
||||
import { JaegerDatasource } from './datasource';
|
||||
@ -36,7 +37,7 @@ describe('JaegerDatasource', () => {
|
||||
setupFetchMock({ data: [testResponse] });
|
||||
|
||||
const ds = new JaegerDatasource(defaultSettings);
|
||||
const response = await ds.query(defaultQuery).toPromise();
|
||||
const response = await lastValueFrom(ds.query(defaultQuery));
|
||||
expect(response.data.length).toBe(3);
|
||||
expect(response.data[0].fields).toMatchObject(testResponseDataFrameFields);
|
||||
expect(response.data[1].fields).toMatchObject(testResponseNodesFields);
|
||||
@ -55,18 +56,18 @@ describe('JaegerDatasource', () => {
|
||||
},
|
||||
],
|
||||
};
|
||||
await ds.query(query).toPromise();
|
||||
await lastValueFrom(ds.query(query));
|
||||
expect(mock).toBeCalledWith({ url: `${defaultSettings.url}/api/traces/a%2Fb` });
|
||||
});
|
||||
|
||||
it('returns empty response if trace id is not specified', async () => {
|
||||
const ds = new JaegerDatasource(defaultSettings);
|
||||
const response = await ds
|
||||
.query({
|
||||
const response = await lastValueFrom(
|
||||
ds.query({
|
||||
...defaultQuery,
|
||||
targets: [],
|
||||
})
|
||||
.toPromise();
|
||||
);
|
||||
const field = response.data[0].fields[0];
|
||||
expect(field.name).toBe('trace');
|
||||
expect(field.type).toBe(FieldType.trace);
|
||||
@ -76,12 +77,12 @@ describe('JaegerDatasource', () => {
|
||||
it('should handle json file upload', async () => {
|
||||
const ds = new JaegerDatasource(defaultSettings);
|
||||
ds.uploadedJson = JSON.stringify(mockJson);
|
||||
const response = await ds
|
||||
.query({
|
||||
const response = await lastValueFrom(
|
||||
ds.query({
|
||||
...defaultQuery,
|
||||
targets: [{ queryType: 'upload', refId: 'A' }],
|
||||
})
|
||||
.toPromise();
|
||||
);
|
||||
const field = response.data[0].fields[0];
|
||||
expect(field.name).toBe('traceID');
|
||||
expect(field.type).toBe(FieldType.string);
|
||||
@ -91,12 +92,12 @@ describe('JaegerDatasource', () => {
|
||||
it('should return search results when the query type is search', async () => {
|
||||
const mock = setupFetchMock({ data: [testResponse] });
|
||||
const ds = new JaegerDatasource(defaultSettings, timeSrvStub);
|
||||
const response = await ds
|
||||
.query({
|
||||
const response = await lastValueFrom(
|
||||
ds.query({
|
||||
...defaultQuery,
|
||||
targets: [{ queryType: 'search', refId: 'a', service: 'jaeger-query', operation: '/api/services' }],
|
||||
})
|
||||
.toPromise();
|
||||
);
|
||||
expect(mock).toBeCalledWith({
|
||||
url: `${defaultSettings.url}/api/traces?operation=%2Fapi%2Fservices&service=jaeger-query&start=1531468681000&end=1531489712000&lookback=custom`,
|
||||
});
|
||||
@ -109,12 +110,12 @@ describe('JaegerDatasource', () => {
|
||||
it('should remove operation from the query when all is selected', async () => {
|
||||
const mock = setupFetchMock({ data: [testResponse] });
|
||||
const ds = new JaegerDatasource(defaultSettings, timeSrvStub);
|
||||
await ds
|
||||
.query({
|
||||
await lastValueFrom(
|
||||
ds.query({
|
||||
...defaultQuery,
|
||||
targets: [{ queryType: 'search', refId: 'a', service: 'jaeger-query', operation: ALL_OPERATIONS_KEY }],
|
||||
})
|
||||
.toPromise();
|
||||
);
|
||||
expect(mock).toBeCalledWith({
|
||||
url: `${defaultSettings.url}/api/traces?service=jaeger-query&start=1531468681000&end=1531489712000&lookback=custom`,
|
||||
});
|
||||
@ -123,12 +124,12 @@ describe('JaegerDatasource', () => {
|
||||
it('should convert tags from logfmt format to an object', async () => {
|
||||
const mock = setupFetchMock({ data: [testResponse] });
|
||||
const ds = new JaegerDatasource(defaultSettings, timeSrvStub);
|
||||
await ds
|
||||
.query({
|
||||
await lastValueFrom(
|
||||
ds.query({
|
||||
...defaultQuery,
|
||||
targets: [{ queryType: 'search', refId: 'a', service: 'jaeger-query', tags: 'error=true' }],
|
||||
})
|
||||
.toPromise();
|
||||
);
|
||||
expect(mock).toBeCalledWith({
|
||||
url: `${defaultSettings.url}/api/traces?service=jaeger-query&tags=%7B%22error%22%3A%22true%22%7D&start=1531468681000&end=1531489712000&lookback=custom`,
|
||||
});
|
||||
|
@ -1,3 +1,6 @@
|
||||
import { identity, omit, pick, pickBy } from 'lodash';
|
||||
import { lastValueFrom, Observable, of } from 'rxjs';
|
||||
import { catchError, map } from 'rxjs/operators';
|
||||
import {
|
||||
DataQueryRequest,
|
||||
DataQueryResponse,
|
||||
@ -9,14 +12,12 @@ import {
|
||||
MutableDataFrame,
|
||||
} from '@grafana/data';
|
||||
import { BackendSrvRequest, getBackendSrv } from '@grafana/runtime';
|
||||
|
||||
import { serializeParams } from 'app/core/utils/fetch';
|
||||
import { getTimeSrv, TimeSrv } from 'app/features/dashboard/services/TimeSrv';
|
||||
import { Observable, of } from 'rxjs';
|
||||
import { catchError, map } from 'rxjs/operators';
|
||||
import { createTableFrame, createTraceFrame } from './responseTransform';
|
||||
import { createGraphFrames } from './graphTransform';
|
||||
import { JaegerQuery } from './types';
|
||||
import { identity, omit, pick, pickBy } from 'lodash';
|
||||
import { convertTagsLogfmt } from './util';
|
||||
import { ALL_OPERATIONS_KEY } from './components/SearchForm';
|
||||
|
||||
@ -27,7 +28,7 @@ export class JaegerDatasource extends DataSourceApi<JaegerQuery> {
|
||||
}
|
||||
|
||||
async metadataRequest(url: string, params?: Record<string, any>): Promise<any> {
|
||||
const res = await this._request(url, params, { hideFromInspector: true }).toPromise();
|
||||
const res = await lastValueFrom(this._request(url, params, { hideFromInspector: true }));
|
||||
return res.data.data;
|
||||
}
|
||||
|
||||
@ -87,8 +88,8 @@ export class JaegerDatasource extends DataSourceApi<JaegerQuery> {
|
||||
}
|
||||
|
||||
async testDatasource(): Promise<any> {
|
||||
return this._request('/api/services')
|
||||
.pipe(
|
||||
return lastValueFrom(
|
||||
this._request('/api/services').pipe(
|
||||
map((res) => {
|
||||
const values: any[] = res?.data?.data || [];
|
||||
const testResult =
|
||||
@ -121,7 +122,7 @@ export class JaegerDatasource extends DataSourceApi<JaegerQuery> {
|
||||
return of({ status: 'error', message: message });
|
||||
})
|
||||
)
|
||||
.toPromise();
|
||||
);
|
||||
}
|
||||
|
||||
getTimeRange(): { start: number; end: number } {
|
||||
|
@ -1,4 +1,4 @@
|
||||
import { of, throwError } from 'rxjs';
|
||||
import { lastValueFrom, of, throwError } from 'rxjs';
|
||||
import { take } from 'rxjs/operators';
|
||||
import { AnnotationQueryRequest, CoreApp, DataFrame, dateTime, FieldCache, TimeSeries, toUtc } from '@grafana/data';
|
||||
import { BackendSrvRequest, FetchResponse } from '@grafana/runtime';
|
||||
@ -219,56 +219,56 @@ describe('LokiDatasource', () => {
|
||||
|
||||
it('should run logs instant if only instant is selected', async () => {
|
||||
const { ds, options } = setup(logsQuery, CoreApp.Explore, true, false);
|
||||
await ds.query(options).toPromise();
|
||||
await lastValueFrom(ds.query(options));
|
||||
expect(ds.runInstantQuery).toBeCalled();
|
||||
expect(ds.runRangeQuery).not.toBeCalled();
|
||||
});
|
||||
|
||||
it('should run metrics instant if only instant is selected', async () => {
|
||||
const { ds, options } = setup(metricsQuery, CoreApp.Explore, true, false);
|
||||
await ds.query(options).toPromise();
|
||||
await lastValueFrom(ds.query(options));
|
||||
expect(ds.runInstantQuery).toBeCalled();
|
||||
expect(ds.runRangeQuery).not.toBeCalled();
|
||||
});
|
||||
|
||||
it('should run only logs range query if only range is selected', async () => {
|
||||
const { ds, options } = setup(logsQuery, CoreApp.Explore, false, true);
|
||||
await ds.query(options).toPromise();
|
||||
await lastValueFrom(ds.query(options));
|
||||
expect(ds.runInstantQuery).not.toBeCalled();
|
||||
expect(ds.runRangeQuery).toBeCalled();
|
||||
});
|
||||
|
||||
it('should run only metrics range query if only range is selected', async () => {
|
||||
const { ds, options } = setup(metricsQuery, CoreApp.Explore, false, true);
|
||||
await ds.query(options).toPromise();
|
||||
await lastValueFrom(ds.query(options));
|
||||
expect(ds.runInstantQuery).not.toBeCalled();
|
||||
expect(ds.runRangeQuery).toBeCalled();
|
||||
});
|
||||
|
||||
it('should run only logs range query if no query type is selected in Explore', async () => {
|
||||
const { ds, options } = setup(logsQuery, CoreApp.Explore);
|
||||
await ds.query(options).toPromise();
|
||||
await lastValueFrom(ds.query(options));
|
||||
expect(ds.runInstantQuery).not.toBeCalled();
|
||||
expect(ds.runRangeQuery).toBeCalled();
|
||||
});
|
||||
|
||||
it('should run only metrics range query if no query type is selected in Explore', async () => {
|
||||
const { ds, options } = setup(metricsQuery, CoreApp.Explore);
|
||||
await ds.query(options).toPromise();
|
||||
await lastValueFrom(ds.query(options));
|
||||
expect(ds.runInstantQuery).not.toBeCalled();
|
||||
expect(ds.runRangeQuery).toBeCalled();
|
||||
});
|
||||
|
||||
it('should run only logs range query in Dashboard', async () => {
|
||||
const { ds, options } = setup(logsQuery, CoreApp.Dashboard);
|
||||
await ds.query(options).toPromise();
|
||||
await lastValueFrom(ds.query(options));
|
||||
expect(ds.runInstantQuery).not.toBeCalled();
|
||||
expect(ds.runRangeQuery).toBeCalled();
|
||||
});
|
||||
|
||||
it('should run only metrics range query in Dashboard', async () => {
|
||||
const { ds, options } = setup(metricsQuery, CoreApp.Dashboard);
|
||||
await ds.query(options).toPromise();
|
||||
await lastValueFrom(ds.query(options));
|
||||
expect(ds.runInstantQuery).not.toBeCalled();
|
||||
expect(ds.runRangeQuery).toBeCalled();
|
||||
});
|
||||
@ -349,7 +349,7 @@ describe('LokiDatasource', () => {
|
||||
});
|
||||
|
||||
it('should not modify expression with no filters', async () => {
|
||||
await ds.query(options as any).toPromise();
|
||||
await lastValueFrom(ds.query(options as any));
|
||||
expect(ds.runRangeQuery).toBeCalledWith({ expr: DEFAULT_EXPR }, expect.anything(), expect.anything());
|
||||
});
|
||||
|
||||
@ -367,7 +367,7 @@ describe('LokiDatasource', () => {
|
||||
},
|
||||
]);
|
||||
|
||||
await ds.query(options as any).toPromise();
|
||||
await lastValueFrom(ds.query(options as any));
|
||||
expect(ds.runRangeQuery).toBeCalledWith(
|
||||
{ expr: 'rate({bar="baz",job="foo",k1="v1",k2!="v2"} |= "bar" [5m])' },
|
||||
expect.anything(),
|
||||
@ -388,7 +388,7 @@ describe('LokiDatasource', () => {
|
||||
value: `v'.*`,
|
||||
},
|
||||
]);
|
||||
await ds.query(options as any).toPromise();
|
||||
await lastValueFrom(ds.query(options as any));
|
||||
expect(ds.runRangeQuery).toBeCalledWith(
|
||||
{ expr: 'rate({bar="baz",job="foo",k1=~"v.*",k2=~"v\\\\\'.*"} |= "bar" [5m])' },
|
||||
expect.anything(),
|
||||
|
@ -1,6 +1,6 @@
|
||||
// Libraries
|
||||
import { cloneDeep, isEmpty, map as lodashMap } from 'lodash';
|
||||
import { merge, Observable, of, throwError } from 'rxjs';
|
||||
import { lastValueFrom, merge, Observable, of, throwError } from 'rxjs';
|
||||
import { catchError, map, switchMap } from 'rxjs/operators';
|
||||
import Prism from 'prismjs';
|
||||
|
||||
@ -36,7 +36,7 @@ import {
|
||||
lokiStreamsToDataFrames,
|
||||
processRangeQueryResponse,
|
||||
} from './result_transformer';
|
||||
import { getHighlighterExpressionsFromQuery, queryHasPipeParser, addParsedLabelToQuery } from './query_utils';
|
||||
import { addParsedLabelToQuery, getHighlighterExpressionsFromQuery, queryHasPipeParser } from './query_utils';
|
||||
|
||||
import {
|
||||
LokiOptions,
|
||||
@ -323,7 +323,7 @@ export class LokiDatasource extends DataSourceApi<LokiQuery, LokiOptions> {
|
||||
}
|
||||
|
||||
async metadataRequest(url: string, params?: Record<string, string | number>) {
|
||||
const res = await this._request(url, params, { hideFromInspector: true }).toPromise();
|
||||
const res = await lastValueFrom(this._request(url, params, { hideFromInspector: true }));
|
||||
return res.data.data || res.data.values || [];
|
||||
}
|
||||
|
||||
@ -459,8 +459,8 @@ export class LokiDatasource extends DataSourceApi<LokiQuery, LokiOptions> {
|
||||
);
|
||||
|
||||
const reverse = options && options.direction === 'FORWARD';
|
||||
return this._request(RANGE_QUERY_ENDPOINT, target)
|
||||
.pipe(
|
||||
return lastValueFrom(
|
||||
this._request(RANGE_QUERY_ENDPOINT, target).pipe(
|
||||
catchError((err: any) => {
|
||||
if (err.status === 404) {
|
||||
return of(err);
|
||||
@ -479,7 +479,7 @@ export class LokiDatasource extends DataSourceApi<LokiQuery, LokiOptions> {
|
||||
})
|
||||
)
|
||||
)
|
||||
.toPromise();
|
||||
);
|
||||
};
|
||||
|
||||
prepareLogRowContextQueryTarget = (row: LogRowModel, limit: number, direction: 'BACKWARD' | 'FORWARD') => {
|
||||
@ -524,8 +524,8 @@ export class LokiDatasource extends DataSourceApi<LokiQuery, LokiOptions> {
|
||||
// Consider only last 10 minutes otherwise request takes too long
|
||||
const startMs = Date.now() - 10 * 60 * 1000;
|
||||
const start = `${startMs}000000`; // API expects nanoseconds
|
||||
return this._request(`${LOKI_ENDPOINT}/label`, { start })
|
||||
.pipe(
|
||||
return lastValueFrom(
|
||||
this._request(`${LOKI_ENDPOINT}/label`, { start }).pipe(
|
||||
map((res) => {
|
||||
const values: any[] = res?.data?.data || res?.data?.values || [];
|
||||
const testResult =
|
||||
@ -558,7 +558,7 @@ export class LokiDatasource extends DataSourceApi<LokiQuery, LokiOptions> {
|
||||
return of({ status: 'error', message: message });
|
||||
})
|
||||
)
|
||||
.toPromise();
|
||||
);
|
||||
}
|
||||
|
||||
async annotationQuery(options: any): Promise<AnnotationEvent[]> {
|
||||
@ -585,8 +585,8 @@ export class LokiDatasource extends DataSourceApi<LokiQuery, LokiOptions> {
|
||||
stepInterval,
|
||||
};
|
||||
const { data } = instant
|
||||
? await this.runInstantQuery(query, options as any).toPromise()
|
||||
: await this.runRangeQuery(query, options as any).toPromise();
|
||||
? await lastValueFrom(this.runInstantQuery(query, options as any))
|
||||
: await lastValueFrom(this.runRangeQuery(query, options as any));
|
||||
|
||||
const annotations: AnnotationEvent[] = [];
|
||||
const splitKeys: string[] = tagKeys.split(',').filter((v: string) => v !== '');
|
||||
|
@ -1,12 +1,12 @@
|
||||
import { map as _map } from 'lodash';
|
||||
import { of } from 'rxjs';
|
||||
import { lastValueFrom, of } from 'rxjs';
|
||||
import { catchError, map, mapTo } from 'rxjs/operators';
|
||||
import { BackendDataSourceResponse, DataSourceWithBackend, FetchResponse, getBackendSrv } from '@grafana/runtime';
|
||||
import { AnnotationEvent, DataSourceInstanceSettings, ScopedVars, MetricFindValue } from '@grafana/data';
|
||||
import { AnnotationEvent, DataSourceInstanceSettings, MetricFindValue, ScopedVars } from '@grafana/data';
|
||||
|
||||
import ResponseParser from './response_parser';
|
||||
import { getTemplateSrv, TemplateSrv } from 'app/features/templating/template_srv';
|
||||
import { MssqlQueryForInterpolation, MssqlQuery, MssqlOptions } from './types';
|
||||
import { MssqlOptions, MssqlQuery, MssqlQueryForInterpolation } from './types';
|
||||
import { getTimeSrv, TimeSrv } from 'app/features/dashboard/services/TimeSrv';
|
||||
import { toTestingStatus } from '@grafana/runtime/src/utils/queryResponse';
|
||||
|
||||
@ -92,24 +92,25 @@ export class MssqlDatasource extends DataSourceWithBackend<MssqlQuery, MssqlOpti
|
||||
format: 'table',
|
||||
};
|
||||
|
||||
return getBackendSrv()
|
||||
.fetch<BackendDataSourceResponse>({
|
||||
url: '/api/ds/query',
|
||||
method: 'POST',
|
||||
data: {
|
||||
from: options.range.from.valueOf().toString(),
|
||||
to: options.range.to.valueOf().toString(),
|
||||
queries: [query],
|
||||
},
|
||||
requestId: options.annotation.name,
|
||||
})
|
||||
.pipe(
|
||||
map(
|
||||
async (res: FetchResponse<BackendDataSourceResponse>) =>
|
||||
await this.responseParser.transformAnnotationResponse(options, res.data)
|
||||
return lastValueFrom(
|
||||
getBackendSrv()
|
||||
.fetch<BackendDataSourceResponse>({
|
||||
url: '/api/ds/query',
|
||||
method: 'POST',
|
||||
data: {
|
||||
from: options.range.from.valueOf().toString(),
|
||||
to: options.range.to.valueOf().toString(),
|
||||
queries: [query],
|
||||
},
|
||||
requestId: options.annotation.name,
|
||||
})
|
||||
.pipe(
|
||||
map(
|
||||
async (res: FetchResponse<BackendDataSourceResponse>) =>
|
||||
await this.responseParser.transformAnnotationResponse(options, res.data)
|
||||
)
|
||||
)
|
||||
)
|
||||
.toPromise();
|
||||
);
|
||||
}
|
||||
|
||||
filterQuery(query: MssqlQuery): boolean {
|
||||
@ -131,52 +132,54 @@ export class MssqlDatasource extends DataSourceWithBackend<MssqlQuery, MssqlOpti
|
||||
format: 'table',
|
||||
};
|
||||
|
||||
return getBackendSrv()
|
||||
.fetch<BackendDataSourceResponse>({
|
||||
url: '/api/ds/query',
|
||||
method: 'POST',
|
||||
data: {
|
||||
from: range.from.valueOf().toString(),
|
||||
to: range.to.valueOf().toString(),
|
||||
queries: [interpolatedQuery],
|
||||
},
|
||||
requestId: refId,
|
||||
})
|
||||
.pipe(
|
||||
map((rsp) => {
|
||||
return this.responseParser.transformMetricFindResponse(rsp);
|
||||
return lastValueFrom(
|
||||
getBackendSrv()
|
||||
.fetch<BackendDataSourceResponse>({
|
||||
url: '/api/ds/query',
|
||||
method: 'POST',
|
||||
data: {
|
||||
from: range.from.valueOf().toString(),
|
||||
to: range.to.valueOf().toString(),
|
||||
queries: [interpolatedQuery],
|
||||
},
|
||||
requestId: refId,
|
||||
})
|
||||
)
|
||||
.toPromise();
|
||||
.pipe(
|
||||
map((rsp) => {
|
||||
return this.responseParser.transformMetricFindResponse(rsp);
|
||||
})
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
testDatasource(): Promise<any> {
|
||||
return getBackendSrv()
|
||||
.fetch({
|
||||
url: '/api/ds/query',
|
||||
method: 'POST',
|
||||
data: {
|
||||
from: '5m',
|
||||
to: 'now',
|
||||
queries: [
|
||||
{
|
||||
refId: 'A',
|
||||
intervalMs: 1,
|
||||
maxDataPoints: 1,
|
||||
datasourceId: this.id,
|
||||
rawSql: 'SELECT 1',
|
||||
format: 'table',
|
||||
},
|
||||
],
|
||||
},
|
||||
})
|
||||
.pipe(
|
||||
mapTo({ status: 'success', message: 'Database Connection OK' }),
|
||||
catchError((err) => {
|
||||
return of(toTestingStatus(err));
|
||||
return lastValueFrom(
|
||||
getBackendSrv()
|
||||
.fetch({
|
||||
url: '/api/ds/query',
|
||||
method: 'POST',
|
||||
data: {
|
||||
from: '5m',
|
||||
to: 'now',
|
||||
queries: [
|
||||
{
|
||||
refId: 'A',
|
||||
intervalMs: 1,
|
||||
maxDataPoints: 1,
|
||||
datasourceId: this.id,
|
||||
rawSql: 'SELECT 1',
|
||||
format: 'table',
|
||||
},
|
||||
],
|
||||
},
|
||||
})
|
||||
)
|
||||
.toPromise();
|
||||
.pipe(
|
||||
mapTo({ status: 'success', message: 'Database Connection OK' }),
|
||||
catchError((err) => {
|
||||
return of(toTestingStatus(err));
|
||||
})
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
targetContainsTemplate(query: MssqlQuery): boolean {
|
||||
|
@ -1,14 +1,15 @@
|
||||
import { map as _map } from 'lodash';
|
||||
import { lastValueFrom, of } from 'rxjs';
|
||||
import { catchError, map, mapTo } from 'rxjs/operators';
|
||||
import { getBackendSrv, DataSourceWithBackend, FetchResponse, BackendDataSourceResponse } from '@grafana/runtime';
|
||||
import { DataSourceInstanceSettings, ScopedVars, MetricFindValue, AnnotationEvent } from '@grafana/data';
|
||||
import { BackendDataSourceResponse, DataSourceWithBackend, FetchResponse, getBackendSrv } from '@grafana/runtime';
|
||||
import { AnnotationEvent, DataSourceInstanceSettings, MetricFindValue, ScopedVars } from '@grafana/data';
|
||||
|
||||
import MySQLQueryModel from 'app/plugins/datasource/mysql/mysql_query_model';
|
||||
import ResponseParser from './response_parser';
|
||||
import { MysqlQueryForInterpolation, MySQLOptions, MySQLQuery } from './types';
|
||||
import { MySQLOptions, MySQLQuery, MysqlQueryForInterpolation } from './types';
|
||||
import { getTemplateSrv, TemplateSrv } from 'app/features/templating/template_srv';
|
||||
import { getSearchFilterScopedVar } from '../../../features/variables/utils';
|
||||
import { getTimeSrv, TimeSrv } from 'app/features/dashboard/services/TimeSrv';
|
||||
import { of } from 'rxjs';
|
||||
import { toTestingStatus } from '@grafana/runtime/src/utils/queryResponse';
|
||||
|
||||
export class MysqlDatasource extends DataSourceWithBackend<MySQLQuery, MySQLOptions> {
|
||||
@ -99,24 +100,25 @@ export class MysqlDatasource extends DataSourceWithBackend<MySQLQuery, MySQLOpti
|
||||
format: 'table',
|
||||
};
|
||||
|
||||
return getBackendSrv()
|
||||
.fetch<BackendDataSourceResponse>({
|
||||
url: '/api/ds/query',
|
||||
method: 'POST',
|
||||
data: {
|
||||
from: options.range.from.valueOf().toString(),
|
||||
to: options.range.to.valueOf().toString(),
|
||||
queries: [query],
|
||||
},
|
||||
requestId: options.annotation.name,
|
||||
})
|
||||
.pipe(
|
||||
map(
|
||||
async (res: FetchResponse<BackendDataSourceResponse>) =>
|
||||
await this.responseParser.transformAnnotationResponse(options, res.data)
|
||||
return lastValueFrom(
|
||||
getBackendSrv()
|
||||
.fetch<BackendDataSourceResponse>({
|
||||
url: '/api/ds/query',
|
||||
method: 'POST',
|
||||
data: {
|
||||
from: options.range.from.valueOf().toString(),
|
||||
to: options.range.to.valueOf().toString(),
|
||||
queries: [query],
|
||||
},
|
||||
requestId: options.annotation.name,
|
||||
})
|
||||
.pipe(
|
||||
map(
|
||||
async (res: FetchResponse<BackendDataSourceResponse>) =>
|
||||
await this.responseParser.transformAnnotationResponse(options, res.data)
|
||||
)
|
||||
)
|
||||
)
|
||||
.toPromise();
|
||||
);
|
||||
}
|
||||
|
||||
metricFindQuery(query: string, optionalOptions: any): Promise<MetricFindValue[]> {
|
||||
@ -140,52 +142,54 @@ export class MysqlDatasource extends DataSourceWithBackend<MySQLQuery, MySQLOpti
|
||||
|
||||
const range = this.timeSrv.timeRange();
|
||||
|
||||
return getBackendSrv()
|
||||
.fetch<BackendDataSourceResponse>({
|
||||
url: '/api/ds/query',
|
||||
method: 'POST',
|
||||
data: {
|
||||
from: range.from.valueOf().toString(),
|
||||
to: range.to.valueOf().toString(),
|
||||
queries: [interpolatedQuery],
|
||||
},
|
||||
requestId: refId,
|
||||
})
|
||||
.pipe(
|
||||
map((rsp) => {
|
||||
return this.responseParser.transformMetricFindResponse(rsp);
|
||||
return lastValueFrom(
|
||||
getBackendSrv()
|
||||
.fetch<BackendDataSourceResponse>({
|
||||
url: '/api/ds/query',
|
||||
method: 'POST',
|
||||
data: {
|
||||
from: range.from.valueOf().toString(),
|
||||
to: range.to.valueOf().toString(),
|
||||
queries: [interpolatedQuery],
|
||||
},
|
||||
requestId: refId,
|
||||
})
|
||||
)
|
||||
.toPromise();
|
||||
.pipe(
|
||||
map((rsp) => {
|
||||
return this.responseParser.transformMetricFindResponse(rsp);
|
||||
})
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
testDatasource(): Promise<any> {
|
||||
return getBackendSrv()
|
||||
.fetch({
|
||||
url: '/api/ds/query',
|
||||
method: 'POST',
|
||||
data: {
|
||||
from: '5m',
|
||||
to: 'now',
|
||||
queries: [
|
||||
{
|
||||
refId: 'A',
|
||||
intervalMs: 1,
|
||||
maxDataPoints: 1,
|
||||
datasourceId: this.id,
|
||||
rawSql: 'SELECT 1',
|
||||
format: 'table',
|
||||
},
|
||||
],
|
||||
},
|
||||
})
|
||||
.pipe(
|
||||
mapTo({ status: 'success', message: 'Database Connection OK' }),
|
||||
catchError((err) => {
|
||||
return of(toTestingStatus(err));
|
||||
return lastValueFrom(
|
||||
getBackendSrv()
|
||||
.fetch({
|
||||
url: '/api/ds/query',
|
||||
method: 'POST',
|
||||
data: {
|
||||
from: '5m',
|
||||
to: 'now',
|
||||
queries: [
|
||||
{
|
||||
refId: 'A',
|
||||
intervalMs: 1,
|
||||
maxDataPoints: 1,
|
||||
datasourceId: this.id,
|
||||
rawSql: 'SELECT 1',
|
||||
format: 'table',
|
||||
},
|
||||
],
|
||||
},
|
||||
})
|
||||
)
|
||||
.toPromise();
|
||||
.pipe(
|
||||
mapTo({ status: 'success', message: 'Database Connection OK' }),
|
||||
catchError((err) => {
|
||||
return of(toTestingStatus(err));
|
||||
})
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
targetContainsTemplate(target: any) {
|
||||
|
@ -10,10 +10,10 @@ import {
|
||||
includes,
|
||||
isArray,
|
||||
isEmpty,
|
||||
toPairs,
|
||||
map as _map,
|
||||
toPairs,
|
||||
} from 'lodash';
|
||||
import { Observable, of } from 'rxjs';
|
||||
import { lastValueFrom, Observable, of } from 'rxjs';
|
||||
import { catchError, map } from 'rxjs/operators';
|
||||
import { FetchResponse, getBackendSrv } from '@grafana/runtime';
|
||||
import {
|
||||
@ -133,8 +133,8 @@ export default class OpenTsDatasource extends DataSourceApi<OpenTsdbQuery, OpenT
|
||||
|
||||
const queries = compact(qs);
|
||||
|
||||
return this.performTimeSeriesQuery(queries, start, end)
|
||||
.pipe(
|
||||
return lastValueFrom(
|
||||
this.performTimeSeriesQuery(queries, start, end).pipe(
|
||||
map((results) => {
|
||||
if (results.data[0]) {
|
||||
let annotationObject = results.data[0].annotations;
|
||||
@ -156,7 +156,7 @@ export default class OpenTsDatasource extends DataSourceApi<OpenTsdbQuery, OpenT
|
||||
return eventList;
|
||||
})
|
||||
)
|
||||
.toPromise();
|
||||
);
|
||||
}
|
||||
|
||||
targetContainsTemplate(target: any) {
|
||||
@ -332,42 +332,42 @@ export default class OpenTsDatasource extends DataSourceApi<OpenTsdbQuery, OpenT
|
||||
|
||||
const metricsQuery = interpolated.match(metricsRegex);
|
||||
if (metricsQuery) {
|
||||
return this._performSuggestQuery(metricsQuery[1], 'metrics').pipe(map(responseTransform)).toPromise();
|
||||
return lastValueFrom(this._performSuggestQuery(metricsQuery[1], 'metrics').pipe(map(responseTransform)));
|
||||
}
|
||||
|
||||
const tagNamesQuery = interpolated.match(tagNamesRegex);
|
||||
if (tagNamesQuery) {
|
||||
return this._performMetricKeyLookup(tagNamesQuery[1]).pipe(map(responseTransform)).toPromise();
|
||||
return lastValueFrom(this._performMetricKeyLookup(tagNamesQuery[1]).pipe(map(responseTransform)));
|
||||
}
|
||||
|
||||
const tagValuesQuery = interpolated.match(tagValuesRegex);
|
||||
if (tagValuesQuery) {
|
||||
return this._performMetricKeyValueLookup(tagValuesQuery[1], tagValuesQuery[2])
|
||||
.pipe(map(responseTransform))
|
||||
.toPromise();
|
||||
return lastValueFrom(
|
||||
this._performMetricKeyValueLookup(tagValuesQuery[1], tagValuesQuery[2]).pipe(map(responseTransform))
|
||||
);
|
||||
}
|
||||
|
||||
const tagNamesSuggestQuery = interpolated.match(tagNamesSuggestRegex);
|
||||
if (tagNamesSuggestQuery) {
|
||||
return this._performSuggestQuery(tagNamesSuggestQuery[1], 'tagk').pipe(map(responseTransform)).toPromise();
|
||||
return lastValueFrom(this._performSuggestQuery(tagNamesSuggestQuery[1], 'tagk').pipe(map(responseTransform)));
|
||||
}
|
||||
|
||||
const tagValuesSuggestQuery = interpolated.match(tagValuesSuggestRegex);
|
||||
if (tagValuesSuggestQuery) {
|
||||
return this._performSuggestQuery(tagValuesSuggestQuery[1], 'tagv').pipe(map(responseTransform)).toPromise();
|
||||
return lastValueFrom(this._performSuggestQuery(tagValuesSuggestQuery[1], 'tagv').pipe(map(responseTransform)));
|
||||
}
|
||||
|
||||
return Promise.resolve([]);
|
||||
}
|
||||
|
||||
testDatasource() {
|
||||
return this._performSuggestQuery('cpu', 'metrics')
|
||||
.pipe(
|
||||
return lastValueFrom(
|
||||
this._performSuggestQuery('cpu', 'metrics').pipe(
|
||||
map(() => {
|
||||
return { status: 'success', message: 'Data source is working' };
|
||||
})
|
||||
)
|
||||
.toPromise();
|
||||
);
|
||||
}
|
||||
|
||||
getAggregators() {
|
||||
@ -375,8 +375,8 @@ export default class OpenTsDatasource extends DataSourceApi<OpenTsdbQuery, OpenT
|
||||
return this.aggregatorsPromise;
|
||||
}
|
||||
|
||||
this.aggregatorsPromise = this._get('/api/aggregators')
|
||||
.pipe(
|
||||
this.aggregatorsPromise = lastValueFrom(
|
||||
this._get('/api/aggregators').pipe(
|
||||
map((result: any) => {
|
||||
if (result.data && isArray(result.data)) {
|
||||
return result.data.sort();
|
||||
@ -384,7 +384,7 @@ export default class OpenTsDatasource extends DataSourceApi<OpenTsdbQuery, OpenT
|
||||
return [];
|
||||
})
|
||||
)
|
||||
.toPromise();
|
||||
);
|
||||
return this.aggregatorsPromise;
|
||||
}
|
||||
|
||||
@ -393,8 +393,8 @@ export default class OpenTsDatasource extends DataSourceApi<OpenTsdbQuery, OpenT
|
||||
return this.filterTypesPromise;
|
||||
}
|
||||
|
||||
this.filterTypesPromise = this._get('/api/config/filters')
|
||||
.pipe(
|
||||
this.filterTypesPromise = lastValueFrom(
|
||||
this._get('/api/config/filters').pipe(
|
||||
map((result: any) => {
|
||||
if (result.data) {
|
||||
return Object.keys(result.data).sort();
|
||||
@ -402,7 +402,7 @@ export default class OpenTsDatasource extends DataSourceApi<OpenTsdbQuery, OpenT
|
||||
return [];
|
||||
})
|
||||
)
|
||||
.toPromise();
|
||||
);
|
||||
return this.filterTypesPromise;
|
||||
}
|
||||
|
||||
|
@ -1,4 +1,5 @@
|
||||
import { map as _map } from 'lodash';
|
||||
import { lastValueFrom } from 'rxjs';
|
||||
import { map } from 'rxjs/operators';
|
||||
import { BackendDataSourceResponse, DataSourceWithBackend, FetchResponse, getBackendSrv } from '@grafana/runtime';
|
||||
import { AnnotationEvent, DataSourceInstanceSettings, MetricFindValue, ScopedVars } from '@grafana/data';
|
||||
@ -101,24 +102,25 @@ export class PostgresDatasource extends DataSourceWithBackend<PostgresQuery, Pos
|
||||
format: 'table',
|
||||
};
|
||||
|
||||
return getBackendSrv()
|
||||
.fetch<BackendDataSourceResponse>({
|
||||
url: '/api/ds/query',
|
||||
method: 'POST',
|
||||
data: {
|
||||
from: options.range.from.valueOf().toString(),
|
||||
to: options.range.to.valueOf().toString(),
|
||||
queries: [query],
|
||||
},
|
||||
requestId: options.annotation.name,
|
||||
})
|
||||
.pipe(
|
||||
map(
|
||||
async (res: FetchResponse<BackendDataSourceResponse>) =>
|
||||
await this.responseParser.transformAnnotationResponse(options, res.data)
|
||||
return lastValueFrom(
|
||||
getBackendSrv()
|
||||
.fetch<BackendDataSourceResponse>({
|
||||
url: '/api/ds/query',
|
||||
method: 'POST',
|
||||
data: {
|
||||
from: options.range.from.valueOf().toString(),
|
||||
to: options.range.to.valueOf().toString(),
|
||||
queries: [query],
|
||||
},
|
||||
requestId: options.annotation.name,
|
||||
})
|
||||
.pipe(
|
||||
map(
|
||||
async (res: FetchResponse<BackendDataSourceResponse>) =>
|
||||
await this.responseParser.transformAnnotationResponse(options, res.data)
|
||||
)
|
||||
)
|
||||
)
|
||||
.toPromise();
|
||||
);
|
||||
}
|
||||
|
||||
metricFindQuery(query: string, optionalOptions: any): Promise<MetricFindValue[]> {
|
||||
@ -142,23 +144,24 @@ export class PostgresDatasource extends DataSourceWithBackend<PostgresQuery, Pos
|
||||
|
||||
const range = this.timeSrv.timeRange();
|
||||
|
||||
return getBackendSrv()
|
||||
.fetch<BackendDataSourceResponse>({
|
||||
url: '/api/ds/query',
|
||||
method: 'POST',
|
||||
data: {
|
||||
from: range.from.valueOf().toString(),
|
||||
to: range.to.valueOf().toString(),
|
||||
queries: [interpolatedQuery],
|
||||
},
|
||||
requestId: refId,
|
||||
})
|
||||
.pipe(
|
||||
map((rsp) => {
|
||||
return this.responseParser.transformMetricFindResponse(rsp);
|
||||
return lastValueFrom(
|
||||
getBackendSrv()
|
||||
.fetch<BackendDataSourceResponse>({
|
||||
url: '/api/ds/query',
|
||||
method: 'POST',
|
||||
data: {
|
||||
from: range.from.valueOf().toString(),
|
||||
to: range.to.valueOf().toString(),
|
||||
queries: [interpolatedQuery],
|
||||
},
|
||||
requestId: refId,
|
||||
})
|
||||
)
|
||||
.toPromise();
|
||||
.pipe(
|
||||
map((rsp) => {
|
||||
return this.responseParser.transformMetricFindResponse(rsp);
|
||||
})
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
getVersion(): Promise<any> {
|
||||
|
@ -1,3 +1,7 @@
|
||||
import { cloneDeep, defaults } from 'lodash';
|
||||
import { forkJoin, lastValueFrom, merge, Observable, of, OperatorFunction, pipe, Subject, throwError } from 'rxjs';
|
||||
import { catchError, filter, map, tap } from 'rxjs/operators';
|
||||
import LRU from 'lru-cache';
|
||||
import {
|
||||
AnnotationEvent,
|
||||
CoreApp,
|
||||
@ -14,17 +18,14 @@ import {
|
||||
TimeRange,
|
||||
} from '@grafana/data';
|
||||
import { BackendSrvRequest, FetchError, FetchResponse, getBackendSrv } from '@grafana/runtime';
|
||||
|
||||
import { safeStringifyValue } from 'app/core/utils/explore';
|
||||
import { getTimeSrv, TimeSrv } from 'app/features/dashboard/services/TimeSrv';
|
||||
import { getTemplateSrv, TemplateSrv } from 'app/features/templating/template_srv';
|
||||
import { defaults, cloneDeep } from 'lodash';
|
||||
import LRU from 'lru-cache';
|
||||
import { forkJoin, merge, Observable, of, OperatorFunction, pipe, Subject, throwError } from 'rxjs';
|
||||
import { catchError, filter, map, tap } from 'rxjs/operators';
|
||||
import addLabelToQuery from './add_label_to_query';
|
||||
import PrometheusLanguageProvider from './language_provider';
|
||||
import { expandRecordingRules } from './language_utils';
|
||||
import { getQueryHints, getInitHints } from './query_hints';
|
||||
import { getInitHints, getQueryHints } from './query_hints';
|
||||
import { getOriginalMetricName, renderTemplate, transform } from './result_transformer';
|
||||
import {
|
||||
ExemplarTraceIdDestination,
|
||||
@ -160,7 +161,9 @@ export class PrometheusDatasource extends DataSourceApi<PromQuery, PromOptions>
|
||||
// If URL includes endpoint that supports POST and GET method, try to use configured method. This might fail as POST is supported only in v2.10+.
|
||||
if (GET_AND_POST_METADATA_ENDPOINTS.some((endpoint) => url.includes(endpoint))) {
|
||||
try {
|
||||
return await this._request<T>(url, params, { method: this.httpMethod, hideFromInspector: true }).toPromise();
|
||||
return await lastValueFrom(
|
||||
this._request<T>(url, params, { method: this.httpMethod, hideFromInspector: true })
|
||||
);
|
||||
} catch (err) {
|
||||
// If status code of error is Method Not Allowed (405) and HTTP method is POST, retry with GET
|
||||
if (this.httpMethod === 'POST' && err.status === 405) {
|
||||
@ -171,7 +174,9 @@ export class PrometheusDatasource extends DataSourceApi<PromQuery, PromOptions>
|
||||
}
|
||||
}
|
||||
|
||||
return await this._request<T>(url, params, { method: 'GET', hideFromInspector: true }).toPromise(); // toPromise until we change getTagValues, getTagKeys to Observable
|
||||
return await lastValueFrom(
|
||||
this._request<T>(url, params, { method: 'GET', hideFromInspector: true })
|
||||
); // toPromise until we change getTagValues, getTagKeys to Observable
|
||||
}
|
||||
|
||||
interpolateQueryExpr(value: string | string[] = [], variable: any) {
|
||||
@ -654,7 +659,7 @@ export class PrometheusDatasource extends DataSourceApi<PromQuery, PromOptions>
|
||||
};
|
||||
|
||||
const query = this.createQuery(queryModel, queryOptions, start, end);
|
||||
const response = await this.performTimeSeriesQuery(query, query.start, query.end).toPromise();
|
||||
const response = await lastValueFrom(this.performTimeSeriesQuery(query, query.start, query.end));
|
||||
const eventList: AnnotationEvent[] = [];
|
||||
const splitKeys = tagKeys.split(',');
|
||||
|
||||
@ -743,7 +748,7 @@ export class PrometheusDatasource extends DataSourceApi<PromQuery, PromOptions>
|
||||
async testDatasource() {
|
||||
const now = new Date().getTime();
|
||||
const query = { expr: '1+1' } as PromQueryRequest;
|
||||
const response = await this.performInstantQuery(query, now / 1000).toPromise();
|
||||
const response = await lastValueFrom(this.performInstantQuery(query, now / 1000));
|
||||
return response.data.status === 'success'
|
||||
? { status: 'success', message: 'Data source is working' }
|
||||
: { status: 'error', message: response.data.error };
|
||||
|
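One behavioral difference worth keeping in mind when reading conversions like the Prometheus testDatasource above: in RxJS 6, toPromise() resolves to undefined if the source completes without emitting, while lastValueFrom rejects with an EmptyError. Where an empty stream is acceptable, RxJS 7 lets callers supply a default; a hedged sketch (readLastValue is a hypothetical helper, not part of this commit):

import { EMPTY, lastValueFrom, Observable } from 'rxjs';

// Hypothetical helper showing the defaultValue escape hatch available in RxJS 7.
function readLastValue(source: Observable<string>): Promise<string> {
  // Without the config object, an empty source would reject with EmptyError.
  return lastValueFrom(source, { defaultValue: 'no value emitted' });
}

// An empty observable resolves to the default instead of rejecting.
readLastValue(EMPTY).then((value) => console.log(value));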
@ -1,6 +1,8 @@
|
||||
import { map as _map, uniq, chain } from 'lodash';
|
||||
import { chain, map as _map, uniq } from 'lodash';
|
||||
import { lastValueFrom } from 'rxjs';
|
||||
import { map } from 'rxjs/operators';
|
||||
import { MetricFindValue, TimeRange } from '@grafana/data';
|
||||
|
||||
import { PrometheusDatasource } from './datasource';
|
||||
import { PromQueryRequest } from './types';
|
||||
import { getTimeSrv } from 'app/features/dashboard/services/TimeSrv';
|
||||
@ -40,7 +42,7 @@ export default class PrometheusMetricFindQuery {
|
||||
|
||||
const queryResultQuery = this.query.match(queryResultRegex);
|
||||
if (queryResultQuery) {
|
||||
return this.queryResultQuery(queryResultQuery[1]).toPromise();
|
||||
return lastValueFrom(this.queryResultQuery(queryResultQuery[1]));
|
||||
}
|
||||
|
||||
// if query contains full metric name, return metric name and label list
|
||||
|
@ -1,3 +1,4 @@
|
||||
import { lastValueFrom, Observable, of } from 'rxjs';
|
||||
import {
|
||||
DataFrame,
|
||||
dataFrameToJSON,
|
||||
@ -8,10 +9,10 @@ import {
|
||||
MutableDataFrame,
|
||||
PluginType,
|
||||
} from '@grafana/data';
|
||||
import { Observable, of } from 'rxjs';
|
||||
|
||||
import { createFetchResponse } from 'test/helpers/createFetchResponse';
|
||||
import { TempoDatasource } from './datasource';
|
||||
import { FetchResponse, setBackendSrv, BackendDataSourceResponse, setDataSourceSrv } from '@grafana/runtime';
|
||||
import { BackendDataSourceResponse, FetchResponse, setBackendSrv, setDataSourceSrv } from '@grafana/runtime';
|
||||
import mockJson from './mockJsonResponse.json';
|
||||
|
||||
describe('Tempo data source', () => {
|
||||
@ -33,7 +34,7 @@ describe('Tempo data source', () => {
|
||||
})
|
||||
);
|
||||
const ds = new TempoDatasource(defaultSettings);
|
||||
const response = await ds.query({ targets: [{ refId: 'refid1' }] } as any).toPromise();
|
||||
const response = await lastValueFrom(ds.query({ targets: [{ refId: 'refid1' }] } as any));
|
||||
|
||||
expect(
|
||||
(response.data[0] as DataFrame).fields.map((f) => ({
|
||||
@ -89,9 +90,9 @@ describe('Tempo data source', () => {
|
||||
},
|
||||
});
|
||||
setDataSourceSrv(backendSrvWithPrometheus as any);
|
||||
const response = await ds
|
||||
.query({ targets: [{ queryType: 'serviceMap' }], range: getDefaultTimeRange() } as any)
|
||||
.toPromise();
|
||||
const response = await lastValueFrom(
|
||||
ds.query({ targets: [{ queryType: 'serviceMap' }], range: getDefaultTimeRange() } as any)
|
||||
);
|
||||
|
||||
expect(response.data).toHaveLength(2);
|
||||
expect(response.data[0].name).toBe('Nodes');
|
||||
@ -106,11 +107,11 @@ describe('Tempo data source', () => {
|
||||
it('should handle json file upload', async () => {
|
||||
const ds = new TempoDatasource(defaultSettings);
|
||||
ds.uploadedJson = JSON.stringify(mockJson);
|
||||
const response = await ds
|
||||
.query({
|
||||
const response = await lastValueFrom(
|
||||
ds.query({
|
||||
targets: [{ queryType: 'upload', refId: 'A' }],
|
||||
} as any)
|
||||
.toPromise();
|
||||
);
|
||||
const field = response.data[0].fields[0];
|
||||
expect(field.name).toBe('traceID');
|
||||
expect(field.type).toBe(FieldType.string);
|
||||
|
@ -1,4 +1,6 @@
|
||||
import { groupBy } from 'lodash';
|
||||
import { from, lastValueFrom, merge, Observable, of, throwError } from 'rxjs';
|
||||
import { map, mergeMap, toArray } from 'rxjs/operators';
|
||||
import {
|
||||
DataQuery,
|
||||
DataQueryRequest,
|
||||
@ -9,10 +11,9 @@ import {
|
||||
LoadingState,
|
||||
} from '@grafana/data';
|
||||
import { DataSourceWithBackend } from '@grafana/runtime';
|
||||
|
||||
import { TraceToLogsOptions } from 'app/core/components/TraceToLogsSettings';
|
||||
import { getDatasourceSrv } from 'app/features/plugins/datasource_srv';
|
||||
import { from, merge, Observable, of, throwError } from 'rxjs';
|
||||
import { map, mergeMap, toArray } from 'rxjs/operators';
|
||||
import { LokiOptions, LokiQuery } from '../loki/types';
|
||||
import { transformTrace, transformTraceList, transformFromOTLP as transformFromOTEL } from './resultTransformer';
|
||||
import { PrometheusDatasource } from '../prometheus/datasource';
|
||||
@ -119,7 +120,7 @@ export class TempoDatasource extends DataSourceWithBackend<TempoQuery, TempoJson
|
||||
|
||||
async testDatasource(): Promise<any> {
|
||||
// to test Tempo we send a dummy traceID and verify Tempo answers with 'trace not found'
|
||||
const response = await super.query({ targets: [{ query: '0' }] } as any).toPromise();
|
||||
const response = await lastValueFrom(super.query({ targets: [{ query: '0' }] } as any));
|
||||
|
||||
const errorMessage = response.error?.message;
|
||||
if (
|
||||
|
@ -1,6 +1,7 @@
|
||||
import { lastValueFrom, of } from 'rxjs';
|
||||
import { DataSourceInstanceSettings, FieldType } from '@grafana/data';
|
||||
|
||||
import { backendSrv } from 'app/core/services/backend_srv';
|
||||
import { of } from 'rxjs';
|
||||
import { createFetchResponse } from 'test/helpers/createFetchResponse';
|
||||
import { ZipkinDatasource } from './datasource';
|
||||
import mockJson from './mockJsonResponse.json';
|
||||
@ -31,11 +32,11 @@ describe('ZipkinDatasource', () => {
|
||||
it('should handle json file upload', async () => {
|
||||
const ds = new ZipkinDatasource(defaultSettings);
|
||||
ds.uploadedJson = JSON.stringify(mockJson);
|
||||
const response = await ds
|
||||
.query({
|
||||
const response = await lastValueFrom(
|
||||
ds.query({
|
||||
targets: [{ queryType: 'upload', refId: 'A' }],
|
||||
} as any)
|
||||
.toPromise();
|
||||
);
|
||||
const field = response.data[0].fields[0];
|
||||
expect(field.name).toBe('traceID');
|
||||
expect(field.type).toBe(FieldType.string);
|
||||
|
@ -1,3 +1,6 @@
|
||||
import { lastValueFrom, Observable, of } from 'rxjs';
|
||||
import { map } from 'rxjs/operators';
|
||||
import { BackendSrvRequest, FetchResponse, getBackendSrv } from '@grafana/runtime';
|
||||
import {
|
||||
DataQueryRequest,
|
||||
DataQueryResponse,
|
||||
@ -6,9 +9,7 @@ import {
|
||||
FieldType,
|
||||
MutableDataFrame,
|
||||
} from '@grafana/data';
|
||||
import { BackendSrvRequest, FetchResponse, getBackendSrv } from '@grafana/runtime';
|
||||
import { Observable, of } from 'rxjs';
|
||||
import { map } from 'rxjs/operators';
|
||||
|
||||
import { serializeParams } from '../../../core/utils/fetch';
|
||||
import { apiPrefix } from './constants';
|
||||
import { ZipkinQuery, ZipkinSpan } from './types';
|
||||
@ -40,7 +41,7 @@ export class ZipkinDatasource extends DataSourceApi<ZipkinQuery> {
|
||||
}
|
||||
|
||||
async metadataRequest(url: string, params?: Record<string, any>): Promise<any> {
|
||||
const res = await this.request(url, params, { hideFromInspector: true }).toPromise();
|
||||
const res = await lastValueFrom(this.request(url, params, { hideFromInspector: true }));
|
||||
return res.data;
|
||||
}
|
||||
|
||||
|
@ -15,12 +15,12 @@ import { PieChartLegendOptions, PieChartLegendValues, PieChartOptions } from './
|
||||
import { Subscription } from 'rxjs';
|
||||
import {
|
||||
LegendDisplayMode,
|
||||
SeriesVisibilityChangeBehavior,
|
||||
usePanelContext,
|
||||
useTheme2,
|
||||
VizLayout,
|
||||
VizLegend,
|
||||
VizLegendItem,
|
||||
SeriesVisibilityChangeBehavior,
|
||||
} from '@grafana/ui';
|
||||
|
||||
const defaultLegendOptions: PieChartLegendOptions = {
|
||||
@ -140,9 +140,9 @@ function useSliceHighlightState() {
|
||||
setHighlightedTitle(undefined);
|
||||
};
|
||||
|
||||
const subs = new Subscription()
|
||||
.add(eventBus.getStream(DataHoverEvent).subscribe({ next: setHighlightedSlice }))
|
||||
.add(eventBus.getStream(DataHoverClearEvent).subscribe({ next: resetHighlightedSlice }));
|
||||
const subs = new Subscription();
|
||||
subs.add(eventBus.getStream(DataHoverEvent).subscribe({ next: setHighlightedSlice }));
|
||||
subs.add(eventBus.getStream(DataHoverClearEvent).subscribe({ next: resetHighlightedSlice }));
|
||||
|
||||
return () => {
|
||||
subs.unsubscribe();
|
||||
|
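The PieChart panel change above is forced by another RxJS 7 breaking change: Subscription.add() now returns void instead of a Subscription, so the previously chained .add(...).add(...) form no longer type-checks; each teardown is registered in its own statement instead. A small sketch of the non-chained form (hover$ and clear$ are hypothetical streams standing in for the event bus in the hunk above):

import { Subject, Subscription } from 'rxjs';

// Hypothetical event streams standing in for eventBus.getStream(...) above.
const hover$ = new Subject<string>();
const clear$ = new Subject<void>();

// RxJS 7: Subscription.add() returns void, so teardowns are added one per statement.
const subs = new Subscription();
subs.add(hover$.subscribe((title) => console.log('hover', title)));
subs.add(clear$.subscribe(() => console.log('clear')));

// A single unsubscribe() later disposes everything that was added.
subs.unsubscribe();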
@ -28,7 +28,7 @@ if [ ! -d "$REPORT_PATH" ]; then
|
||||
fi
|
||||
fi
|
||||
|
||||
WARNINGS_COUNT="$(find "$REPORT_PATH" -type f -name \*.log -print0 | xargs -0 grep -o "Warning: " | wc -l | xargs)"
|
||||
WARNINGS_COUNT="$(find "$REPORT_PATH" -type f -name \*.log -print0 | xargs -0 grep -o "Warning:.*(ae-\|Warning:.*(tsdoc-" | wc -l | xargs)"
|
||||
WARNINGS_COUNT_LIMIT=1074
|
||||
|
||||
if [ "$WARNINGS_COUNT" -gt $WARNINGS_COUNT_LIMIT ]; then
|
||||
|
151
yarn.lock
151
yarn.lock
@ -92,6 +92,15 @@
|
||||
semver "^6.3.0"
|
||||
source-map "^0.5.0"
|
||||
|
||||
"@babel/generator@^7.10.5":
|
||||
version "7.15.0"
|
||||
resolved "https://registry.yarnpkg.com/@babel/generator/-/generator-7.15.0.tgz#a7d0c172e0d814974bad5aa77ace543b97917f15"
|
||||
integrity sha512-eKl4XdMrbpYvuB505KTta4AV9g+wWzmVBW69tX0H2NwKVKd2YJbKgyK6M8j/rgLbmHOYJn6rUklV677nOyJrEQ==
|
||||
dependencies:
|
||||
"@babel/types" "^7.15.0"
|
||||
jsesc "^2.5.1"
|
||||
source-map "^0.5.0"
|
||||
|
||||
"@babel/generator@^7.12.11", "@babel/generator@^7.12.5", "@babel/generator@^7.13.9", "@babel/generator@^7.14.5", "@babel/generator@^7.4.0":
|
||||
version "7.14.5"
|
||||
resolved "https://registry.yarnpkg.com/@babel/generator/-/generator-7.14.5.tgz#848d7b9f031caca9d0cd0af01b063f226f52d785"
|
||||
@ -181,7 +190,7 @@
|
||||
dependencies:
|
||||
"@babel/types" "^7.14.5"
|
||||
|
||||
"@babel/helper-function-name@^7.0.0", "@babel/helper-function-name@^7.14.5":
|
||||
"@babel/helper-function-name@^7.0.0", "@babel/helper-function-name@^7.10.4", "@babel/helper-function-name@^7.14.5":
|
||||
version "7.14.5"
|
||||
resolved "https://registry.yarnpkg.com/@babel/helper-function-name/-/helper-function-name-7.14.5.tgz#89e2c474972f15d8e233b52ee8c480e2cfcd50c4"
|
||||
integrity sha512-Gjna0AsXWfFvrAuX+VKcN/aNNWonizBj39yGwUzVDVTlMYJMK2Wp6xdpy72mfArFq5uK+NOuexfzZlzI1z9+AQ==
|
||||
@ -282,13 +291,18 @@
|
||||
dependencies:
|
||||
"@babel/types" "^7.14.5"
|
||||
|
||||
"@babel/helper-split-export-declaration@^7.14.5":
|
||||
"@babel/helper-split-export-declaration@^7.10.4", "@babel/helper-split-export-declaration@^7.14.5":
|
||||
version "7.14.5"
|
||||
resolved "https://registry.yarnpkg.com/@babel/helper-split-export-declaration/-/helper-split-export-declaration-7.14.5.tgz#22b23a54ef51c2b7605d851930c1976dd0bc693a"
|
||||
integrity sha512-hprxVPu6e5Kdp2puZUmvOGjaLv9TCe58E/Fl6hRq4YiVQxIcNvuq6uTM2r1mT/oPskuS9CgR+I94sqAYv0NGKA==
|
||||
dependencies:
|
||||
"@babel/types" "^7.14.5"
|
||||
|
||||
"@babel/helper-validator-identifier@^7.10.4", "@babel/helper-validator-identifier@^7.14.9":
|
||||
version "7.14.9"
|
||||
resolved "https://registry.yarnpkg.com/@babel/helper-validator-identifier/-/helper-validator-identifier-7.14.9.tgz#6654d171b2024f6d8ee151bf2509699919131d48"
|
||||
integrity sha512-pQYxPY0UP6IHISRitNe8bsijHex4TWZXi2HwKVsjPiltzlhse2znVcm9Ace510VT1kxIHjGJCZZQBX2gJDbo0g==
|
||||
|
||||
"@babel/helper-validator-identifier@^7.14.5":
|
||||
version "7.14.5"
|
||||
resolved "https://registry.yarnpkg.com/@babel/helper-validator-identifier/-/helper-validator-identifier-7.14.5.tgz#d0f0e277c512e0c938277faa85a3968c9a44c0e8"
|
||||
@ -332,6 +346,16 @@
|
||||
resolved "https://registry.yarnpkg.com/@babel/parser/-/parser-7.14.7.tgz#6099720c8839ca865a2637e6c85852ead0bdb595"
|
||||
integrity sha512-X67Z5y+VBJuHB/RjwECp8kSl5uYi0BvRbNeWqkaJCVh+LiTPl19WBUfG627psSgp9rSf6ojuXghQM3ha6qHHdA==
|
||||
|
||||
"@babel/parser@^7.10.5":
|
||||
version "7.15.3"
|
||||
resolved "https://registry.yarnpkg.com/@babel/parser/-/parser-7.15.3.tgz#3416d9bea748052cfcb63dbcc27368105b1ed862"
|
||||
integrity sha512-O0L6v/HvqbdJawj0iBEfVQMc3/6WP+AeOsovsIgBFyJaG+W2w7eqvZB7puddATmWuARlm1SX7DwxJ/JJUnDpEA==
|
||||
|
||||
"@babel/parser@~7.10.3":
|
||||
version "7.10.5"
|
||||
resolved "https://registry.yarnpkg.com/@babel/parser/-/parser-7.10.5.tgz#e7c6bf5a7deff957cec9f04b551e2762909d826b"
|
||||
integrity sha512-wfryxy4bE1UivvQKSQDU4/X6dr+i8bctjUjj8Zyt3DQy7NtPizJXT8M52nqpNKL+nq2PW8lxk4ZqLj0fD4B4hQ==
|
||||
|
||||
"@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining@^7.13.12", "@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining@^7.14.5":
|
||||
version "7.14.5"
|
||||
resolved "https://registry.yarnpkg.com/@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining/-/plugin-bugfix-v8-spread-parameters-in-optional-chaining-7.14.5.tgz#4b467302e1548ed3b1be43beae2cc9cf45e0bb7e"
|
||||
@ -1211,6 +1235,21 @@
|
||||
debug "^4.1.0"
|
||||
globals "^11.1.0"
|
||||
|
||||
"@babel/traverse@~7.10.3":
|
||||
version "7.10.5"
|
||||
resolved "https://registry.yarnpkg.com/@babel/traverse/-/traverse-7.10.5.tgz#77ce464f5b258be265af618d8fddf0536f20b564"
|
||||
integrity sha512-yc/fyv2gUjPqzTz0WHeRJH2pv7jA9kA7mBX2tXl/x5iOE81uaVPuGPtaYk7wmkx4b67mQ7NqI8rmT2pF47KYKQ==
|
||||
dependencies:
|
||||
"@babel/code-frame" "^7.10.4"
|
||||
"@babel/generator" "^7.10.5"
|
||||
"@babel/helper-function-name" "^7.10.4"
|
||||
"@babel/helper-split-export-declaration" "^7.10.4"
|
||||
"@babel/parser" "^7.10.5"
|
||||
"@babel/types" "^7.10.5"
|
||||
debug "^4.1.0"
|
||||
globals "^11.1.0"
|
||||
lodash "^4.17.19"
|
||||
|
||||
"@babel/types@^7.0.0", "@babel/types@^7.12.11", "@babel/types@^7.12.7", "@babel/types@^7.13.12", "@babel/types@^7.13.14", "@babel/types@^7.14.5", "@babel/types@^7.2.0", "@babel/types@^7.3.0", "@babel/types@^7.3.3", "@babel/types@^7.4.0", "@babel/types@^7.4.4":
|
||||
version "7.14.5"
|
||||
resolved "https://registry.yarnpkg.com/@babel/types/-/types-7.14.5.tgz#3bb997ba829a2104cedb20689c4a5b8121d383ff"
|
||||
@ -1219,6 +1258,23 @@
|
||||
"@babel/helper-validator-identifier" "^7.14.5"
|
||||
to-fast-properties "^2.0.0"
|
||||
|
||||
"@babel/types@^7.10.5", "@babel/types@^7.15.0":
|
||||
version "7.15.0"
|
||||
resolved "https://registry.yarnpkg.com/@babel/types/-/types-7.15.0.tgz#61af11f2286c4e9c69ca8deb5f4375a73c72dcbd"
|
||||
integrity sha512-OBvfqnllOIdX4ojTHpwZbpvz4j3EWyjkZEdmjH0/cgsd6QOdSgU8rLSk6ard/pcW7rlmjdVSX/AWOaORR1uNOQ==
|
||||
dependencies:
|
||||
"@babel/helper-validator-identifier" "^7.14.9"
|
||||
to-fast-properties "^2.0.0"
|
||||
|
||||
"@babel/types@~7.10.3":
|
||||
version "7.10.5"
|
||||
resolved "https://registry.yarnpkg.com/@babel/types/-/types-7.10.5.tgz#d88ae7e2fde86bfbfe851d4d81afa70a997b5d15"
|
||||
integrity sha512-ixV66KWfCI6GKoA/2H9v6bQdbfXEwwpOdQ8cRvb4F+eyvhlaHxWFMQB4+3d9QFJXZsiiiqVrewNV0DFEQpyT4Q==
|
||||
dependencies:
|
||||
"@babel/helper-validator-identifier" "^7.10.4"
|
||||
lodash "^4.17.19"
|
||||
to-fast-properties "^2.0.0"
|
||||
|
||||
"@base2/pretty-print-object@1.0.0":
|
||||
version "1.0.0"
|
||||
resolved "https://registry.yarnpkg.com/@base2/pretty-print-object/-/pretty-print-object-1.0.0.tgz#860ce718b0b73f4009e153541faff2cb6b85d047"
|
||||
@ -5252,10 +5308,10 @@
|
||||
resolved "https://registry.yarnpkg.com/@types/stack-utils/-/stack-utils-2.0.0.tgz#7036640b4e21cc2f259ae826ce843d277dad8cff"
|
||||
integrity sha512-RJJrrySY7A8havqpGObOB4W92QXKJo63/jFLLgpvOtsGUqbQZ9Sbgl35KMm1DjC6j7AvmmU2bIno+3IyEaemaw==
|
||||
|
||||
"@types/stacktrace-js@^0.0.32":
|
||||
version "0.0.32"
|
||||
resolved "https://registry.yarnpkg.com/@types/stacktrace-js/-/stacktrace-js-0.0.32.tgz#d23e4a36a5073d39487fbea8234cc6186862d389"
|
||||
integrity sha512-SdxmlrHfO0BxgbBP9HZWMUo2rima8lwMjPiWm6S0dyKkDa5CseamktFhXg8umu3TPVBkSlX6ZoB5uUDJK89yvg==
|
||||
"@types/stacktrace-js@^0.0.33":
|
||||
version "0.0.33"
|
||||
resolved "https://registry.yarnpkg.com/@types/stacktrace-js/-/stacktrace-js-0.0.33.tgz#9b027370ca161b89798f308af77438802546cb39"
|
||||
integrity sha512-aqJ6QM9QThNL4dHBhwl1f9B0oDqiREkYLn9RldghUKsGeFWWGlCsqsRWxbh+hDvvmptMFqc4aIfFIGz9BBu8Qg==
|
||||
|
||||
"@types/systemjs@^0.20.6":
|
||||
version "0.20.6"
|
||||
@ -6921,6 +6977,15 @@ before-after-hook@^2.0.0:
|
||||
resolved "https://registry.yarnpkg.com/before-after-hook/-/before-after-hook-2.1.0.tgz#b6c03487f44e24200dd30ca5e6a1979c5d2fb635"
|
||||
integrity sha512-IWIbu7pMqyw3EAJHzzHbWa85b6oud/yfKYg5rqB5hNE8CeMi3nX+2C2sj0HswfblST86hpVEOAb9x34NZd6P7A==
|
||||
|
||||
bent@~7.3.6:
|
||||
version "7.3.12"
|
||||
resolved "https://registry.yarnpkg.com/bent/-/bent-7.3.12.tgz#e0a2775d4425e7674c64b78b242af4f49da6b035"
|
||||
integrity sha512-T3yrKnVGB63zRuoco/7Ybl7BwwGZR0lceoVG5XmQyMIH9s19SV5m+a8qam4if0zQuAmOQTyPTPmsQBdAorGK3w==
|
||||
dependencies:
|
||||
bytesish "^0.4.1"
|
||||
caseless "~0.12.0"
|
||||
is-stream "^2.0.0"
|
||||
|
||||
better-opn@^2.1.1:
|
||||
version "2.1.1"
|
||||
resolved "https://registry.yarnpkg.com/better-opn/-/better-opn-2.1.1.tgz#94a55b4695dc79288f31d7d0e5f658320759f7c6"
|
||||
@ -7267,6 +7332,11 @@ bytes@3.1.0:
|
||||
resolved "https://registry.yarnpkg.com/bytes/-/bytes-3.1.0.tgz#f6cf7933a360e0588fa9fde85651cdc7f805d1f6"
|
||||
integrity sha512-zauLjrfCG+xvoyaqLoV8bLVXXNGC4JqlxFCutSDWA6fJrTo2ZuvLYTqZ7aHBLZSMOopbzwv8f+wZcVzfVTI2Dg==
|
||||
|
||||
bytesish@^0.4.1:
|
||||
version "0.4.4"
|
||||
resolved "https://registry.yarnpkg.com/bytesish/-/bytesish-0.4.4.tgz#f3b535a0f1153747427aee27256748cff92347e6"
|
||||
integrity sha512-i4uu6M4zuMUiyfZN4RU2+i9+peJh//pXhd9x1oSe1LBkZ3LEbCoygu8W0bXTukU1Jme2txKuotpCZRaC3FLxcQ==
|
||||
|
||||
cacache@^12.0.0, cacache@^12.0.2, cacache@^12.0.3:
|
||||
version "12.0.4"
|
||||
resolved "https://registry.yarnpkg.com/cacache/-/cacache-12.0.4.tgz#668bcbd105aeb5f1d92fe25570ec9525c8faa40c"
|
||||
@ -7576,6 +7646,14 @@ chalk@^4.0.0, chalk@^4.1.0:
|
||||
ansi-styles "^4.1.0"
|
||||
supports-color "^7.1.0"
|
||||
|
||||
chalk@~4.1.0:
|
||||
version "4.1.2"
|
||||
resolved "https://registry.yarnpkg.com/chalk/-/chalk-4.1.2.tgz#aac4e2b7734a740867aeb16bf02aad556a1e7a01"
|
||||
integrity sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==
|
||||
dependencies:
|
||||
ansi-styles "^4.1.0"
|
||||
supports-color "^7.1.0"
|
||||
|
||||
chance@^1.0.10:
|
||||
version "1.1.4"
|
||||
resolved "https://registry.yarnpkg.com/chance/-/chance-1.1.4.tgz#d8743bf8e40bb05e024c305ca1ff441195eb23db"
|
||||
@ -12028,6 +12106,18 @@ glob@7.1.6, glob@^7.0.0, glob@^7.0.3, glob@^7.1.1, glob@^7.1.2, glob@^7.1.3, glo
|
||||
once "^1.3.0"
|
||||
path-is-absolute "^1.0.0"
|
||||
|
||||
glob@~7.1.6:
|
||||
version "7.1.7"
|
||||
resolved "https://registry.yarnpkg.com/glob/-/glob-7.1.7.tgz#3b193e9233f01d42d0b3f78294bbeeb418f94a90"
|
||||
integrity sha512-OvD9ENzPLbegENnYP5UUfJIirTg4+XwMWGaQfQTY0JenxNvvIKP3U3/tAQSPIu/lHxXYSZmpXlUHeqAIdKzBLQ==
|
||||
dependencies:
|
||||
fs.realpath "^1.0.0"
|
||||
inflight "^1.0.4"
|
||||
inherits "2"
|
||||
minimatch "^3.0.4"
|
||||
once "^1.3.0"
|
||||
path-is-absolute "^1.0.0"
|
||||
|
||||
global-dirs@^0.1.0:
|
||||
version "0.1.1"
|
||||
resolved "https://registry.yarnpkg.com/global-dirs/-/global-dirs-0.1.1.tgz#b319c0dd4607f353f3be9cca4c72fc148c49f445"
|
||||
@ -18655,6 +18745,14 @@ prompts@^2.0.1, prompts@^2.4.0:
|
||||
kleur "^3.0.3"
|
||||
sisteransi "^1.0.5"
|
||||
|
||||
prompts@~2.3.2:
|
||||
version "2.3.2"
|
||||
resolved "https://registry.yarnpkg.com/prompts/-/prompts-2.3.2.tgz#480572d89ecf39566d2bd3fe2c9fccb7c4c0b068"
|
||||
integrity sha512-Q06uKs2CkNYVID0VqwfAl9mipo99zkBv/n2JtWY89Yxa3ZabWSrs0e2KTudKVa3peLUvYXMefDqIleLPVUBZMA==
|
||||
dependencies:
|
||||
kleur "^3.0.3"
|
||||
sisteransi "^1.0.4"
|
||||
|
||||
promzard@^0.3.0:
|
||||
version "0.3.0"
|
||||
resolved "https://registry.yarnpkg.com/promzard/-/promzard-0.3.0.tgz#26a5d6ee8c7dee4cb12208305acfb93ba382a9ee"
|
||||
@ -20608,23 +20706,37 @@ rx-lite@^3.1.2:
|
||||
resolved "https://registry.yarnpkg.com/rx-lite/-/rx-lite-3.1.2.tgz#19ce502ca572665f3b647b10939f97fd1615f102"
|
||||
integrity sha1-Gc5QLKVyZl87ZHsQk5+X/RYV8QI=
|
||||
|
||||
rxjs-spy@^7.5.1:
|
||||
version "7.5.1"
|
||||
resolved "https://registry.yarnpkg.com/rxjs-spy/-/rxjs-spy-7.5.1.tgz#1a9ef50bc8d7dd00d9ecf3c54c00929231eaf319"
|
||||
integrity sha512-dJ9mO4HvW2r16PsU15Qsc0RVkG7pFrfyCNTGx3vrxWje3kIgZ6QjMVnWblQxbniZ32lwLk/2x9+D2O6GhgXV/w==
|
||||
rxjs-report-usage@^1.0.4:
|
||||
version "1.0.5"
|
||||
resolved "https://registry.yarnpkg.com/rxjs-report-usage/-/rxjs-report-usage-1.0.5.tgz#8de220b385f3f65a163e75cfcc2566b1545c9b3d"
|
||||
integrity sha512-jZeg+TTkvP8kAv0tIQj3WOuIhYLi+Ig9mG8DCc+nJHQ1ObJr8IaeNPbJmXDRfHvH3MKQMBzboY4RbQ6jWt6cIg==
|
||||
dependencies:
|
||||
"@babel/parser" "~7.10.3"
|
||||
"@babel/traverse" "~7.10.3"
|
||||
"@babel/types" "~7.10.3"
|
||||
bent "~7.3.6"
|
||||
chalk "~4.1.0"
|
||||
glob "~7.1.6"
|
||||
prompts "~2.3.2"
|
||||
|
||||
rxjs-spy@8.0.0:
|
||||
version "8.0.0"
|
||||
resolved "https://registry.yarnpkg.com/rxjs-spy/-/rxjs-spy-8.0.0.tgz#53f11d3b14d9abbfa437ca7cfa758497f633f04a"
|
||||
integrity sha512-McjTZjTlCacHb2CKGS6vC4Sflb94UEpsEnKZjCydKZY7LQ0ywvnVGkRVwpqSfdna4/XXmgtcNCvhDVJ4P7I92w==
|
||||
dependencies:
|
||||
"@types/circular-json" "^0.4.0"
|
||||
"@types/stacktrace-js" "^0.0.32"
|
||||
"@types/stacktrace-js" "^0.0.33"
|
||||
circular-json "^0.5.0"
|
||||
error-stack-parser "^2.0.1"
|
||||
rxjs-report-usage "^1.0.4"
|
||||
stacktrace-gps "^3.0.2"
|
||||
|
||||
rxjs@6.6.3:
|
||||
version "6.6.3"
|
||||
resolved "https://registry.yarnpkg.com/rxjs/-/rxjs-6.6.3.tgz#8ca84635c4daa900c0d3967a6ee7ac60271ee552"
|
||||
integrity sha512-trsQc+xYYXZ3urjOiJOuCOa5N3jAZ3eiSpQB5hIT8zGlL2QfnHLJ2r7GMkBGuIausdJN1OneaI6gQlsqNHHmZQ==
|
||||
rxjs@7.3.0:
|
||||
version "7.3.0"
|
||||
resolved "https://registry.yarnpkg.com/rxjs/-/rxjs-7.3.0.tgz#39fe4f3461dc1e50be1475b2b85a0a88c1e938c6"
|
||||
integrity sha512-p2yuGIg9S1epc3vrjKf6iVb3RCaAYjYskkO+jHIaV0IjOPlJop4UnodOoFb2xeNwlguqLYvGw1b1McillYb5Gw==
|
||||
dependencies:
|
||||
tslib "^1.9.0"
|
||||
tslib "~2.1.0"
|
||||
|
||||
rxjs@^6.3.3, rxjs@^6.4.0, rxjs@^6.6.7:
|
||||
version "6.6.7"
|
||||
@ -21069,7 +21181,7 @@ sinon@8.1.1:
|
||||
nise "^3.0.1"
|
||||
supports-color "^7.1.0"
|
||||
|
||||
sisteransi@^1.0.5:
|
||||
sisteransi@^1.0.4, sisteransi@^1.0.5:
|
||||
version "1.0.5"
|
||||
resolved "https://registry.yarnpkg.com/sisteransi/-/sisteransi-1.0.5.tgz#134d681297756437cc05ca01370d3a7a571075ed"
|
||||
integrity sha512-bLGGlR1QxBcynn2d5YmDX4MGjlZvy2MRBDRNHLJ8VI6l6+9FUiyTFNJ0IveOSP0bcXgVDPRcfGqA0pjaqUpfVg==
|
||||
@ -22635,6 +22747,11 @@ tslib@^2.1.0:
|
||||
resolved "https://registry.yarnpkg.com/tslib/-/tslib-2.3.0.tgz#803b8cdab3e12ba581a4ca41c8839bbb0dacb09e"
|
||||
integrity sha512-N82ooyxVNm6h1riLCoyS9e3fuJ3AMG2zIZs2Gd1ATcSFjSA23Q0fzjjZeh0jbJvWVDZ0cJT8yaNNaaXHzueNjg==
|
||||
|
||||
tslib@~2.1.0:
|
||||
version "2.1.0"
|
||||
resolved "https://registry.yarnpkg.com/tslib/-/tslib-2.1.0.tgz#da60860f1c2ecaa5703ab7d39bc05b6bf988b97a"
|
||||
integrity sha512-hcVC3wYEziELGGmEEXue7D75zbwIIVUMWAVbHItGPx0ziyXxrOMQx4rQEVEV45Ut/1IotuEvwqPopzIOkDMf0A==
|
||||
|
||||
tsutils@^3.21.0:
|
||||
version "3.21.0"
|
||||
resolved "https://registry.yarnpkg.com/tsutils/-/tsutils-3.21.0.tgz#b48717d394cea6c1e096983eed58e9d61715b623"
|
||||
|