Mirror of https://github.com/grafana/grafana.git
AzureMonitor: Update Logs workspace() template variable query to return resource URIs (#34445)
* Migrate template variables straight into the resource field, and display an error if unable to migrate
* Update the workspace() template variable query function to return a resource URI
* Update the backend to support resource URIs in the workspace field
* Don't load resource names for variables
* Throw an error when the workspace migration returns no results
* Update testDatasource to work with either a resource URI or a workspace GUID
* Fix tests
* Clean up types just a little bit, as a treat
This commit is contained in:
parent
fc04a1ae00
commit
cf94410e59
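In practical terms, the workspaces() template variable query used to return each workspace's customer ID (a GUID) as the option value and now returns the workspace's resource URI. A small TypeScript sketch of that mapping, using made-up values (the response item shape is simplified, not the full Azure API type):

// Simplified sketch of one item from the Azure workspace list response.
interface WorkspaceListItem {
  name: string;
  id: string; // full resource URI for the workspace
  properties: { customerId: string }; // workspace GUID
}

const item: WorkspaceListItem = {
  name: 'aworkspace',
  id: '/subscriptions/aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee/resourceGroups/cloud-datasources/providers/Microsoft.OperationalInsights/workspaces/aworkspace',
  properties: { customerId: 'abc1b44e-3e57-4410-b027-6cc0ae6dee67' },
};

const before = { text: item.name, value: item.properties.customerId }; // old variable option: a workspace GUID
const after = { text: item.name, value: item.id }; // new variable option: a resource URI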
@@ -11,6 +11,7 @@ import (
"net/http"
"net/url"
"path"
"regexp"

"github.com/grafana/grafana-plugin-sdk-go/data"
"github.com/grafana/grafana/pkg/api/pluginproxy"

@@ -65,6 +66,29 @@ func (e *AzureLogAnalyticsDatasource) executeTimeSeriesQuery(ctx context.Context
return result, nil
}

func getApiURL(queryJSONModel logJSONQuery) string {
// Legacy queries only specify a Workspace GUID, which we need to use the old workspace-centric
// API URL for, and newer queries specifying a resource URI should use resource-centric API.
// However, legacy workspace queries using a `workspaces()` template variable will be resolved
// to a resource URI, so they should use the new resource-centric.
azureLogAnalyticsTarget := queryJSONModel.AzureLogAnalytics
var resourceOrWorkspace string

if azureLogAnalyticsTarget.Resource != "" {
resourceOrWorkspace = azureLogAnalyticsTarget.Resource
} else {
resourceOrWorkspace = azureLogAnalyticsTarget.Workspace
}

matchesResourceURI, _ := regexp.MatchString("^/subscriptions/", resourceOrWorkspace)

if matchesResourceURI {
return fmt.Sprintf("v1%s/query", resourceOrWorkspace)
} else {
return fmt.Sprintf("v1/workspaces/%s/query", resourceOrWorkspace)
}
}

func (e *AzureLogAnalyticsDatasource) buildQueries(queries []plugins.DataSubQuery,
timeRange plugins.DataTimeRange) ([]*AzureLogAnalyticsQuery, error) {
azureLogAnalyticsQueries := []*AzureLogAnalyticsQuery{}

@@ -89,13 +113,7 @@ func (e *AzureLogAnalyticsDatasource) buildQueries(queries []plugins.DataSubQuer
resultFormat = timeSeries
}

// Handle legacy queries without a Resource
var apiURL string
if azureLogAnalyticsTarget.Resource != "" {
apiURL = fmt.Sprintf("v1%s/query", azureLogAnalyticsTarget.Resource)
} else {
apiURL = fmt.Sprintf("v1/workspaces/%s/query", azureLogAnalyticsTarget.Workspace)
}
apiURL := getApiURL(queryJSONModel)

params := url.Values{}
rawQuery, err := KqlInterpolate(query, timeRange, azureLogAnalyticsTarget.Query, "TimeGenerated")
@@ -19,6 +19,11 @@ func TestBuildingAzureLogAnalyticsQueries(t *testing.T) {
datasource := &AzureLogAnalyticsDatasource{}
fromStart := time.Date(2018, 3, 15, 13, 0, 0, 0, time.UTC).In(time.Local)

timeRange := plugins.DataTimeRange{
From: fmt.Sprintf("%v", fromStart.Unix()*1000),
To: fmt.Sprintf("%v", fromStart.Add(34*time.Minute).Unix()*1000),
}

tests := []struct {
name string
queryModel []plugins.DataSubQuery

@@ -27,11 +32,8 @@ func TestBuildingAzureLogAnalyticsQueries(t *testing.T) {
Err require.ErrorAssertionFunc
}{
{
name: "Query with macros should be interpolated",
timeRange: plugins.DataTimeRange{
From: fmt.Sprintf("%v", fromStart.Unix()*1000),
To: fmt.Sprintf("%v", fromStart.Add(34*time.Minute).Unix()*1000),
},
name: "Query with macros should be interpolated",
timeRange: timeRange,
queryModel: []plugins.DataSubQuery{
{
DataSource: &models.DataSource{

@@ -65,12 +67,11 @@ func TestBuildingAzureLogAnalyticsQueries(t *testing.T) {
},
},
Err: require.NoError,
}, {
name: "Legacy workspace queries should use workspace query endpoint",
timeRange: plugins.DataTimeRange{
From: fmt.Sprintf("%v", fromStart.Unix()*1000),
To: fmt.Sprintf("%v", fromStart.Add(34*time.Minute).Unix()*1000),
},
},

{
name: "Legacy queries with a workspace GUID should use workspace-centric url",
timeRange: timeRange,
queryModel: []plugins.DataSubQuery{
{
DataSource: &models.DataSource{

@@ -80,7 +81,7 @@ func TestBuildingAzureLogAnalyticsQueries(t *testing.T) {
"queryType": "Azure Log Analytics",
"azureLogAnalytics": map[string]interface{}{
"workspace": "aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee",
"query": "query=Perf | where $__timeFilter() | where $__contains(Computer, 'comp1','comp2') | summarize avg(CounterValue) by bin(TimeGenerated, $__interval), Computer",
"query": "query=Perf",
"resultFormat": timeSeries,
},
}),

@@ -94,13 +95,89 @@ func TestBuildingAzureLogAnalyticsQueries(t *testing.T) {
URL: "v1/workspaces/aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee/query",
Model: simplejson.NewFromAny(map[string]interface{}{
"azureLogAnalytics": map[string]interface{}{
"query": "query=Perf | where $__timeFilter() | where $__contains(Computer, 'comp1','comp2') | summarize avg(CounterValue) by bin(TimeGenerated, $__interval), Computer",
"resultFormat": timeSeries,
"workspace": "aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee",
"query": "query=Perf",
"resultFormat": timeSeries,
},
}),
Params: url.Values{"query": {"query=Perf | where ['TimeGenerated'] >= datetime('2018-03-15T13:00:00Z') and ['TimeGenerated'] <= datetime('2018-03-15T13:34:00Z') | where ['Computer'] in ('comp1','comp2') | summarize avg(CounterValue) by bin(TimeGenerated, 34000ms), Computer"}},
Target: "query=query%3DPerf+%7C+where+%5B%27TimeGenerated%27%5D+%3E%3D+datetime%28%272018-03-15T13%3A00%3A00Z%27%29+and+%5B%27TimeGenerated%27%5D+%3C%3D+datetime%28%272018-03-15T13%3A34%3A00Z%27%29+%7C+where+%5B%27Computer%27%5D+in+%28%27comp1%27%2C%27comp2%27%29+%7C+summarize+avg%28CounterValue%29+by+bin%28TimeGenerated%2C+34000ms%29%2C+Computer",
Params: url.Values{"query": {"query=Perf"}},
Target: "query=query%3DPerf",
},
},
Err: require.NoError,
},

{
name: "Legacy workspace queries with a resource URI (from a template variable) should use resource-centric url",
timeRange: timeRange,
queryModel: []plugins.DataSubQuery{
{
DataSource: &models.DataSource{
JsonData: simplejson.NewFromAny(map[string]interface{}{}),
},
Model: simplejson.NewFromAny(map[string]interface{}{
"queryType": "Azure Log Analytics",
"azureLogAnalytics": map[string]interface{}{
"workspace": "/subscriptions/aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee/resourceGroups/cloud-datasources/providers/Microsoft.OperationalInsights/workspaces/AppInsightsTestDataWorkspace",
"query": "query=Perf",
"resultFormat": timeSeries,
},
}),
RefID: "A",
},
},
azureLogAnalyticsQueries: []*AzureLogAnalyticsQuery{
{
RefID: "A",
ResultFormat: timeSeries,
URL: "v1/subscriptions/aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee/resourceGroups/cloud-datasources/providers/Microsoft.OperationalInsights/workspaces/AppInsightsTestDataWorkspace/query",
Model: simplejson.NewFromAny(map[string]interface{}{
"azureLogAnalytics": map[string]interface{}{
"workspace": "/subscriptions/aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee/resourceGroups/cloud-datasources/providers/Microsoft.OperationalInsights/workspaces/AppInsightsTestDataWorkspace",
"query": "query=Perf",
"resultFormat": timeSeries,
},
}),
Params: url.Values{"query": {"query=Perf"}},
Target: "query=query%3DPerf",
},
},
Err: require.NoError,
},

{
name: "Queries with a Resource should use resource-centric url",
timeRange: timeRange,
queryModel: []plugins.DataSubQuery{
{
DataSource: &models.DataSource{
JsonData: simplejson.NewFromAny(map[string]interface{}{}),
},
Model: simplejson.NewFromAny(map[string]interface{}{
"queryType": "Azure Log Analytics",
"azureLogAnalytics": map[string]interface{}{
"resource": "/subscriptions/aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee/resourceGroups/cloud-datasources/providers/Microsoft.OperationalInsights/workspaces/AppInsightsTestDataWorkspace",
"query": "query=Perf",
"resultFormat": timeSeries,
},
}),
RefID: "A",
},
},
azureLogAnalyticsQueries: []*AzureLogAnalyticsQuery{
{
RefID: "A",
ResultFormat: timeSeries,
URL: "v1/subscriptions/aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee/resourceGroups/cloud-datasources/providers/Microsoft.OperationalInsights/workspaces/AppInsightsTestDataWorkspace/query",
Model: simplejson.NewFromAny(map[string]interface{}{
"azureLogAnalytics": map[string]interface{}{
"resource": "/subscriptions/aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee/resourceGroups/cloud-datasources/providers/Microsoft.OperationalInsights/workspaces/AppInsightsTestDataWorkspace",
"query": "query=Perf",
"resultFormat": timeSeries,
},
}),
Params: url.Values{"query": {"query=Perf"}},
Target: "query=query%3DPerf",
},
},
Err: require.NoError,
@@ -14,6 +14,13 @@ jest.mock('@grafana/runtime', () => ({
getTemplateSrv: () => templateSrv,
}));

const makeResourceURI = (
resourceName: string,
resourceGroup = 'test-resource-group',
subscriptionID = 'aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee'
) =>
`/subscriptions/${subscriptionID}/resourceGroups/${resourceGroup}/providers/Microsoft.OperationalInsights/workspaces/${resourceName}`;

describe('AzureLogAnalyticsDatasource', () => {
const datasourceRequestMock = jest.spyOn(backendSrv, 'datasourceRequest');

@@ -53,6 +60,7 @@ describe('AzureLogAnalyticsDatasource', () => {
value: [
{
name: 'aworkspace',
id: makeResourceURI('a-workspace'),
properties: {
source: 'Azure',
customerId: 'abc1b44e-3e57-4410-b027-6cc0ae6dee67',

@@ -72,7 +80,7 @@ describe('AzureLogAnalyticsDatasource', () => {
ctx.ds = new AzureMonitorDatasource(ctx.instanceSettings);

datasourceRequestMock.mockImplementation((options: { url: string }) => {
if (options.url.indexOf('Microsoft.OperationalInsights/workspaces') > -1) {
if (options.url.indexOf('Microsoft.OperationalInsights/workspaces?api-version') > -1) {
workspacesUrl = options.url;
return Promise.resolve({ data: workspaceResponse, status: 200 });
} else {

@@ -80,11 +88,11 @@ describe('AzureLogAnalyticsDatasource', () => {
return Promise.resolve({ data: tableResponseWithOneColumn, status: 200 });
}
});

await ctx.ds.metricFindQuery('workspace("aworkspace").AzureActivity | distinct Category');
});

it('should use the loganalyticsazure plugin route', () => {
it('should use the loganalyticsazure plugin route', async () => {
await ctx.ds.metricFindQuery('workspace("aworkspace").AzureActivity | distinct Category');

expect(workspacesUrl).toContain('workspacesloganalytics');
expect(azureLogAnalyticsUrl).toContain('loganalyticsazure');
});

@@ -168,12 +176,14 @@ describe('AzureLogAnalyticsDatasource', () => {
value: [
{
name: 'workspace1',
id: makeResourceURI('workspace-1'),
properties: {
customerId: 'eeee4fde-1aaa-4d60-9974-eeee562ffaa1',
},
},
{
name: 'workspace2',
id: makeResourceURI('workspace-2'),
properties: {
customerId: 'eeee4fde-1aaa-4d60-9974-eeee562ffaa2',
},

@@ -192,11 +202,10 @@ describe('AzureLogAnalyticsDatasource', () => {
});

it('should return a list of workspaces', () => {
expect(queryResults.length).toBe(2);
expect(queryResults[0].text).toBe('workspace1');
expect(queryResults[0].value).toBe('eeee4fde-1aaa-4d60-9974-eeee562ffaa1');
expect(queryResults[1].text).toBe('workspace2');
expect(queryResults[1].value).toBe('eeee4fde-1aaa-4d60-9974-eeee562ffaa2');
expect(queryResults).toEqual([
{ text: 'workspace1', value: makeResourceURI('workspace-1') },
{ text: 'workspace2', value: makeResourceURI('workspace-2') },
]);
});
});

@@ -211,11 +220,10 @@ describe('AzureLogAnalyticsDatasource', () => {
});

it('should return a list of workspaces', () => {
expect(queryResults.length).toBe(2);
expect(queryResults[0].text).toBe('workspace1');
expect(queryResults[0].value).toBe('eeee4fde-1aaa-4d60-9974-eeee562ffaa1');
expect(queryResults[1].text).toBe('workspace2');
expect(queryResults[1].value).toBe('eeee4fde-1aaa-4d60-9974-eeee562ffaa2');
expect(queryResults).toEqual([
{ text: 'workspace1', value: makeResourceURI('workspace-1') },
{ text: 'workspace2', value: makeResourceURI('workspace-2') },
]);
});
});

@@ -230,11 +238,10 @@ describe('AzureLogAnalyticsDatasource', () => {
});

it('should return a list of workspaces', () => {
expect(queryResults.length).toBe(2);
expect(queryResults[0].text).toBe('workspace1');
expect(queryResults[0].value).toBe('eeee4fde-1aaa-4d60-9974-eeee562ffaa1');
expect(queryResults[1].text).toBe('workspace2');
expect(queryResults[1].value).toBe('eeee4fde-1aaa-4d60-9974-eeee562ffaa2');
expect(queryResults).toEqual([
{ text: 'workspace1', value: makeResourceURI('workspace-1') },
{ text: 'workspace2', value: makeResourceURI('workspace-2') },
]);
});
});

@@ -258,6 +265,7 @@ describe('AzureLogAnalyticsDatasource', () => {
value: [
{
name: 'aworkspace',
id: makeResourceURI('a-workspace'),
properties: {
source: 'Azure',
customerId: 'abc1b44e-3e57-4410-b027-6cc0ae6dee67',

@@ -268,22 +276,22 @@ describe('AzureLogAnalyticsDatasource', () => {
beforeEach(async () => {
datasourceRequestMock.mockImplementation((options: { url: string }) => {
if (options.url.indexOf('Microsoft.OperationalInsights/workspaces') > -1) {
if (options.url.indexOf('OperationalInsights/workspaces?api-version=') > -1) {
return Promise.resolve({ data: workspaceResponse, status: 200 });
} else {
return Promise.resolve({ data: tableResponseWithOneColumn, status: 200 });
}
});

queryResults = await ctx.ds.metricFindQuery('workspace("aworkspace").AzureActivity | distinct Category');
});

it('should return a list of categories in the correct format', () => {
expect(queryResults.length).toBe(2);
expect(queryResults[0].text).toBe('Administrative');
expect(queryResults[0].value).toBe('Administrative');
expect(queryResults[1].text).toBe('Policy');
expect(queryResults[1].value).toBe('Policy');
it('should return a list of categories in the correct format', async () => {
const results = await ctx.ds.metricFindQuery('workspace("aworkspace").AzureActivity | distinct Category');

expect(results.length).toBe(2);
expect(results[0].text).toBe('Administrative');
expect(results[0].value).toBe('Administrative');
expect(results[1].text).toBe('Policy');
expect(results[1].value).toBe('Policy');
});
});
});

@@ -319,6 +327,7 @@ describe('AzureLogAnalyticsDatasource', () => {
value: [
{
name: 'aworkspace',
id: makeResourceURI('a-workspace'),
properties: {
source: 'Azure',
customerId: 'abc1b44e-3e57-4410-b027-6cc0ae6dee67',
@@ -21,6 +21,13 @@ import { mergeMap } from 'rxjs/operators';
import { getAuthType, getAzureCloud } from '../credentials';
import { getLogAnalyticsApiRoute, getLogAnalyticsManagementApiRoute } from '../api/routes';
import { AzureLogAnalyticsMetadata } from '../types/logAnalyticsMetadata';
import { isGUIDish } from '../components/ResourcePicker/utils';

interface AdhocQuery {
datasourceId: number;
url: string;
resultFormat: string;
}

export default class AzureLogAnalyticsDatasource extends DataSourceWithBackend<
AzureMonitorQuery,

@@ -61,7 +68,7 @@ export default class AzureLogAnalyticsDatasource extends DataSourceWithBackend<

return (
map(response.data.value, (val: any) => {
return { text: val.name, value: val.properties.customerId };
return { text: val.name, value: val.id };
}) || []
);
}

@@ -95,17 +102,16 @@ export default class AzureLogAnalyticsDatasource extends DataSourceWithBackend<
const item = target.azureLogAnalytics;

const templateSrv = getTemplateSrv();
const resource = templateSrv.replace(item.resource, scopedVars);
let workspace = templateSrv.replace(item.workspace, scopedVars);

if (!workspace && this.defaultOrFirstWorkspace) {
if (!workspace && !resource && this.defaultOrFirstWorkspace) {
workspace = this.defaultOrFirstWorkspace;
}

const subscriptionId = templateSrv.replace(target.subscription || this.subscriptionId, scopedVars);
const query = templateSrv.replace(item.query, scopedVars, this.interpolateVariable);

const resource = templateSrv.replace(item.resource, scopedVars);

return {
refId: target.refId,
format: target.format,

@@ -208,19 +214,21 @@ export default class AzureLogAnalyticsDatasource extends DataSourceWithBackend<
* external interface does not support
*/
metricFindQueryInternal(query: string): Promise<MetricFindValue[]> {
// workspaces() - Get workspaces in the default subscription
const workspacesQuery = query.match(/^workspaces\(\)/i);
if (workspacesQuery) {
return this.getWorkspaces(this.subscriptionId);
}

// workspaces("abc-def-etc") - Get workspaces a specified subscription
const workspacesQueryWithSub = query.match(/^workspaces\(["']?([^\)]+?)["']?\)/i);
if (workspacesQueryWithSub) {
return this.getWorkspaces((workspacesQueryWithSub[1] || '').trim());
}

return this.getDefaultOrFirstWorkspace().then((workspace: any) => {
const queries: any[] = this.buildQuery(query, null, workspace);

// Execute the query as KQL to the default or first workspace
return this.getDefaultOrFirstWorkspace().then((resourceURI) => {
const queries = this.buildQuery(query, null, resourceURI);
const promises = this.doQueries(queries);

return Promise.all(promises)

@@ -239,24 +247,32 @@ export default class AzureLogAnalyticsDatasource extends DataSourceWithBackend<
} else if (err.error && err.error.data && err.error.data.error) {
throw { message: err.error.data.error.message };
}

throw err;
});
}) as Promise<MetricFindValue[]>;
}

private buildQuery(query: string, options: any, workspace: any) {
private buildQuery(query: string, options: any, workspace: string): AdhocQuery[] {
const querystringBuilder = new LogAnalyticsQuerystringBuilder(
getTemplateSrv().replace(query, {}, this.interpolateVariable),
options,
'TimeGenerated'
);

const querystring = querystringBuilder.generate().uriString;
const url = `${this.baseUrl}/v1/workspaces/${workspace}/query?${querystring}`;
const queries: any[] = [];
queries.push({
datasourceId: this.id,
url: url,
resultFormat: 'table',
});
const url = isGUIDish(workspace)
? `${this.baseUrl}/v1/workspaces/${workspace}/query?${querystring}`
: `${this.baseUrl}/v1/${workspace}/query?${querystring}`;

const queries = [
{
datasourceId: this.id,
url: url,
resultFormat: 'table',
},
];

return queries;
}

@@ -288,8 +304,9 @@ export default class AzureLogAnalyticsDatasource extends DataSourceWithBackend<
return Promise.resolve(this.defaultOrFirstWorkspace);
}

return this.getWorkspaces(this.subscriptionId).then((workspaces: any[]) => {
return this.getWorkspaces(this.subscriptionId).then((workspaces) => {
this.defaultOrFirstWorkspace = workspaces[0].value;

return this.defaultOrFirstWorkspace;
});
}

@@ -301,8 +318,7 @@ export default class AzureLogAnalyticsDatasource extends DataSourceWithBackend<
});
}

const queries: any[] = this.buildQuery(options.annotation.rawQuery, options, options.annotation.workspace);

const queries = this.buildQuery(options.annotation.rawQuery, options, options.annotation.workspace);
const promises = this.doQueries(queries);

return Promise.all(promises).then((results) => {

@@ -311,7 +327,7 @@ export default class AzureLogAnalyticsDatasource extends DataSourceWithBackend<
});
}

doQueries(queries: any[]) {
doQueries(queries: AdhocQuery[]) {
return map(queries, (query) => {
return this.doRequest(query.url)
.then((result: any) => {

@@ -354,7 +370,7 @@ export default class AzureLogAnalyticsDatasource extends DataSourceWithBackend<
}
}

// TODO: update to be resource-centric
// TODO: update to be completely resource-centric
testDatasource(): Promise<DatasourceValidationResult> {
const validationError = this.validateDatasource();
if (validationError) {

@@ -362,8 +378,10 @@ export default class AzureLogAnalyticsDatasource extends DataSourceWithBackend<
}

return this.getDefaultOrFirstWorkspace()
.then((ws: any) => {
const url = `${this.baseUrl}/v1/workspaces/${ws}/metadata`;
.then((resourceOrWorkspace) => {
const url = isGUIDish(resourceOrWorkspace)
? `${this.baseUrl}/v1/workspaces/${resourceOrWorkspace}/metadata`
: `${this.baseUrl}/v1${resourceOrWorkspace}/metadata`;

return this.doRequest(url);
})
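As an aside, the endpoint selection that buildQuery and testDatasource now share can be summarized by a small TypeScript sketch. Helper and parameter names here are assumptions for illustration, and the exact path joining differs slightly between call sites in the diff above:

// Sketch only: treat the stored value as a legacy workspace GUID when it looks
// GUID-like, and as a full Azure resource URI otherwise.
const isGUIDish = (input: string) => !!input.match(/^[A-Z0-9]+/i);

function logAnalyticsQueryUrl(baseUrl: string, resourceOrWorkspace: string, querystring: string): string {
  return isGUIDish(resourceOrWorkspace)
    ? `${baseUrl}/v1/workspaces/${resourceOrWorkspace}/query?${querystring}` // legacy workspace GUID
    : `${baseUrl}/v1${resourceOrWorkspace}/query?${querystring}`; // resource URI, e.g. /subscriptions/.../workspaces/...
}

// Example with a hypothetical base URL:
logAnalyticsQueryUrl('api/loganalyticsazure', 'aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee', 'query=Perf');
// -> 'api/loganalyticsazure/v1/workspaces/aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee/query?query=Perf'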
@@ -1,7 +1,7 @@
import React from 'react';
import { AzureMonitorErrorish, AzureMonitorOption, AzureMonitorQuery } from '../../types';
import Datasource from '../../datasource';
import { InlineFieldRow } from '@grafana/ui';
import { Alert, InlineFieldRow } from '@grafana/ui';
import QueryField from './QueryField';
import FormatAsField from './FormatAsField';
import ResourceField from './ResourceField';

@@ -24,7 +24,7 @@ const LogsQueryEditor: React.FC<LogsQueryEditorProps> = ({
onChange,
setError,
}) => {
useMigrations(datasource, query, onChange);
const migrationError = useMigrations(datasource, query, onChange);

return (
<div data-testid="azure-monitor-logs-query-editor">

@@ -56,6 +56,8 @@ const LogsQueryEditor: React.FC<LogsQueryEditorProps> = ({
onQueryChange={onChange}
setError={setError}
/>

{migrationError && <Alert title={migrationError.title}>{migrationError.message}</Alert>}
</div>
);
};
@@ -28,7 +28,7 @@ const ResourceField: React.FC<AzureQueryEditorFieldProps> = ({ query, datasource
const [pickerIsOpen, setPickerIsOpen] = useState(false);

useEffect(() => {
if (resource) {
if (resource && parseResourceDetails(resource)) {
datasource.resourcePickerData.getResource(resource).then(setResourceComponents);
} else {
setResourceComponents(undefined);
@@ -1,6 +1,7 @@
import { useEffect } from 'react';
import { useEffect, useState } from 'react';
import { AzureMonitorQuery } from '../../types';
import Datasource from '../../datasource';
import { isGUIDish } from '../ResourcePicker/utils';

async function migrateWorkspaceQueryToResourceQuery(
datasource: Datasource,

@@ -8,15 +9,21 @@ async function migrateWorkspaceQueryToResourceQuery(
onChange: (newQuery: AzureMonitorQuery) => void
) {
if (query.azureLogAnalytics.workspace !== undefined && !query.azureLogAnalytics.resource) {
const resourceURI = await datasource.resourcePickerData.getResourceURIFromWorkspace(
query.azureLogAnalytics.workspace
);
const isWorkspaceGUID = isGUIDish(query.azureLogAnalytics.workspace);
let resource: string;

if (isWorkspaceGUID) {
resource = await datasource.resourcePickerData.getResourceURIFromWorkspace(query.azureLogAnalytics.workspace);
} else {
// The value of workspace is probably a template variable so we just migrate it over as-is
resource = query.azureLogAnalytics.workspace;
}

const newQuery = {
...query,
azureLogAnalytics: {
...query.azureLogAnalytics,
resource: resourceURI,
resource: resource,
workspace: undefined,
},
};

@@ -27,12 +34,26 @@ async function migrateWorkspaceQueryToResourceQuery(
}
}

interface ErrorMessage {
title: string;
message: string;
}

export default function useMigrations(
datasource: Datasource,
query: AzureMonitorQuery,
onChange: (newQuery: AzureMonitorQuery) => void
) {
const [migrationError, setMigrationError] = useState<ErrorMessage>();

useEffect(() => {
migrateWorkspaceQueryToResourceQuery(datasource, query, onChange);
migrateWorkspaceQueryToResourceQuery(datasource, query, onChange).catch((err) =>
setMigrationError({
title: 'Unable to migrate workspace as a resource',
message: err.message,
})
);
}, [datasource, query, onChange]);

return migrationError;
}
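For illustration, here is the shape of a legacy Logs query before and after the migration above. The values are made up; when the stored workspace is not GUID-like it is assumed to be a template variable and is copied into resource unchanged:

// Hypothetical example values, not taken from a real dashboard.
const legacyQuery = {
  refId: 'A',
  queryType: 'Azure Log Analytics',
  azureLogAnalytics: {
    workspace: 'aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee', // legacy workspace GUID
    query: 'Perf | take 10',
  },
};

// After migration the GUID has been resolved to a resource URI and workspace is cleared.
const migratedQuery = {
  ...legacyQuery,
  azureLogAnalytics: {
    ...legacyQuery.azureLogAnalytics,
    resource:
      '/subscriptions/aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee/resourceGroups/cloud-datasources/providers/Microsoft.OperationalInsights/workspaces/AppInsightsTestDataWorkspace',
    workspace: undefined,
  },
};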
@@ -10,3 +10,7 @@ export function parseResourceURI(resourceURI: string) {
const { subscriptionID, resourceGroup, resource } = matches.groups;
return { subscriptionID, resourceGroup, resource };
}

export function isGUIDish(input: string) {
return !!input.match(/^[A-Z0-9]+/i);
}
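A quick sanity check of what isGUIDish accepts, since it only tests whether the input starts with an alphanumeric character rather than validating a full GUID. The example values below are assumptions, and the helper is repeated inline so the sketch is self-contained:

// The helper as added above, repeated here for a standalone sketch.
const isGUIDish = (input: string) => !!input.match(/^[A-Z0-9]+/i);

console.log(isGUIDish('aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee')); // true - a workspace GUID
console.log(isGUIDish('AppInsightsTestDataWorkspace')); // true - any name starting with a letter or digit also passes
console.log(isGUIDish('/subscriptions/aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee/resourceGroups/rg/providers/Microsoft.OperationalInsights/workspaces/ws')); // false - resource URIs start with "/"
console.log(isGUIDish('$workspace')); // false - an uninterpolated template variable starts with "$"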
@@ -107,6 +107,10 @@ export default class ResourcePickerData {
throw new Error('unable to fetch resource containers');
}

if (!response.data.length) {
throw new Error('unable to find resource for workspace ' + workspace);
}

return response.data[0].id;
}