diff --git a/CHANGELOG.md b/CHANGELOG.md index 9db747d6ed5..a82fc7050b4 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,7 +1,10 @@ # 6.0.0-beta2 (unreleased) +### New Features +* **AzureMonitor**: Enable alerting by converting Azure Monitor API to Go [#14623](https://github.com/grafana/grafana/issues/14623) + ### Minor -* **Pushover**: Adds support for images in pushover notifier [#10780](https://github.com/grafana/grafana/issues/10780), thx [@jpenalbae](https://github.com/jpenalbae) +* **Alerting**: Adds support for images in pushover notifier [#10780](https://github.com/grafana/grafana/issues/10780), thx [@jpenalbae](https://github.com/jpenalbae) * **Graphite/InfluxDB/OpenTSDB**: Fix always take dashboard timezone into consideration when handle custom time ranges [#15284](https://github.com/grafana/grafana/issues/15284) * **Stackdriver**: Template variables in filters using globbing format [#15182](https://github.com/grafana/grafana/issues/15182) * **Cloudwatch**: Add `resource_arns` template variable query function [#8207](https://github.com/grafana/grafana/issues/8207), thx [@jeroenvollenbrock](https://github.com/jeroenvollenbrock) @@ -13,11 +16,18 @@ * **Annotations**: Support PATCH verb in annotations http api [#12546](https://github.com/grafana/grafana/issues/12546), thx [@SamuelToh](https://github.com/SamuelToh) * **Templating**: Add json formatting to variable interpolation [#15291](https://github.com/grafana/grafana/issues/15291), thx [@mtanda](https://github.com/mtanda) * **Login**: Anonymous usage stats for token auth [#15288](https://github.com/grafana/grafana/issues/15288) -* **Alerting**: Fixes crash bug when alert notifier folders are missing [#15295](https://github.com/grafana/grafana/issues/15295) +* **AzureMonitor**: improve autocomplete for Log Analytics and App Insights editor [#15131](https://github.com/grafana/grafana/issues/15131) +* **LDAP**: Fix IPA/FreeIPA v4.6.4 does not allow LDAP searches with empty attributes [#14432](https://github.com/grafana/grafana/issues/14432) ### 6.0.0-beta1 fixes * **Postgres**: Fix default port not added when port not configured [#15189](https://github.com/grafana/grafana/issues/15189) +* **Alerting**: Fixes crash bug when alert notifier folders are missing [#15295](https://github.com/grafana/grafana/issues/15295) +* **Dashboard**: Fix save provisioned dashboard modal [#15219](https://github.com/grafana/grafana/pull/15219) +* **Dashboard**: Fix having a long query in prometheus dashboard query editor blocks 30% of the query field when on OSX and having native scrollbars [#15122](https://github.com/grafana/grafana/issues/15122) +* **Explore**: Fix issue with wrapping on long queries [#15222](https://github.com/grafana/grafana/issues/15222) +* **Explore**: Fix cut & paste adds newline before and after selection [#15223](https://github.com/grafana/grafana/issues/15223) +* **Dataproxy**: Fix global datasource proxy timeout not added to correct http client [#15258](https://github.com/grafana/grafana/issues/15258) [#5699](https://github.com/grafana/grafana/issues/5699) # 6.0.0-beta1 (2019-01-30) diff --git a/package.json b/package.json index fae51a1d856..2f44291a86a 100644 --- a/package.json +++ b/package.json @@ -68,7 +68,7 @@ "husky": "^0.14.3", "jest": "^23.6.0", "jest-date-mock": "^1.0.6", - "lint-staged": "^6.0.0", + "lint-staged": "^8.1.3", "load-grunt-tasks": "3.5.2", "mini-css-extract-plugin": "^0.4.0", "mocha": "^4.0.1", diff --git a/pkg/cmd/grafana-server/main.go b/pkg/cmd/grafana-server/main.go index 3bdaf0cc80e..d371d4e91da 
100644 --- a/pkg/cmd/grafana-server/main.go +++ b/pkg/cmd/grafana-server/main.go @@ -19,6 +19,7 @@ import ( _ "github.com/grafana/grafana/pkg/services/alerting/conditions" _ "github.com/grafana/grafana/pkg/services/alerting/notifiers" "github.com/grafana/grafana/pkg/setting" + _ "github.com/grafana/grafana/pkg/tsdb/azuremonitor" _ "github.com/grafana/grafana/pkg/tsdb/cloudwatch" _ "github.com/grafana/grafana/pkg/tsdb/elasticsearch" _ "github.com/grafana/grafana/pkg/tsdb/graphite" diff --git a/pkg/models/datasource.go b/pkg/models/datasource.go index e1cb185d92a..22c53dfa0dd 100644 --- a/pkg/models/datasource.go +++ b/pkg/models/datasource.go @@ -23,7 +23,7 @@ const ( DS_ACCESS_DIRECT = "direct" DS_ACCESS_PROXY = "proxy" DS_STACKDRIVER = "stackdriver" - DS_AZURE_MONITOR = "azure-monitor" + DS_AZURE_MONITOR = "grafana-azure-monitor-datasource" ) var ( diff --git a/pkg/tsdb/azuremonitor/azuremonitor-datasource.go b/pkg/tsdb/azuremonitor/azuremonitor-datasource.go new file mode 100644 index 00000000000..cae8d8bfb73 --- /dev/null +++ b/pkg/tsdb/azuremonitor/azuremonitor-datasource.go @@ -0,0 +1,312 @@ +package azuremonitor + +import ( + "context" + "encoding/json" + "errors" + "fmt" + "io/ioutil" + "net/http" + "net/url" + "path" + "strings" + "time" + + "github.com/grafana/grafana/pkg/api/pluginproxy" + "github.com/grafana/grafana/pkg/models" + "github.com/grafana/grafana/pkg/plugins" + "github.com/grafana/grafana/pkg/setting" + opentracing "github.com/opentracing/opentracing-go" + "golang.org/x/net/context/ctxhttp" + + "github.com/grafana/grafana/pkg/components/null" + "github.com/grafana/grafana/pkg/components/simplejson" + "github.com/grafana/grafana/pkg/tsdb" +) + +// AzureMonitorDatasource calls the Azure Monitor API - one of the four API's supported +type AzureMonitorDatasource struct { + httpClient *http.Client + dsInfo *models.DataSource +} + +var ( + // 1m, 5m, 15m, 30m, 1h, 6h, 12h, 1d in milliseconds + allowedIntervalsMS = []int64{60000, 300000, 900000, 1800000, 3600000, 21600000, 43200000, 86400000} +) + +// executeTimeSeriesQuery does the following: +// 1. build the AzureMonitor url and querystring for each query +// 2. executes each query by calling the Azure Monitor API +// 3. 
parses the responses for each query into the timeseries format +func (e *AzureMonitorDatasource) executeTimeSeriesQuery(ctx context.Context, originalQueries []*tsdb.Query, timeRange *tsdb.TimeRange) (*tsdb.Response, error) { + result := &tsdb.Response{ + Results: map[string]*tsdb.QueryResult{}, + } + + queries, err := e.buildQueries(originalQueries, timeRange) + if err != nil { + return nil, err + } + + for _, query := range queries { + queryRes, resp, err := e.executeQuery(ctx, query, originalQueries, timeRange) + if err != nil { + return nil, err + } + // azlog.Debug("AzureMonitor", "Response", resp) + + err = e.parseResponse(queryRes, resp, query) + if err != nil { + queryRes.Error = err + } + result.Results[query.RefID] = queryRes + } + + return result, nil +} + +func (e *AzureMonitorDatasource) buildQueries(queries []*tsdb.Query, timeRange *tsdb.TimeRange) ([]*AzureMonitorQuery, error) { + azureMonitorQueries := []*AzureMonitorQuery{} + startTime, err := timeRange.ParseFrom() + if err != nil { + return nil, err + } + + endTime, err := timeRange.ParseTo() + if err != nil { + return nil, err + } + + for _, query := range queries { + var target string + + azureMonitorTarget := query.Model.Get("azureMonitor").MustMap() + azlog.Debug("AzureMonitor", "target", azureMonitorTarget) + + urlComponents := map[string]string{} + urlComponents["resourceGroup"] = fmt.Sprintf("%v", azureMonitorTarget["resourceGroup"]) + urlComponents["metricDefinition"] = fmt.Sprintf("%v", azureMonitorTarget["metricDefinition"]) + urlComponents["resourceName"] = fmt.Sprintf("%v", azureMonitorTarget["resourceName"]) + + ub := urlBuilder{ + ResourceGroup: urlComponents["resourceGroup"], + MetricDefinition: urlComponents["metricDefinition"], + ResourceName: urlComponents["resourceName"], + } + azureURL := ub.Build() + + alias := fmt.Sprintf("%v", azureMonitorTarget["alias"]) + + timeGrain := fmt.Sprintf("%v", azureMonitorTarget["timeGrain"]) + if timeGrain == "auto" { + autoInterval := e.findClosestAllowedIntervalMS(query.IntervalMs) + tg := &TimeGrain{} + timeGrain, err = tg.createISO8601DurationFromIntervalMS(autoInterval) + if err != nil { + return nil, err + } + } + + params := url.Values{} + params.Add("api-version", "2018-01-01") + params.Add("timespan", fmt.Sprintf("%v/%v", startTime.UTC().Format(time.RFC3339), endTime.UTC().Format(time.RFC3339))) + params.Add("interval", timeGrain) + params.Add("aggregation", fmt.Sprintf("%v", azureMonitorTarget["aggregation"])) + params.Add("metricnames", fmt.Sprintf("%v", azureMonitorTarget["metricName"])) + + dimension := strings.TrimSpace(fmt.Sprintf("%v", azureMonitorTarget["dimension"])) + dimensionFilter := strings.TrimSpace(fmt.Sprintf("%v", azureMonitorTarget["dimensionFilter"])) + if azureMonitorTarget["dimension"] != nil && azureMonitorTarget["dimensionFilter"] != nil && len(dimension) > 0 && len(dimensionFilter) > 0 { + params.Add("$filter", fmt.Sprintf("%s eq '%s'", dimension, dimensionFilter)) + } + + target = params.Encode() + + if setting.Env == setting.DEV { + azlog.Debug("Azuremonitor request", "params", params) + } + + azureMonitorQueries = append(azureMonitorQueries, &AzureMonitorQuery{ + URL: azureURL, + UrlComponents: urlComponents, + Target: target, + Params: params, + RefID: query.RefId, + Alias: alias, + }) + } + + return azureMonitorQueries, nil +} + +func (e *AzureMonitorDatasource) executeQuery(ctx context.Context, query *AzureMonitorQuery, queries []*tsdb.Query, timeRange *tsdb.TimeRange) (*tsdb.QueryResult, AzureMonitorResponse, error) { + 
queryResult := &tsdb.QueryResult{Meta: simplejson.New(), RefId: query.RefID} + + req, err := e.createRequest(ctx, e.dsInfo) + if err != nil { + queryResult.Error = err + return queryResult, AzureMonitorResponse{}, nil + } + + req.URL.Path = path.Join(req.URL.Path, query.URL) + req.URL.RawQuery = query.Params.Encode() + queryResult.Meta.Set("rawQuery", req.URL.RawQuery) + + span, ctx := opentracing.StartSpanFromContext(ctx, "azuremonitor query") + span.SetTag("target", query.Target) + span.SetTag("from", timeRange.From) + span.SetTag("until", timeRange.To) + span.SetTag("datasource_id", e.dsInfo.Id) + span.SetTag("org_id", e.dsInfo.OrgId) + + defer span.Finish() + + opentracing.GlobalTracer().Inject( + span.Context(), + opentracing.HTTPHeaders, + opentracing.HTTPHeadersCarrier(req.Header)) + + azlog.Debug("AzureMonitor", "Request URL", req.URL.String()) + res, err := ctxhttp.Do(ctx, e.httpClient, req) + if err != nil { + queryResult.Error = err + return queryResult, AzureMonitorResponse{}, nil + } + + data, err := e.unmarshalResponse(res) + if err != nil { + queryResult.Error = err + return queryResult, AzureMonitorResponse{}, nil + } + + return queryResult, data, nil +} + +func (e *AzureMonitorDatasource) createRequest(ctx context.Context, dsInfo *models.DataSource) (*http.Request, error) { + // find plugin + plugin, ok := plugins.DataSources[dsInfo.Type] + if !ok { + return nil, errors.New("Unable to find datasource plugin Azure Monitor") + } + + var azureMonitorRoute *plugins.AppPluginRoute + for _, route := range plugin.Routes { + if route.Path == "azuremonitor" { + azureMonitorRoute = route + break + } + } + + cloudName := dsInfo.JsonData.Get("cloudName").MustString("azuremonitor") + subscriptionID := dsInfo.JsonData.Get("subscriptionId").MustString() + proxyPass := fmt.Sprintf("%s/subscriptions/%s", cloudName, subscriptionID) + + u, _ := url.Parse(dsInfo.Url) + u.Path = path.Join(u.Path, "render") + + req, err := http.NewRequest(http.MethodGet, u.String(), nil) + if err != nil { + azlog.Error("Failed to create request", "error", err) + return nil, fmt.Errorf("Failed to create request. 
error: %v", err) + } + + req.Header.Set("Content-Type", "application/json") + req.Header.Set("User-Agent", fmt.Sprintf("Grafana/%s", setting.BuildVersion)) + + pluginproxy.ApplyRoute(ctx, req, proxyPass, azureMonitorRoute, dsInfo) + + return req, nil +} + +func (e *AzureMonitorDatasource) unmarshalResponse(res *http.Response) (AzureMonitorResponse, error) { + body, err := ioutil.ReadAll(res.Body) + defer res.Body.Close() + if err != nil { + return AzureMonitorResponse{}, err + } + + if res.StatusCode/100 != 2 { + azlog.Error("Request failed", "status", res.Status, "body", string(body)) + return AzureMonitorResponse{}, fmt.Errorf(string(body)) + } + + var data AzureMonitorResponse + err = json.Unmarshal(body, &data) + if err != nil { + azlog.Error("Failed to unmarshal AzureMonitor response", "error", err, "status", res.Status, "body", string(body)) + return AzureMonitorResponse{}, err + } + + return data, nil +} + +func (e *AzureMonitorDatasource) parseResponse(queryRes *tsdb.QueryResult, data AzureMonitorResponse, query *AzureMonitorQuery) error { + if len(data.Value) == 0 { + return nil + } + + for _, series := range data.Value[0].Timeseries { + points := []tsdb.TimePoint{} + + metadataName := "" + metadataValue := "" + if len(series.Metadatavalues) > 0 { + metadataName = series.Metadatavalues[0].Name.LocalizedValue + metadataValue = series.Metadatavalues[0].Value + } + defaultMetricName := formatLegendKey(query.UrlComponents["resourceName"], data.Value[0].Name.LocalizedValue, metadataName, metadataValue) + + for _, point := range series.Data { + var value float64 + switch query.Params.Get("aggregation") { + case "Average": + value = point.Average + case "Total": + value = point.Total + case "Maximum": + value = point.Maximum + case "Minimum": + value = point.Minimum + case "Count": + value = point.Count + default: + value = point.Count + } + points = append(points, tsdb.NewTimePoint(null.FloatFrom(value), float64((point.TimeStamp).Unix())*1000)) + } + + queryRes.Series = append(queryRes.Series, &tsdb.TimeSeries{ + Name: defaultMetricName, + Points: points, + }) + } + + return nil +} + +// findClosestAllowedIntervalMs is used for the auto time grain setting. +// It finds the closest time grain from the list of allowed time grains for Azure Monitor +// using the Grafana interval in milliseconds +func (e *AzureMonitorDatasource) findClosestAllowedIntervalMS(intervalMs int64) int64 { + closest := allowedIntervalsMS[0] + + for i, allowed := range allowedIntervalsMS { + if intervalMs > allowed { + if i+1 < len(allowedIntervalsMS) { + closest = allowedIntervalsMS[i+1] + } else { + closest = allowed + } + } + } + return closest +} + +// formatLegendKey builds the legend key or timeseries name +func formatLegendKey(resourceName string, metricName string, metadataName string, metadataValue string) string { + if len(metadataName) > 0 { + return fmt.Sprintf("%s{%s=%s}.%s", resourceName, metadataName, metadataValue, metricName) + } + return fmt.Sprintf("%s.%s", resourceName, metricName) +} diff --git a/pkg/tsdb/azuremonitor/azuremonitor-datasource_test.go b/pkg/tsdb/azuremonitor/azuremonitor-datasource_test.go new file mode 100644 index 00000000000..b8d1d6cc266 --- /dev/null +++ b/pkg/tsdb/azuremonitor/azuremonitor-datasource_test.go @@ -0,0 +1,264 @@ +package azuremonitor + +import ( + "encoding/json" + "fmt" + "io/ioutil" + "net/url" + "testing" + "time" + + "github.com/grafana/grafana/pkg/components/simplejson" + "github.com/grafana/grafana/pkg/tsdb" + + . 
"github.com/smartystreets/goconvey/convey" +) + +func TestAzureMonitorDatasource(t *testing.T) { + Convey("AzureMonitorDatasource", t, func() { + datasource := &AzureMonitorDatasource{} + + Convey("Parse queries from frontend and build AzureMonitor API queries", func() { + fromStart := time.Date(2018, 3, 15, 13, 0, 0, 0, time.UTC).In(time.Local) + tsdbQuery := &tsdb.TsdbQuery{ + TimeRange: &tsdb.TimeRange{ + From: fmt.Sprintf("%v", fromStart.Unix()*1000), + To: fmt.Sprintf("%v", fromStart.Add(34*time.Minute).Unix()*1000), + }, + Queries: []*tsdb.Query{ + { + Model: simplejson.NewFromAny(map[string]interface{}{ + "azureMonitor": map[string]interface{}{ + "timeGrain": "PT1M", + "aggregation": "Average", + "resourceGroup": "grafanastaging", + "resourceName": "grafana", + "metricDefinition": "Microsoft.Compute/virtualMachines", + "metricName": "Percentage CPU", + "alias": "testalias", + "queryType": "Azure Monitor", + }, + }), + RefId: "A", + }, + }, + } + Convey("and is a normal query", func() { + queries, err := datasource.buildQueries(tsdbQuery.Queries, tsdbQuery.TimeRange) + So(err, ShouldBeNil) + + So(len(queries), ShouldEqual, 1) + So(queries[0].RefID, ShouldEqual, "A") + So(queries[0].URL, ShouldEqual, "resourceGroups/grafanastaging/providers/Microsoft.Compute/virtualMachines/grafana/providers/microsoft.insights/metrics") + So(queries[0].Target, ShouldEqual, "aggregation=Average&api-version=2018-01-01&interval=PT1M&metricnames=Percentage+CPU×pan=2018-03-15T13%3A00%3A00Z%2F2018-03-15T13%3A34%3A00Z") + So(len(queries[0].Params), ShouldEqual, 5) + So(queries[0].Params["timespan"][0], ShouldEqual, "2018-03-15T13:00:00Z/2018-03-15T13:34:00Z") + So(queries[0].Params["api-version"][0], ShouldEqual, "2018-01-01") + So(queries[0].Params["aggregation"][0], ShouldEqual, "Average") + So(queries[0].Params["metricnames"][0], ShouldEqual, "Percentage CPU") + So(queries[0].Params["interval"][0], ShouldEqual, "PT1M") + So(queries[0].Alias, ShouldEqual, "testalias") + }) + + Convey("and has a dimension filter", func() { + tsdbQuery.Queries[0].Model = simplejson.NewFromAny(map[string]interface{}{ + "azureMonitor": map[string]interface{}{ + "timeGrain": "PT1M", + "aggregation": "Average", + "resourceGroup": "grafanastaging", + "resourceName": "grafana", + "metricDefinition": "Microsoft.Compute/virtualMachines", + "metricName": "Percentage CPU", + "alias": "testalias", + "queryType": "Azure Monitor", + "dimension": "blob", + "dimensionFilter": "*", + }, + }) + + queries, err := datasource.buildQueries(tsdbQuery.Queries, tsdbQuery.TimeRange) + So(err, ShouldBeNil) + + So(queries[0].Target, ShouldEqual, "%24filter=blob+eq+%27%2A%27&aggregation=Average&api-version=2018-01-01&interval=PT1M&metricnames=Percentage+CPU×pan=2018-03-15T13%3A00%3A00Z%2F2018-03-15T13%3A34%3A00Z") + + }) + }) + + Convey("Parse AzureMonitor API response in the time series format", func() { + Convey("when data from query aggregated as average to one time series", func() { + data, err := loadTestFile("./test-data/1-azure-monitor-response-avg.json") + So(err, ShouldBeNil) + So(data.Interval, ShouldEqual, "PT1M") + + res := &tsdb.QueryResult{Meta: simplejson.New(), RefId: "A"} + query := &AzureMonitorQuery{ + UrlComponents: map[string]string{ + "resourceName": "grafana", + }, + Params: url.Values{ + "aggregation": {"Average"}, + }, + } + err = datasource.parseResponse(res, data, query) + So(err, ShouldBeNil) + + So(len(res.Series), ShouldEqual, 1) + So(res.Series[0].Name, ShouldEqual, "grafana.Percentage CPU") + 
So(len(res.Series[0].Points), ShouldEqual, 5) + + So(res.Series[0].Points[0][0].Float64, ShouldEqual, 2.0875) + So(res.Series[0].Points[0][1].Float64, ShouldEqual, 1549620780000) + + So(res.Series[0].Points[1][0].Float64, ShouldEqual, 2.1525) + So(res.Series[0].Points[1][1].Float64, ShouldEqual, 1549620840000) + + So(res.Series[0].Points[2][0].Float64, ShouldEqual, 2.155) + So(res.Series[0].Points[2][1].Float64, ShouldEqual, 1549620900000) + + So(res.Series[0].Points[3][0].Float64, ShouldEqual, 3.6925) + So(res.Series[0].Points[3][1].Float64, ShouldEqual, 1549620960000) + + So(res.Series[0].Points[4][0].Float64, ShouldEqual, 2.44) + So(res.Series[0].Points[4][1].Float64, ShouldEqual, 1549621020000) + }) + + Convey("when data from query aggregated as total to one time series", func() { + data, err := loadTestFile("./test-data/2-azure-monitor-response-total.json") + So(err, ShouldBeNil) + + res := &tsdb.QueryResult{Meta: simplejson.New(), RefId: "A"} + query := &AzureMonitorQuery{ + UrlComponents: map[string]string{ + "resourceName": "grafana", + }, + Params: url.Values{ + "aggregation": {"Total"}, + }, + } + err = datasource.parseResponse(res, data, query) + So(err, ShouldBeNil) + + So(res.Series[0].Points[0][0].Float64, ShouldEqual, 8.26) + So(res.Series[0].Points[0][1].Float64, ShouldEqual, 1549718940000) + }) + + Convey("when data from query aggregated as maximum to one time series", func() { + data, err := loadTestFile("./test-data/3-azure-monitor-response-maximum.json") + So(err, ShouldBeNil) + + res := &tsdb.QueryResult{Meta: simplejson.New(), RefId: "A"} + query := &AzureMonitorQuery{ + UrlComponents: map[string]string{ + "resourceName": "grafana", + }, + Params: url.Values{ + "aggregation": {"Maximum"}, + }, + } + err = datasource.parseResponse(res, data, query) + So(err, ShouldBeNil) + + So(res.Series[0].Points[0][0].Float64, ShouldEqual, 3.07) + So(res.Series[0].Points[0][1].Float64, ShouldEqual, 1549722360000) + }) + + Convey("when data from query aggregated as minimum to one time series", func() { + data, err := loadTestFile("./test-data/4-azure-monitor-response-minimum.json") + So(err, ShouldBeNil) + + res := &tsdb.QueryResult{Meta: simplejson.New(), RefId: "A"} + query := &AzureMonitorQuery{ + UrlComponents: map[string]string{ + "resourceName": "grafana", + }, + Params: url.Values{ + "aggregation": {"Minimum"}, + }, + } + err = datasource.parseResponse(res, data, query) + So(err, ShouldBeNil) + + So(res.Series[0].Points[0][0].Float64, ShouldEqual, 1.51) + So(res.Series[0].Points[0][1].Float64, ShouldEqual, 1549723380000) + }) + + Convey("when data from query aggregated as Count to one time series", func() { + data, err := loadTestFile("./test-data/5-azure-monitor-response-count.json") + So(err, ShouldBeNil) + + res := &tsdb.QueryResult{Meta: simplejson.New(), RefId: "A"} + query := &AzureMonitorQuery{ + UrlComponents: map[string]string{ + "resourceName": "grafana", + }, + Params: url.Values{ + "aggregation": {"Count"}, + }, + } + err = datasource.parseResponse(res, data, query) + So(err, ShouldBeNil) + + So(res.Series[0].Points[0][0].Float64, ShouldEqual, 4) + So(res.Series[0].Points[0][1].Float64, ShouldEqual, 1549723440000) + }) + + Convey("when data from query aggregated as total and has dimension filter", func() { + data, err := loadTestFile("./test-data/6-azure-monitor-response-multi-dimension.json") + So(err, ShouldBeNil) + + res := &tsdb.QueryResult{Meta: simplejson.New(), RefId: "A"} + query := &AzureMonitorQuery{ + UrlComponents: map[string]string{ + "resourceName": 
"grafana", + }, + Params: url.Values{ + "aggregation": {"Average"}, + }, + } + err = datasource.parseResponse(res, data, query) + So(err, ShouldBeNil) + So(len(res.Series), ShouldEqual, 3) + + So(res.Series[0].Name, ShouldEqual, "grafana{blobtype=PageBlob}.Blob Count") + So(res.Series[0].Points[0][0].Float64, ShouldEqual, 3) + + So(res.Series[1].Name, ShouldEqual, "grafana{blobtype=BlockBlob}.Blob Count") + So(res.Series[1].Points[0][0].Float64, ShouldEqual, 1) + + So(res.Series[2].Name, ShouldEqual, "grafana{blobtype=Azure Data Lake Storage}.Blob Count") + So(res.Series[2].Points[0][0].Float64, ShouldEqual, 0) + }) + }) + + Convey("Find closest allowed interval for auto time grain", func() { + intervals := map[string]int64{ + "3m": 180000, + "5m": 300000, + "10m": 600000, + "15m": 900000, + "1d": 86400000, + "2d": 172800000, + } + + closest := datasource.findClosestAllowedIntervalMS(intervals["3m"]) + So(closest, ShouldEqual, intervals["5m"]) + + closest = datasource.findClosestAllowedIntervalMS(intervals["10m"]) + So(closest, ShouldEqual, intervals["15m"]) + + closest = datasource.findClosestAllowedIntervalMS(intervals["2d"]) + So(closest, ShouldEqual, intervals["1d"]) + }) + }) +} + +func loadTestFile(path string) (AzureMonitorResponse, error) { + var data AzureMonitorResponse + + jsonBody, err := ioutil.ReadFile(path) + if err != nil { + return data, err + } + err = json.Unmarshal(jsonBody, &data) + return data, err +} diff --git a/pkg/tsdb/azuremonitor/azuremonitor.go b/pkg/tsdb/azuremonitor/azuremonitor.go new file mode 100644 index 00000000000..31a42d21a12 --- /dev/null +++ b/pkg/tsdb/azuremonitor/azuremonitor.go @@ -0,0 +1,70 @@ +package azuremonitor + +import ( + "context" + "fmt" + "net/http" + + "github.com/grafana/grafana/pkg/log" + "github.com/grafana/grafana/pkg/models" + "github.com/grafana/grafana/pkg/tsdb" +) + +var ( + azlog log.Logger +) + +// AzureMonitorExecutor executes queries for the Azure Monitor datasource - all four services +type AzureMonitorExecutor struct { + httpClient *http.Client + dsInfo *models.DataSource +} + +// NewAzureMonitorExecutor initializes a http client +func NewAzureMonitorExecutor(dsInfo *models.DataSource) (tsdb.TsdbQueryEndpoint, error) { + httpClient, err := dsInfo.GetHttpClient() + if err != nil { + return nil, err + } + + return &AzureMonitorExecutor{ + httpClient: httpClient, + dsInfo: dsInfo, + }, nil +} + +func init() { + azlog = log.New("tsdb.azuremonitor") + tsdb.RegisterTsdbQueryEndpoint("grafana-azure-monitor-datasource", NewAzureMonitorExecutor) +} + +// Query takes in the frontend queries, parses them into the query format +// expected by chosen Azure Monitor service (Azure Monitor, App Insights etc.) 
+// executes the queries against the API and parses the response into +// the right format +func (e *AzureMonitorExecutor) Query(ctx context.Context, dsInfo *models.DataSource, tsdbQuery *tsdb.TsdbQuery) (*tsdb.Response, error) { + var result *tsdb.Response + var err error + + var azureMonitorQueries []*tsdb.Query + + for _, query := range tsdbQuery.Queries { + queryType := query.Model.Get("queryType").MustString("") + + switch queryType { + case "Azure Monitor": + azureMonitorQueries = append(azureMonitorQueries, query) + default: + return nil, fmt.Errorf("Alerting not supported for %s", queryType) + } + } + + azDatasource := &AzureMonitorDatasource{ + httpClient: e.httpClient, + dsInfo: e.dsInfo, + } + + result, err = azDatasource.executeTimeSeriesQuery(ctx, azureMonitorQueries, tsdbQuery.TimeRange) + + return result, err +} diff --git a/pkg/tsdb/azuremonitor/test-data/1-azure-monitor-response-avg.json b/pkg/tsdb/azuremonitor/test-data/1-azure-monitor-response-avg.json new file mode 100644 index 00000000000..5fc84f6afa6 --- /dev/null +++ b/pkg/tsdb/azuremonitor/test-data/1-azure-monitor-response-avg.json @@ -0,0 +1,47 @@ +{ + "cost": 0, + "timespan": "2019-02-08T10:13:50Z\/2019-02-08T16:13:50Z", + "interval": "PT1M", + "value": [ + { + "id": "\/subscriptions\/xxx\/resourceGroups\/grafanastaging\/providers\/Microsoft.Compute\/virtualMachines\/grafana\/providers\/Microsoft.Insights\/metrics\/Percentage CPU", + "type": "Microsoft.Insights\/metrics", + "name": { + "value": "Percentage CPU", + "localizedValue": "Percentage CPU" + }, + "unit": "Percent", + "timeseries": [ + { + "metadatavalues": [ + + ], + "data": [ + { + "timeStamp": "2019-02-08T10:13:00Z", + "average": 2.0875 + }, + { + "timeStamp": "2019-02-08T10:14:00Z", + "average": 2.1525 + }, + { + "timeStamp": "2019-02-08T10:15:00Z", + "average": 2.155 + }, + { + "timeStamp": "2019-02-08T10:16:00Z", + "average": 3.6925 + }, + { + "timeStamp": "2019-02-08T10:17:00Z", + "average": 2.44 + } + ] + } + ] + } + ], + "namespace": "Microsoft.Compute\/virtualMachines", + "resourceregion": "westeurope" +} diff --git a/pkg/tsdb/azuremonitor/test-data/2-azure-monitor-response-total.json b/pkg/tsdb/azuremonitor/test-data/2-azure-monitor-response-total.json new file mode 100644 index 00000000000..d0b22f1b02c --- /dev/null +++ b/pkg/tsdb/azuremonitor/test-data/2-azure-monitor-response-total.json @@ -0,0 +1,47 @@ +{ + "cost": 0, + "timespan": "2019-02-09T13:29:41Z\/2019-02-09T19:29:41Z", + "interval": "PT1M", + "value": [ + { + "id": "\/subscriptions\/xxx\/resourceGroups\/grafanastaging\/providers\/Microsoft.Compute\/virtualMachines\/grafana\/providers\/Microsoft.Insights\/metrics\/Percentage CPU", + "type": "Microsoft.Insights\/metrics", + "name": { + "value": "Percentage CPU", + "localizedValue": "Percentage CPU" + }, + "unit": "Percent", + "timeseries": [ + { + "metadatavalues": [ + + ], + "data": [ + { + "timeStamp": "2019-02-09T13:29:00Z", + "total": 8.26 + }, + { + "timeStamp": "2019-02-09T13:30:00Z", + "total": 8.7 + }, + { + "timeStamp": "2019-02-09T13:31:00Z", + "total": 14.82 + }, + { + "timeStamp": "2019-02-09T13:32:00Z", + "total": 10.07 + }, + { + "timeStamp": "2019-02-09T13:33:00Z", + "total": 8.52 + } + ] + } + ] + } + ], + "namespace": "Microsoft.Compute\/virtualMachines", + "resourceregion": "westeurope" +} diff --git a/pkg/tsdb/azuremonitor/test-data/3-azure-monitor-response-maximum.json b/pkg/tsdb/azuremonitor/test-data/3-azure-monitor-response-maximum.json new file mode 100644 index 00000000000..1e46cceb2be --- /dev/null +++ 
b/pkg/tsdb/azuremonitor/test-data/3-azure-monitor-response-maximum.json @@ -0,0 +1,47 @@ +{ + "cost": 0, + "timespan": "2019-02-09T14:26:12Z\/2019-02-09T20:26:12Z", + "interval": "PT1M", + "value": [ + { + "id": "\/subscriptions\/xxx\/resourceGroups\/grafanastaging\/providers\/Microsoft.Compute\/virtualMachines\/grafana\/providers\/Microsoft.Insights\/metrics\/Percentage CPU", + "type": "Microsoft.Insights\/metrics", + "name": { + "value": "Percentage CPU", + "localizedValue": "Percentage CPU" + }, + "unit": "Percent", + "timeseries": [ + { + "metadatavalues": [ + + ], + "data": [ + { + "timeStamp": "2019-02-09T14:26:00Z", + "maximum": 3.07 + }, + { + "timeStamp": "2019-02-09T14:27:00Z", + "maximum": 2.92 + }, + { + "timeStamp": "2019-02-09T14:28:00Z", + "maximum": 2.87 + }, + { + "timeStamp": "2019-02-09T14:29:00Z", + "maximum": 2.27 + }, + { + "timeStamp": "2019-02-09T14:30:00Z", + "maximum": 2.52 + } + ] + } + ] + } + ], + "namespace": "Microsoft.Compute\/virtualMachines", + "resourceregion": "westeurope" +} diff --git a/pkg/tsdb/azuremonitor/test-data/4-azure-monitor-response-minimum.json b/pkg/tsdb/azuremonitor/test-data/4-azure-monitor-response-minimum.json new file mode 100644 index 00000000000..16310614214 --- /dev/null +++ b/pkg/tsdb/azuremonitor/test-data/4-azure-monitor-response-minimum.json @@ -0,0 +1,47 @@ +{ + "cost": 0, + "timespan": "2019-02-09T14:43:21Z\/2019-02-09T20:43:21Z", + "interval": "PT1M", + "value": [ + { + "id": "\/subscriptions\/xxx\/resourceGroups\/grafanastaging\/providers\/Microsoft.Compute\/virtualMachines\/grafana\/providers\/Microsoft.Insights\/metrics\/Percentage CPU", + "type": "Microsoft.Insights\/metrics", + "name": { + "value": "Percentage CPU", + "localizedValue": "Percentage CPU" + }, + "unit": "Percent", + "timeseries": [ + { + "metadatavalues": [ + + ], + "data": [ + { + "timeStamp": "2019-02-09T14:43:00Z", + "minimum": 1.51 + }, + { + "timeStamp": "2019-02-09T14:44:00Z", + "minimum": 2.38 + }, + { + "timeStamp": "2019-02-09T14:45:00Z", + "minimum": 1.69 + }, + { + "timeStamp": "2019-02-09T14:46:00Z", + "minimum": 2.27 + }, + { + "timeStamp": "2019-02-09T14:47:00Z", + "minimum": 1.96 + } + ] + } + ] + } + ], + "namespace": "Microsoft.Compute\/virtualMachines", + "resourceregion": "westeurope" +} diff --git a/pkg/tsdb/azuremonitor/test-data/5-azure-monitor-response-count.json b/pkg/tsdb/azuremonitor/test-data/5-azure-monitor-response-count.json new file mode 100644 index 00000000000..91afc33f070 --- /dev/null +++ b/pkg/tsdb/azuremonitor/test-data/5-azure-monitor-response-count.json @@ -0,0 +1,47 @@ +{ + "cost": 0, + "timespan": "2019-02-09T14:44:52Z\/2019-02-09T20:44:52Z", + "interval": "PT1M", + "value": [ + { + "id": "\/subscriptions\/xxx\/resourceGroups\/grafanastaging\/providers\/Microsoft.Compute\/virtualMachines\/grafana\/providers\/Microsoft.Insights\/metrics\/Percentage CPU", + "type": "Microsoft.Insights\/metrics", + "name": { + "value": "Percentage CPU", + "localizedValue": "Percentage CPU" + }, + "unit": "Percent", + "timeseries": [ + { + "metadatavalues": [ + + ], + "data": [ + { + "timeStamp": "2019-02-09T14:44:00Z", + "count": 4 + }, + { + "timeStamp": "2019-02-09T14:45:00Z", + "count": 4 + }, + { + "timeStamp": "2019-02-09T14:46:00Z", + "count": 4 + }, + { + "timeStamp": "2019-02-09T14:47:00Z", + "count": 4 + }, + { + "timeStamp": "2019-02-09T14:48:00Z", + "count": 4 + } + ] + } + ] + } + ], + "namespace": "Microsoft.Compute\/virtualMachines", + "resourceregion": "westeurope" +} diff --git 
a/pkg/tsdb/azuremonitor/test-data/6-azure-monitor-response-multi-dimension.json b/pkg/tsdb/azuremonitor/test-data/6-azure-monitor-response-multi-dimension.json new file mode 100644 index 00000000000..dddcef0e79c --- /dev/null +++ b/pkg/tsdb/azuremonitor/test-data/6-azure-monitor-response-multi-dimension.json @@ -0,0 +1,128 @@ +{ + "cost": 0, + "timespan": "2019-02-09T15:21:39Z\/2019-02-09T21:21:39Z", + "interval": "PT1H", + "value": [ + { + "id": "\/subscriptions\/xxx\/resourceGroups\/grafanastaging\/providers\/Microsoft.Storage\/storageAccounts\/grafanastaging\/blobServices\/default\/providers\/Microsoft.Insights\/metrics\/BlobCount", + "type": "Microsoft.Insights\/metrics", + "name": { + "value": "BlobCount", + "localizedValue": "Blob Count" + }, + "unit": "Count", + "timeseries": [ + { + "metadatavalues": [ + { + "name": { + "value": "blobtype", + "localizedValue": "blobtype" + }, + "value": "PageBlob" + } + ], + "data": [ + { + "timeStamp": "2019-02-09T15:21:00Z", + "average": 3 + }, + { + "timeStamp": "2019-02-09T16:21:00Z", + "average": 3 + }, + { + "timeStamp": "2019-02-09T17:21:00Z", + "average": 3 + }, + { + "timeStamp": "2019-02-09T18:21:00Z", + "average": 3 + }, + { + "timeStamp": "2019-02-09T19:21:00Z", + "average": 3 + }, + { + "timeStamp": "2019-02-09T20:21:00Z" + } + ] + }, + { + "metadatavalues": [ + { + "name": { + "value": "blobtype", + "localizedValue": "blobtype" + }, + "value": "BlockBlob" + } + ], + "data": [ + { + "timeStamp": "2019-02-09T15:21:00Z", + "average": 1 + }, + { + "timeStamp": "2019-02-09T16:21:00Z", + "average": 1 + }, + { + "timeStamp": "2019-02-09T17:21:00Z", + "average": 1 + }, + { + "timeStamp": "2019-02-09T18:21:00Z", + "average": 1 + }, + { + "timeStamp": "2019-02-09T19:21:00Z", + "average": 1 + }, + { + "timeStamp": "2019-02-09T20:21:00Z" + } + ] + }, + { + "metadatavalues": [ + { + "name": { + "value": "blobtype", + "localizedValue": "blobtype" + }, + "value": "Azure Data Lake Storage" + } + ], + "data": [ + { + "timeStamp": "2019-02-09T15:21:00Z", + "average": 0 + }, + { + "timeStamp": "2019-02-09T16:21:00Z", + "average": 0 + }, + { + "timeStamp": "2019-02-09T17:21:00Z", + "average": 0 + }, + { + "timeStamp": "2019-02-09T18:21:00Z", + "average": 0 + }, + { + "timeStamp": "2019-02-09T19:21:00Z", + "average": 0 + }, + { + "timeStamp": "2019-02-09T20:21:00Z" + } + ] + } + ] + } + ], + "namespace": "Microsoft.Storage\/storageAccounts\/blobServices", + "resourceregion": "westeurope" +} diff --git a/pkg/tsdb/azuremonitor/time-grain.go b/pkg/tsdb/azuremonitor/time-grain.go new file mode 100644 index 00000000000..425e39b6208 --- /dev/null +++ b/pkg/tsdb/azuremonitor/time-grain.go @@ -0,0 +1,52 @@ +package azuremonitor + +import ( + "fmt" + "strconv" + "strings" + "time" + + "github.com/grafana/grafana/pkg/tsdb" +) + +// TimeGrain handles convertions between +// the ISO 8601 Duration format (PT1H), Kbn units (1h) and Time Grains (1 hour) +// Also handles using the automatic Grafana interval to calculate a ISO 8601 Duration. 
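(Editor's aside, not part of the patch: the comment above describes mapping the auto-calculated Grafana interval onto an ISO 8601 duration, with one minute as the floor because that is the smallest time grain Azure Monitor accepts. A simplified, standalone sketch of that mapping follows; the real implementation below goes through tsdb.FormatDuration and TimeGrain.createISO8601DurationFromIntervalMS, so treat this only as an approximation of the intended behaviour.)

    package main

    import "fmt"

    // toISO8601 approximates the interval-to-time-grain conversion added in this
    // patch: intervals under one minute are clamped to PT1M, minutes and hours
    // become PT..M / PT..H, and day-sized intervals become P..D.
    func toISO8601(intervalMS int64) string {
        switch {
        case intervalMS < 60000:
            return "PT1M"
        case intervalMS < 3600000:
            return fmt.Sprintf("PT%dM", intervalMS/60000)
        case intervalMS < 86400000:
            return fmt.Sprintf("PT%dH", intervalMS/3600000)
        default:
            return fmt.Sprintf("P%dD", intervalMS/86400000)
        }
    }

    func main() {
        for _, ms := range []int64{30000, 600000, 172800000} {
            fmt.Printf("%dms -> %s\n", ms, toISO8601(ms)) // PT1M, PT10M, P2D
        }
    }

These sample values match the cases exercised by time-grain_test.go further down in the patch.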
+type TimeGrain struct{} + +var ( + smallTimeUnits = []string{"hour", "minute", "h", "m"} +) + +func (tg *TimeGrain) createISO8601DurationFromIntervalMS(interval int64) (string, error) { + formatted := tsdb.FormatDuration(time.Duration(interval) * time.Millisecond) + + if strings.Contains(formatted, "ms") { + return "PT1M", nil + } + + timeValueString := formatted[0 : len(formatted)-1] + timeValue, err := strconv.Atoi(timeValueString) + if err != nil { + return "", fmt.Errorf("Could not parse interval %v to an ISO 8061 duration", interval) + } + + unit := formatted[len(formatted)-1:] + + if unit == "s" && timeValue < 60 { + // minimum interval is 1m for Azure Monitor + return "PT1M", nil + } + + return tg.createISO8601Duration(timeValue, unit), nil +} + +func (tg *TimeGrain) createISO8601Duration(timeValue int, timeUnit string) string { + for _, smallTimeUnit := range smallTimeUnits { + if timeUnit == smallTimeUnit { + return fmt.Sprintf("PT%v%v", timeValue, strings.ToUpper(timeUnit[0:1])) + } + } + + return fmt.Sprintf("P%v%v", timeValue, strings.ToUpper(timeUnit[0:1])) +} diff --git a/pkg/tsdb/azuremonitor/time-grain_test.go b/pkg/tsdb/azuremonitor/time-grain_test.go new file mode 100644 index 00000000000..2df3c92b0ff --- /dev/null +++ b/pkg/tsdb/azuremonitor/time-grain_test.go @@ -0,0 +1,71 @@ +package azuremonitor + +import ( + "testing" + + . "github.com/smartystreets/goconvey/convey" +) + +func TestTimeGrain(t *testing.T) { + Convey("TimeGrain", t, func() { + tgc := &TimeGrain{} + + Convey("create ISO 8601 Duration", func() { + Convey("when given a time unit smaller than a day", func() { + minuteKbnDuration := tgc.createISO8601Duration(1, "m") + hourKbnDuration := tgc.createISO8601Duration(2, "h") + minuteDuration := tgc.createISO8601Duration(1, "minute") + hourDuration := tgc.createISO8601Duration(2, "hour") + + Convey("should convert it to a time duration", func() { + So(minuteKbnDuration, ShouldEqual, "PT1M") + So(hourKbnDuration, ShouldEqual, "PT2H") + + So(minuteDuration, ShouldEqual, "PT1M") + So(hourDuration, ShouldEqual, "PT2H") + }) + }) + + Convey("when given the day time unit", func() { + kbnDuration := tgc.createISO8601Duration(1, "d") + duration := tgc.createISO8601Duration(2, "day") + + Convey("should convert it to a date duration", func() { + So(kbnDuration, ShouldEqual, "P1D") + So(duration, ShouldEqual, "P2D") + }) + }) + }) + + Convey("create ISO 8601 Duration from Grafana interval in milliseconds", func() { + Convey("and interval is less than a minute", func() { + durationMS, err := tgc.createISO8601DurationFromIntervalMS(100) + So(err, ShouldBeNil) + + durationS, err := tgc.createISO8601DurationFromIntervalMS(59999) + So(err, ShouldBeNil) + + Convey("should be rounded up to a minute as is the minimum interval for Azure Monitor", func() { + So(durationMS, ShouldEqual, "PT1M") + So(durationS, ShouldEqual, "PT1M") + }) + }) + + Convey("and interval is more than a minute", func() { + intervals := map[string]int64{ + "10m": 600000, + "2d": 172800000, + } + durationM, err := tgc.createISO8601DurationFromIntervalMS(intervals["10m"]) + So(err, ShouldBeNil) + durationD, err := tgc.createISO8601DurationFromIntervalMS(intervals["2d"]) + So(err, ShouldBeNil) + + Convey("should be rounded up to a minute as is the minimum interval for Azure Monitor", func() { + So(durationM, ShouldEqual, "PT10M") + So(durationD, ShouldEqual, "P2D") + }) + }) + }) + }) +} diff --git a/pkg/tsdb/azuremonitor/types.go b/pkg/tsdb/azuremonitor/types.go new file mode 100644 index 
00000000000..b547c71f185 --- /dev/null +++ b/pkg/tsdb/azuremonitor/types.go @@ -0,0 +1,77 @@ +package azuremonitor + +import ( + "net/url" + "time" +) + +// AzureMonitorQuery is the query for all the services as they have similar queries +// with a url, a querystring and an alias field +type AzureMonitorQuery struct { + URL string + UrlComponents map[string]string + Target string + Params url.Values + RefID string + Alias string +} + +// AzureMonitorResponse is the json response from the Azure Monitor API +type AzureMonitorResponse struct { + Cost int `json:"cost"` + Timespan string `json:"timespan"` + Interval string `json:"interval"` + Value []struct { + ID string `json:"id"` + Type string `json:"type"` + Name struct { + Value string `json:"value"` + LocalizedValue string `json:"localizedValue"` + } `json:"name"` + Unit string `json:"unit"` + Timeseries []struct { + Metadatavalues []struct { + Name struct { + Value string `json:"value"` + LocalizedValue string `json:"localizedValue"` + } `json:"name"` + Value string `json:"value"` + } `json:"metadatavalues"` + Data []struct { + TimeStamp time.Time `json:"timeStamp"` + Average float64 `json:"average,omitempty"` + Total float64 `json:"total,omitempty"` + Count float64 `json:"count,omitempty"` + Maximum float64 `json:"maximum,omitempty"` + Minimum float64 `json:"minimum,omitempty"` + } `json:"data"` + } `json:"timeseries"` + } `json:"value"` + Namespace string `json:"namespace"` + Resourceregion string `json:"resourceregion"` +} + +// ApplicationInsightsResponse is the json response from the Application Insights API +type ApplicationInsightsResponse struct { + Tables []struct { + TableName string `json:"TableName"` + Columns []struct { + ColumnName string `json:"ColumnName"` + DataType string `json:"DataType"` + ColumnType string `json:"ColumnType"` + } `json:"Columns"` + Rows [][]interface{} `json:"Rows"` + } `json:"Tables"` +} + +// AzureLogAnalyticsResponse is the json response object from the Azure Log Analytics API. 
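(Editor's aside, not part of the patch: unlike the metrics API, the two table-shaped response types here ship without sample payloads in test-data. As a rough illustration, a payload matching the json tags on AzureLogAnalyticsResponse would decode as below; the field names come from the struct declared next, while the sample column names and values are invented for illustration only.)

    package main

    import (
        "encoding/json"
        "fmt"
    )

    // logAnalyticsResponse mirrors the json tags on AzureLogAnalyticsResponse in types.go.
    type logAnalyticsResponse struct {
        Tables []struct {
            Name    string `json:"name"`
            Columns []struct {
                Name string `json:"name"`
                Type string `json:"type"`
            } `json:"columns"`
            Rows [][]interface{} `json:"rows"`
        } `json:"tables"`
    }

    func main() {
        // Invented sample payload shaped by the struct tags above.
        payload := []byte(`{"tables":[{"name":"PrimaryResult","columns":[{"name":"TimeGenerated","type":"datetime"},{"name":"Count","type":"long"}],"rows":[["2019-02-08T10:13:00Z",42]]}]}`)

        var resp logAnalyticsResponse
        if err := json.Unmarshal(payload, &resp); err != nil {
            panic(err)
        }
        fmt.Println(resp.Tables[0].Name, resp.Tables[0].Columns[1].Name, resp.Tables[0].Rows[0][1]) // PrimaryResult Count 42
    }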
+type AzureLogAnalyticsResponse struct { + Tables []struct { + Name string `json:"name"` + Columns []struct { + Name string `json:"name"` + Type string `json:"type"` + } `json:"columns"` + Rows [][]interface{} `json:"rows"` + } `json:"tables"` +} diff --git a/pkg/tsdb/azuremonitor/url-builder.go b/pkg/tsdb/azuremonitor/url-builder.go new file mode 100644 index 00000000000..c252048f517 --- /dev/null +++ b/pkg/tsdb/azuremonitor/url-builder.go @@ -0,0 +1,28 @@ +package azuremonitor + +import ( + "fmt" + "strings" +) + +// urlBuilder builds the URL for calling the Azure Monitor API +type urlBuilder struct { + ResourceGroup string + MetricDefinition string + ResourceName string +} + +// Build checks the metric definition property to see which form of the url +// should be returned +func (ub *urlBuilder) Build() string { + + if strings.Count(ub.MetricDefinition, "/") > 1 { + rn := strings.Split(ub.ResourceName, "/") + lastIndex := strings.LastIndex(ub.MetricDefinition, "/") + service := ub.MetricDefinition[lastIndex+1:] + md := ub.MetricDefinition[0:lastIndex] + return fmt.Sprintf("resourceGroups/%s/providers/%s/%s/%s/%s/providers/microsoft.insights/metrics", ub.ResourceGroup, md, rn[0], service, rn[1]) + } + + return fmt.Sprintf("resourceGroups/%s/providers/%s/%s/providers/microsoft.insights/metrics", ub.ResourceGroup, ub.MetricDefinition, ub.ResourceName) +} diff --git a/pkg/tsdb/azuremonitor/url-builder_test.go b/pkg/tsdb/azuremonitor/url-builder_test.go new file mode 100644 index 00000000000..85c4f81bc83 --- /dev/null +++ b/pkg/tsdb/azuremonitor/url-builder_test.go @@ -0,0 +1,45 @@ +package azuremonitor + +import ( + "testing" + + . "github.com/smartystreets/goconvey/convey" +) + +func TestURLBuilder(t *testing.T) { + Convey("AzureMonitor URL Builder", t, func() { + + Convey("when metric definition is in the short form", func() { + ub := &urlBuilder{ + ResourceGroup: "rg", + MetricDefinition: "Microsoft.Compute/virtualMachines", + ResourceName: "rn", + } + + url := ub.Build() + So(url, ShouldEqual, "resourceGroups/rg/providers/Microsoft.Compute/virtualMachines/rn/providers/microsoft.insights/metrics") + }) + + Convey("when metric definition is Microsoft.Storage/storageAccounts/blobServices", func() { + ub := &urlBuilder{ + ResourceGroup: "rg", + MetricDefinition: "Microsoft.Storage/storageAccounts/blobServices", + ResourceName: "rn1/default", + } + + url := ub.Build() + So(url, ShouldEqual, "resourceGroups/rg/providers/Microsoft.Storage/storageAccounts/rn1/blobServices/default/providers/microsoft.insights/metrics") + }) + + Convey("when metric definition is Microsoft.Storage/storageAccounts/fileServices", func() { + ub := &urlBuilder{ + ResourceGroup: "rg", + MetricDefinition: "Microsoft.Storage/storageAccounts/fileServices", + ResourceName: "rn1/default", + } + + url := ub.Build() + So(url, ShouldEqual, "resourceGroups/rg/providers/Microsoft.Storage/storageAccounts/rn1/fileServices/default/providers/microsoft.insights/metrics") + }) + }) +} diff --git a/pkg/tsdb/interval.go b/pkg/tsdb/interval.go index fd6adee39d7..7819ef3ecad 100644 --- a/pkg/tsdb/interval.go +++ b/pkg/tsdb/interval.go @@ -59,11 +59,11 @@ func (ic *intervalCalculator) Calculate(timerange *TimeRange, minInterval time.D interval := time.Duration((to - from) / defaultRes) if interval < minInterval { - return Interval{Text: formatDuration(minInterval), Value: minInterval} + return Interval{Text: FormatDuration(minInterval), Value: minInterval} } rounded := roundInterval(interval) - return Interval{Text: 
formatDuration(rounded), Value: rounded} + return Interval{Text: FormatDuration(rounded), Value: rounded} } func GetIntervalFrom(dsInfo *models.DataSource, queryModel *simplejson.Json, defaultInterval time.Duration) (time.Duration, error) { @@ -89,7 +89,8 @@ func GetIntervalFrom(dsInfo *models.DataSource, queryModel *simplejson.Json, def return parsedInterval, nil } -func formatDuration(inter time.Duration) string { +// FormatDuration converts a duration into the kbn format e.g. 1m 2h or 3d +func FormatDuration(inter time.Duration) string { if inter >= year { return fmt.Sprintf("%dy", inter/year) } diff --git a/pkg/tsdb/interval_test.go b/pkg/tsdb/interval_test.go index 941b08dd554..4cd3fcea532 100644 --- a/pkg/tsdb/interval_test.go +++ b/pkg/tsdb/interval_test.go @@ -51,11 +51,11 @@ func TestInterval(t *testing.T) { }) Convey("Format value", func() { - So(formatDuration(time.Second*61), ShouldEqual, "1m") - So(formatDuration(time.Millisecond*30), ShouldEqual, "30ms") - So(formatDuration(time.Hour*23), ShouldEqual, "23h") - So(formatDuration(time.Hour*24), ShouldEqual, "1d") - So(formatDuration(time.Hour*24*367), ShouldEqual, "1y") + So(FormatDuration(time.Second*61), ShouldEqual, "1m") + So(FormatDuration(time.Millisecond*30), ShouldEqual, "30ms") + So(FormatDuration(time.Hour*23), ShouldEqual, "23h") + So(FormatDuration(time.Hour*24), ShouldEqual, "1d") + So(FormatDuration(time.Hour*24*367), ShouldEqual, "1y") }) }) } diff --git a/public/app/core/services/keybindingSrv.ts b/public/app/core/services/keybindingSrv.ts index 917d1801c0e..7dab7cffd6f 100644 --- a/public/app/core/services/keybindingSrv.ts +++ b/public/app/core/services/keybindingSrv.ts @@ -139,6 +139,10 @@ export class KeybindingSrv { ); } + unbind(keyArg: string, keyType?: string) { + Mousetrap.unbind(keyArg, keyType); + } + showDashEditView() { const search = _.extend(this.$location.search(), { editview: 'settings' }); this.$location.search(search); @@ -291,3 +295,17 @@ export class KeybindingSrv { } coreModule.service('keybindingSrv', KeybindingSrv); + +/** + * Code below exports the service to react components + */ + +let singletonInstance: KeybindingSrv; + +export function setKeybindingSrv(instance: KeybindingSrv) { + singletonInstance = instance; +} + +export function getKeybindingSrv(): KeybindingSrv { + return singletonInstance; +} diff --git a/public/app/features/api-keys/ApiKeysPage.tsx b/public/app/features/api-keys/ApiKeysPage.tsx index 21d1ca54a66..7bed498e2ac 100644 --- a/public/app/features/api-keys/ApiKeysPage.tsx +++ b/public/app/features/api-keys/ApiKeysPage.tsx @@ -107,7 +107,7 @@ export class ApiKeysPage extends PureComponent { renderEmptyList() { const { isAdding } = this.state; return ( -
+ <> {!isAdding && ( { /> )} {this.renderAddApiKeyForm()} -
+ ); } @@ -183,7 +183,7 @@ export class ApiKeysPage extends PureComponent { const { apiKeys, searchQuery } = this.props; return ( -
+ <>
+ ); } diff --git a/public/app/features/api-keys/__snapshots__/ApiKeysPage.test.tsx.snap b/public/app/features/api-keys/__snapshots__/ApiKeysPage.test.tsx.snap index 03f11f79cc3..2deb7fa5e7f 100644 --- a/public/app/features/api-keys/__snapshots__/ApiKeysPage.test.tsx.snap +++ b/public/app/features/api-keys/__snapshots__/ApiKeysPage.test.tsx.snap @@ -35,118 +35,114 @@ exports[`Render should render CTA if there are no API keys 1`] = ` -
- - + +
-
- -
- Add API Key -
-
+ +
+ Add API Key +
+ +
-
- - Key name - - + +
+
+ + Role + + + - - - - - -
-
- -
+ Viewer + + + + +
- -
- -
+
+ +
+
+ +
+
`; diff --git a/public/app/plugins/datasource/grafana-azure-monitor-datasource/editor/slate-plugins/prism/index.tsx b/public/app/features/explore/slate-plugins/prism/index.tsx similarity index 100% rename from public/app/plugins/datasource/grafana-azure-monitor-datasource/editor/slate-plugins/prism/index.tsx rename to public/app/features/explore/slate-plugins/prism/index.tsx diff --git a/public/app/features/org/OrgDetailsPage.tsx b/public/app/features/org/OrgDetailsPage.tsx index ee644f0006f..236558db40a 100644 --- a/public/app/features/org/OrgDetailsPage.tsx +++ b/public/app/features/org/OrgDetailsPage.tsx @@ -36,18 +36,16 @@ export class OrgDetailsPage extends PureComponent { return ( -
- {!isLoading && ( -
- this.onOrgNameChange(name)} - onSubmit={this.onUpdateOrganization} - orgName={organization.name} - /> - -
- )} + {!isLoading && ( +
+ this.onOrgNameChange(name)} + onSubmit={this.onUpdateOrganization} + orgName={organization.name} + /> +
+ )} ); diff --git a/public/app/features/org/__snapshots__/OrgDetailsPage.test.tsx.snap b/public/app/features/org/__snapshots__/OrgDetailsPage.test.tsx.snap index 9e13a73901e..2339975ca8b 100644 --- a/public/app/features/org/__snapshots__/OrgDetailsPage.test.tsx.snap +++ b/public/app/features/org/__snapshots__/OrgDetailsPage.test.tsx.snap @@ -15,11 +15,7 @@ exports[`Render should render component 1`] = ` > -
- + /> `; @@ -39,19 +35,15 @@ exports[`Render should render organization and preferences 1`] = ` -
-
- - -
+
+ +
diff --git a/public/app/features/teams/TeamList.tsx b/public/app/features/teams/TeamList.tsx index 8c4caca197e..4007c58f579 100644 --- a/public/app/features/teams/TeamList.tsx +++ b/public/app/features/teams/TeamList.tsx @@ -86,7 +86,7 @@ export class TeamList extends PureComponent { const { teams, searchQuery } = this.props; return ( -
+ <>
-
+ ); } diff --git a/public/app/features/teams/TeamPages.tsx b/public/app/features/teams/TeamPages.tsx index ebbde595601..7a38197ff71 100644 --- a/public/app/features/teams/TeamPages.tsx +++ b/public/app/features/teams/TeamPages.tsx @@ -84,7 +84,7 @@ export class TeamPages extends PureComponent { return ( - {team && Object.keys(team).length !== 0 &&
{this.renderPage()}
} + {team && Object.keys(team).length !== 0 && this.renderPage()}
); diff --git a/public/app/features/teams/__snapshots__/TeamList.test.tsx.snap b/public/app/features/teams/__snapshots__/TeamList.test.tsx.snap index 9331bb8cba5..3a26b566e14 100644 --- a/public/app/features/teams/__snapshots__/TeamList.test.tsx.snap +++ b/public/app/features/teams/__snapshots__/TeamList.test.tsx.snap @@ -36,320 +36,316 @@ exports[`Render should render teams table 1`] = ` isLoading={false} >
-
- -
-
+ - - - - - - - + + + + + + + + + + + + + + + + + + + +
- - Name - - Email - - Members - + +
+ + + + + + + + + + + + + + + - - - + + + - - - - - - + + - - - - - - + + - - - - - - + + - - - - - - + + + + + - - - - - - -
+ + Name + + Email + + Members + +
+ + + + + + test-1 + + + + test-1@test.com + + + + 1 + + + -
- - - - - - - test-1 - - - - test-1@test.com - - - - 1 - - - -
- - - - - - - test-2 - - - - test-2@test.com - - - - 2 - - - -
- - - - - - - test-3 - - - - test-3@test.com - - - - 3 - - - -
- - - - - - - test-4 - - - - test-4@test.com - - - - 4 - - - -
- +
+ - - - - - - test-5 - - - - test-5@test.com - - - - 5 - - - -
-
+ + +
+ + test-3 + + + + test-3@test.com + + + + 3 + + + +
+ + + + + + test-4 + + + + test-4@test.com + + + + 4 + + + +
+ + + + + + test-5 + + + + test-5@test.com + + + + 5 + + + +
diff --git a/public/app/features/teams/__snapshots__/TeamPages.test.tsx.snap b/public/app/features/teams/__snapshots__/TeamPages.test.tsx.snap index 0c09eb3f82d..70f37cea4c5 100644 --- a/public/app/features/teams/__snapshots__/TeamPages.test.tsx.snap +++ b/public/app/features/teams/__snapshots__/TeamPages.test.tsx.snap @@ -17,11 +17,7 @@ exports[`Render should render group sync page 1`] = ` -
- -
+
`; @@ -33,13 +29,9 @@ exports[`Render should render member page if team not empty 1`] = ` -
- -
+
`; @@ -51,11 +43,7 @@ exports[`Render should render settings and preferences page 1`] = ` -
- -
+
`; diff --git a/public/app/plugins/datasource/grafana-azure-monitor-datasource/app_insights/app_insights_datasource.ts b/public/app/plugins/datasource/grafana-azure-monitor-datasource/app_insights/app_insights_datasource.ts index 950fa73a16b..97f76d229fb 100644 --- a/public/app/plugins/datasource/grafana-azure-monitor-datasource/app_insights/app_insights_datasource.ts +++ b/public/app/plugins/datasource/grafana-azure-monitor-datasource/app_insights/app_insights_datasource.ts @@ -224,4 +224,13 @@ export default class AppInsightsDatasource { return new ResponseParser(result).parseGroupBys(); }); } + + getQuerySchema() { + const url = `${this.baseUrl}/query/schema`; + return this.doRequest(url).then(result => { + const schema = new ResponseParser(result).parseQuerySchema(); + // console.log(schema); + return schema; + }); + } } diff --git a/public/app/plugins/datasource/grafana-azure-monitor-datasource/app_insights/response_parser.ts b/public/app/plugins/datasource/grafana-azure-monitor-datasource/app_insights/response_parser.ts index 848472cf101..fa96e4a2e3e 100644 --- a/public/app/plugins/datasource/grafana-azure-monitor-datasource/app_insights/response_parser.ts +++ b/public/app/plugins/datasource/grafana-azure-monitor-datasource/app_insights/response_parser.ts @@ -199,6 +199,32 @@ export default class ResponseParser { return ResponseParser.toTextValueList(this.results.supportedGroupBy); } + parseQuerySchema() { + const result = { + Type: 'AppInsights', + Tables: {} + }; + if (this.results && this.results.data && this.results.data.Tables) { + for (let i = 0; i < this.results.data.Tables[0].Rows.length; i++) { + const column = this.results.data.Tables[0].Rows[i]; + const columnTable = column[0]; + const columnName = column[1]; + const columnType = column[2]; + if (result.Tables[columnTable]) { + result.Tables[columnTable].OrderedColumns.push({ Name: columnName, Type: columnType }); + } else { + result.Tables[columnTable] = { + Name: columnTable, + OrderedColumns: [ + { Name: columnName, Type: columnType } + ] + }; + } + } + } + return result; + } + static toTextValueList(values) { const list: any[] = []; for (let i = 0; i < values.length; i++) { diff --git a/public/app/plugins/datasource/grafana-azure-monitor-datasource/config_ctrl.ts b/public/app/plugins/datasource/grafana-azure-monitor-datasource/config_ctrl.ts index 98fe5a87a56..4ee5c94fad6 100644 --- a/public/app/plugins/datasource/grafana-azure-monitor-datasource/config_ctrl.ts +++ b/public/app/plugins/datasource/grafana-azure-monitor-datasource/config_ctrl.ts @@ -1,6 +1,6 @@ import AzureLogAnalyticsDatasource from './azure_log_analytics/azure_log_analytics_datasource'; import config from 'app/core/config'; -import { isVersionGtOrEq } from './version'; +import { isVersionGtOrEq } from 'app/core/utils/version'; export class AzureMonitorConfigCtrl { static templateUrl = 'public/app/plugins/datasource/grafana-azure-monitor-datasource/partials/config.html'; diff --git a/public/app/plugins/datasource/grafana-azure-monitor-datasource/editor/KustoQueryField.tsx b/public/app/plugins/datasource/grafana-azure-monitor-datasource/editor/KustoQueryField.tsx index 849cf62efe0..2a578176674 100644 --- a/public/app/plugins/datasource/grafana-azure-monitor-datasource/editor/KustoQueryField.tsx +++ b/public/app/plugins/datasource/grafana-azure-monitor-datasource/editor/KustoQueryField.tsx @@ -1,3 +1,4 @@ +import _ from 'lodash'; import Plain from 'slate-plain-serializer'; import QueryField from './query_field'; @@ -6,11 +7,11 @@ import QueryField from 
'./query_field'; import debounce from 'app/features/explore/utils/debounce'; import { getNextCharacter } from 'app/features/explore/utils/dom'; -import { FUNCTIONS, KEYWORDS } from './kusto'; +import { KEYWORDS, functionTokens, operatorTokens, grafanaMacros } from './kusto/kusto'; // import '../sass/editor.base.scss'; -const TYPEAHEAD_DELAY = 500; +const TYPEAHEAD_DELAY = 100; interface Suggestion { text: string; @@ -25,24 +26,46 @@ interface SuggestionGroup { skipFilter?: boolean; } +interface KustoSchema { + Databases: { + Default?: KustoDBSchema; + }; + Plugins?: any[]; +} + +interface KustoDBSchema { + Name?: string; + Functions?: any; + Tables?: any; +} + +const defaultSchema = () => ({ + Databases: { + Default: {} + } +}); + const cleanText = s => s.replace(/[{}[\]="(),!~+\-*/^%]/g, '').trim(); const wrapText = text => ({ text }); export default class KustoQueryField extends QueryField { fields: any; events: any; + schema: KustoSchema; constructor(props, context) { super(props, context); + this.schema = defaultSchema(); this.onTypeahead = debounce(this.onTypeahead, TYPEAHEAD_DELAY); } componentDidMount() { - this.updateMenu(); + super.componentDidMount(); + this.fetchSchema(); } - onTypeahead = () => { + onTypeahead = (force?: boolean) => { const selection = window.getSelection(); if (selection.anchorNode) { const wrapperNode = selection.anchorNode.parentElement; @@ -73,62 +96,77 @@ export default class KustoQueryField extends QueryField { const wrapperClasses = wrapperNode.classList; let typeaheadContext: string | null = null; + // Built-in functions if (wrapperClasses.contains('function-context')) { typeaheadContext = 'context-function'; - if (this.fields) { - suggestionGroups = this._getKeywordSuggestions(); - } else { - this._fetchFields(); - return; - } - } else if (modelPrefix.match(/(facet\s$)/i)) { - typeaheadContext = 'context-facet'; - if (this.fields) { - suggestionGroups = this._getKeywordSuggestions(); - } else { - this._fetchFields(); - return; - } - } else if (modelPrefix.match(/(,\s*$)/)) { - typeaheadContext = 'context-multiple-fields'; - if (this.fields) { - suggestionGroups = this._getKeywordSuggestions(); - } else { - this._fetchFields(); - return; - } - } else if (modelPrefix.match(/(from\s$)/i)) { - typeaheadContext = 'context-from'; - if (this.events) { - suggestionGroups = this._getKeywordSuggestions(); - } else { - this._fetchEvents(); - return; - } - } else if (modelPrefix.match(/(^select\s\w*$)/i)) { - typeaheadContext = 'context-select'; - if (this.fields) { - suggestionGroups = this._getKeywordSuggestions(); - } else { - this._fetchFields(); - return; - } - } else if (modelPrefix.match(/from\s\S+\s\w*$/i)) { - prefix = ''; - typeaheadContext = 'context-since'; - suggestionGroups = this._getKeywordSuggestions(); - // } else if (modelPrefix.match(/\d+\s\w*$/)) { - // typeaheadContext = 'context-number'; - // suggestionGroups = this._getAfterNumberSuggestions(); - } else if (modelPrefix.match(/ago\b/i) || modelPrefix.match(/facet\b/i) || modelPrefix.match(/\$__timefilter\b/i)) { - typeaheadContext = 'context-timeseries'; - suggestionGroups = this._getKeywordSuggestions(); - } else if (prefix && !wrapperClasses.contains('argument')) { - typeaheadContext = 'context-builtin'; - suggestionGroups = this._getKeywordSuggestions(); - } else if (Plain.serialize(this.state.value) === '') { + suggestionGroups = this.getColumnSuggestions(); + + // where + } else if (modelPrefix.match(/(where\s(\w+\b)?$)/i)) { + typeaheadContext = 'context-where'; + suggestionGroups = 
this.getColumnSuggestions(); + + // summarize by + } else if (modelPrefix.match(/(summarize\s(\w+\b)?$)/i)) { + typeaheadContext = 'context-summarize'; + suggestionGroups = this.getFunctionSuggestions(); + } else if (modelPrefix.match(/(summarize\s(.+\s)?by\s+([^,\s]+,\s*)*([^,\s]+\b)?$)/i)) { + typeaheadContext = 'context-summarize-by'; + suggestionGroups = this.getColumnSuggestions(); + + // order by, top X by, ... by ... + } else if (modelPrefix.match(/(by\s+([^,\s]+,\s*)*([^,\s]+\b)?$)/i)) { + typeaheadContext = 'context-by'; + suggestionGroups = this.getColumnSuggestions(); + + // join + } else if (modelPrefix.match(/(on\s(.+\b)?$)/i)) { + typeaheadContext = 'context-join-on'; + suggestionGroups = this.getColumnSuggestions(); + } else if (modelPrefix.match(/(join\s+(\(\s+)?(\w+\b)?$)/i)) { + typeaheadContext = 'context-join'; + suggestionGroups = this.getTableSuggestions(); + + // distinct + } else if (modelPrefix.match(/(distinct\s(.+\b)?$)/i)) { + typeaheadContext = 'context-distinct'; + suggestionGroups = this.getColumnSuggestions(); + + // database() + } else if (modelPrefix.match(/(database\(\"(\w+)\"\)\.(.+\b)?$)/i)) { + typeaheadContext = 'context-database-table'; + const db = this.getDBFromDatabaseFunction(modelPrefix); + console.log(db); + suggestionGroups = this.getTableSuggestions(db); + prefix = prefix.replace('.', ''); + + // new + } else if (normalizeQuery(Plain.serialize(this.state.value)).match(/^\s*\w*$/i)) { typeaheadContext = 'context-new'; - suggestionGroups = this._getInitialSuggestions(); + if (this.schema) { + suggestionGroups = this.getInitialSuggestions(); + } else { + this.fetchSchema(); + setTimeout(this.onTypeahead, 0); + return; + } + + // built-in + } else if (prefix && !wrapperClasses.contains('argument') && !force) { + // Use only last typed word as a prefix for searching + if (modelPrefix.match(/\s$/i)) { + prefix = ''; + return; + } + prefix = getLastWord(prefix); + typeaheadContext = 'context-builtin'; + suggestionGroups = this.getKeywordSuggestions(); + } else if (force === true) { + typeaheadContext = 'context-builtin-forced'; + if (modelPrefix.match(/\s$/i)) { + prefix = ''; + } + suggestionGroups = this.getKeywordSuggestions(); } let results = 0; @@ -148,6 +186,7 @@ export default class KustoQueryField extends QueryField { .filter(group => group.items.length > 0); // console.log('onTypeahead', selection.anchorNode, wrapperClasses, text, offset, prefix, typeaheadContext); + // console.log('onTypeahead', prefix, typeaheadContext, force); this.setState({ typeaheadPrefix: prefix, @@ -253,7 +292,11 @@ export default class KustoQueryField extends QueryField { // ]; // } - private _getKeywordSuggestions(): SuggestionGroup[] { + private getInitialSuggestions(): SuggestionGroup[] { + return this.getTableSuggestions(); + } + + private getKeywordSuggestions(): SuggestionGroup[] { return [ { prefixMatch: true, @@ -262,51 +305,132 @@ export default class KustoQueryField extends QueryField { }, { prefixMatch: true, - label: 'Functions', - items: FUNCTIONS.map((s: any) => { s.type = 'function'; return s; }) - } - ]; - } - - private _getInitialSuggestions(): SuggestionGroup[] { - // TODO: return datbase tables as an initial suggestion - return [ - { - prefixMatch: true, - label: 'Keywords', - items: KEYWORDS.map(wrapText) + label: 'Operators', + items: operatorTokens }, { prefixMatch: true, label: 'Functions', - items: FUNCTIONS.map((s: any) => { s.type = 'function'; return s; }) + items: functionTokens.map((s: any) => { s.type = 'function'; return s; }) + }, + 
{ + prefixMatch: true, + label: 'Macros', + items: grafanaMacros.map((s: any) => { s.type = 'function'; return s; }) + }, + { + prefixMatch: true, + label: 'Tables', + items: _.map(this.schema.Databases.Default.Tables, (t: any) => ({ text: t.Name })) } ]; } - private async _fetchEvents() { - // const query = 'events'; - // const result = await this.request(query); - - // if (result === undefined) { - // this.events = []; - // } else { - // this.events = result; - // } - // setTimeout(this.onTypeahead, 0); - - //Stub - this.events = []; + private getFunctionSuggestions(): SuggestionGroup[] { + return [ + { + prefixMatch: true, + label: 'Functions', + items: functionTokens.map((s: any) => { s.type = 'function'; return s; }) + }, + { + prefixMatch: true, + label: 'Macros', + items: grafanaMacros.map((s: any) => { s.type = 'function'; return s; }) + } + ]; } - private async _fetchFields() { - // const query = 'fields'; - // const result = await this.request(query); + getTableSuggestions(db = 'Default'): SuggestionGroup[] { + if (this.schema.Databases[db]) { + return [ + { + prefixMatch: true, + label: 'Tables', + items: _.map(this.schema.Databases[db].Tables, (t: any) => ({ text: t.Name })) + } + ]; + } else { + return []; + } + } - // this.fields = result || []; + private getColumnSuggestions(): SuggestionGroup[] { + const table = this.getTableFromContext(); + if (table) { + const tableSchema = this.schema.Databases.Default.Tables[table]; + if (tableSchema) { + return [ + { + prefixMatch: true, + label: 'Fields', + items: _.map(tableSchema.OrderedColumns, (f: any) => ({ + text: f.Name, + hint: f.Type + })) + } + ]; + } + } + return []; + } - // setTimeout(this.onTypeahead, 0); - // Stub - this.fields = []; + private getTableFromContext() { + const query = Plain.serialize(this.state.value); + const tablePattern = /^\s*(\w+)\s*|/g; + const normalizedQuery = normalizeQuery(query); + const match = tablePattern.exec(normalizedQuery); + if (match && match.length > 1 && match[0] && match[1]) { + return match[1]; + } else { + return null; + } + } + + private getDBFromDatabaseFunction(prefix: string) { + const databasePattern = /database\(\"(\w+)\"\)/gi; + const match = databasePattern.exec(prefix); + if (match && match.length > 1 && match[0] && match[1]) { + return match[1]; + } else { + return null; + } + } + + private async fetchSchema() { + let schema = await this.props.getSchema(); + if (schema) { + if (schema.Type === 'AppInsights') { + schema = castSchema(schema); + } + this.schema = schema; + } else { + this.schema = defaultSchema(); + } } } + +/** + * Cast schema from App Insights to default Kusto schema + */ +function castSchema(schema) { + const defaultSchemaTemplate = defaultSchema(); + defaultSchemaTemplate.Databases.Default = schema; + return defaultSchemaTemplate; +} + +function normalizeQuery(query: string): string { + const commentPattern = /\/\/.*$/gm; + let normalizedQuery = query.replace(commentPattern, ''); + normalizedQuery = normalizedQuery.replace('\n', ' '); + return normalizedQuery; +} + +function getLastWord(str: string): string { + const lastWordPattern = /(?:.*\s)?([^\s]+\s*)$/gi; + const match = lastWordPattern.exec(str); + if (match && match.length > 1) { + return match[1]; + } + return ''; +} diff --git a/public/app/plugins/datasource/grafana-azure-monitor-datasource/editor/editor_component.tsx b/public/app/plugins/datasource/grafana-azure-monitor-datasource/editor/editor_component.tsx index da7db58567f..bdc85f1577d 100644 --- 
a/public/app/plugins/datasource/grafana-azure-monitor-datasource/editor/editor_component.tsx +++ b/public/app/plugins/datasource/grafana-azure-monitor-datasource/editor/editor_component.tsx @@ -1,10 +1,23 @@ import KustoQueryField from './KustoQueryField'; -import Kusto from './kusto'; +import Kusto from './kusto/kusto'; import React, { Component } from 'react'; import coreModule from 'app/core/core_module'; -class Editor extends Component { +interface EditorProps { + index: number; + placeholder?: string; + change: (value: string, index: number) => void; + variables: () => string[] | string[]; + getSchema?: () => Promise; + execute?: () => void; +} + +class Editor extends Component { + static defaultProps = { + placeholder: 'Enter a query' + }; + constructor(props) { super(props); this.state = { @@ -31,7 +44,7 @@ class Editor extends Component { }; render() { - const { request, variables } = this.props; + const { variables, getSchema, placeholder } = this.props; const { edited, query } = this.state; return ( @@ -42,9 +55,9 @@ class Editor extends Component { onQueryChange={this.onChangeQuery} prismLanguage="kusto" prismDefinition={Kusto} - placeholder="Enter a query" - request={request} + placeholder={placeholder} templateVariables={variables} + getSchema={getSchema} />
); @@ -54,6 +67,9 @@ class Editor extends Component { coreModule.directive('kustoEditor', [ 'reactDirective', reactDirective => { - return reactDirective(Editor, ['change', 'database', 'execute', 'query', 'request', 'variables']); + return reactDirective(Editor, [ + 'change', 'database', 'execute', 'query', 'variables', 'placeholder', + ['getSchema', { watchDepth: 'reference' }] + ]); }, ]); diff --git a/public/app/plugins/datasource/grafana-azure-monitor-datasource/editor/kusto.ts b/public/app/plugins/datasource/grafana-azure-monitor-datasource/editor/kusto.ts deleted file mode 100644 index 647ebb8024a..00000000000 --- a/public/app/plugins/datasource/grafana-azure-monitor-datasource/editor/kusto.ts +++ /dev/null @@ -1,114 +0,0 @@ -export const FUNCTIONS = [ - { text: 'countof', display: 'countof()', hint: '' }, - { text: 'bin', display: 'bin()', hint: '' }, - { text: 'extentid', display: 'extentid()', hint: '' }, - { text: 'extract', display: 'extract()', hint: '' }, - { text: 'extractjson', display: 'extractjson()', hint: '' }, - { text: 'floor', display: 'floor()', hint: '' }, - { text: 'iif', display: 'iif()', hint: '' }, - { text: 'isnull', display: 'isnull()', hint: '' }, - { text: 'isnotnull', display: 'isnotnull()', hint: '' }, - { text: 'notnull', display: 'notnull()', hint: '' }, - { text: 'isempty', display: 'isempty()', hint: '' }, - { text: 'isnotempty', display: 'isnotempty()', hint: '' }, - { text: 'notempty', display: 'notempty()', hint: '' }, - { text: 'now', display: 'now()', hint: '' }, - { text: 're2', display: 're2()', hint: '' }, - { text: 'strcat', display: 'strcat()', hint: '' }, - { text: 'strlen', display: 'strlen()', hint: '' }, - { text: 'toupper', display: 'toupper()', hint: '' }, - { text: 'tostring', display: 'tostring()', hint: '' }, - { text: 'count', display: 'count()', hint: '' }, - { text: 'cnt', display: 'cnt()', hint: '' }, - { text: 'sum', display: 'sum()', hint: '' }, - { text: 'min', display: 'min()', hint: '' }, - { text: 'max', display: 'max()', hint: '' }, - { text: 'avg', display: 'avg()', hint: '' }, - { - text: '$__timeFilter', - display: '$__timeFilter()', - hint: 'Macro that uses the selected timerange in Grafana to filter the query.', - }, - { - text: '$__escapeMulti', - display: '$__escapeMulti()', - hint: 'Macro to escape multi-value template variables that contain illegal characters.', - }, - { text: '$__contains', display: '$__contains()', hint: 'Macro for multi-value template variables.' 
}, -]; - -export const KEYWORDS = [ - 'by', - 'on', - 'contains', - 'notcontains', - 'containscs', - 'notcontainscs', - 'startswith', - 'has', - 'matches', - 'regex', - 'true', - 'false', - 'and', - 'or', - 'typeof', - 'int', - 'string', - 'date', - 'datetime', - 'time', - 'long', - 'real', - '​boolean', - 'bool', - // add some more keywords - 'where', - 'order', -]; - -// Kusto operators -// export const OPERATORS = ['+', '-', '*', '/', '>', '<', '==', '<>', '<=', '>=', '~', '!~']; - -export const DURATION = ['SECONDS', 'MINUTES', 'HOURS', 'DAYS', 'WEEKS', 'MONTHS', 'YEARS']; - -const tokenizer = { - comment: { - pattern: /(^|[^\\:])\/\/.*/, - lookbehind: true, - greedy: true, - }, - 'function-context': { - pattern: /[a-z0-9_]+\([^)]*\)?/i, - inside: {}, - }, - duration: { - pattern: new RegExp(`${DURATION.join('?|')}?`, 'i'), - alias: 'number', - }, - builtin: new RegExp(`\\b(?:${FUNCTIONS.map(f => f.text).join('|')})(?=\\s*\\()`, 'i'), - string: { - pattern: /(["'])(?:\\(?:\r\n|[\s\S])|(?!\1)[^\\\r\n])*\1/, - greedy: true, - }, - keyword: new RegExp(`\\b(?:${KEYWORDS.join('|')}|\\*)\\b`, 'i'), - boolean: /\b(?:true|false)\b/, - number: /\b0x[\da-f]+\b|(?:\b\d+\.?\d*|\B\.\d+)(?:e[+-]?\d+)?/i, - operator: /-|\+|\*|\/|>|<|==|<=?|>=?|<>|!~|~|=|\|/, - punctuation: /[{};(),.:]/, - variable: /(\[\[(.+?)\]\])|(\$(.+?))\b/, -}; - -tokenizer['function-context'].inside = { - argument: { - pattern: /[a-z0-9_]+(?=:)/i, - alias: 'symbol', - }, - duration: tokenizer.duration, - number: tokenizer.number, - builtin: tokenizer.builtin, - string: tokenizer.string, - variable: tokenizer.variable, -}; - -export default tokenizer; diff --git a/public/app/plugins/datasource/grafana-azure-monitor-datasource/editor/kusto/kusto.ts b/public/app/plugins/datasource/grafana-azure-monitor-datasource/editor/kusto/kusto.ts new file mode 100644 index 00000000000..e2a1142597b --- /dev/null +++ b/public/app/plugins/datasource/grafana-azure-monitor-datasource/editor/kusto/kusto.ts @@ -0,0 +1,355 @@ +/* tslint:disable:max-line-length */ +export const operatorTokens = [ + { text: "!between", hint: "Matches the input that is outside the inclusive range." }, + { text: "as", hint: "Binds a name to the operator's input tabular expression." }, + { text: "between", hint: "Matches the input that is inside the inclusive range." }, + { text: "consume", hint: "The `consume` operator consumes the tabular data stream handed to it. It is\r\nmostly used for triggering the query side-effect without actually returning\r\nthe results back to the caller." }, + { text: "count", hint: "Returns the number of records in the input record set." }, + { text: "datatable", hint: "Returns a table whose schema and values are defined in the query itself." }, + { text: "distinct", hint: "Produces a table with the distinct combination of the provided columns of the input table." }, + { text: "evaluate", hint: "Invokes a service-side query extension (plugin)." }, + { text: "extend", hint: "Create calculated columns and append them to the result set." }, + { text: "externaldata", hint: "Returns a table whose schema is defined in the query itself, and whose data is read from an external raw file." }, + { text: "facet", hint: "Returns a set of tables, one for each specified column.\r\nEach table specifies the list of values taken by its column.\r\nAn additional table can be created by using the `with` clause." }, + { text: "find", hint: "Finds rows that match a predicate across a set of tables." 
}, + { text: "fork", hint: "Runs multiple consumer operators in parallel." }, + { text: "getschema", hint: "Produce a table that represents a tabular schema of the input." }, + { text: "in", hint: "Filters a recordset based on the provided set of values." }, + { text: "invoke", hint: "Invokes lambda that receives the source of `invoke` as tabular parameter argument." }, + { text: "join", hint: "Merge the rows of two tables to form a new table by matching values of the specified column(s) from each table." }, + { text: "limit", hint: "Return up to the specified number of rows." }, + { text: "make-series", hint: "Create series of specified aggregated values along specified axis." }, + { text: "mvexpand", hint: "Expands multi-value array or property bag." }, + { text: "order", hint: "Sort the rows of the input table into order by one or more columns." }, + { text: "parse", hint: "Evaluates a string expression and parses its value into one or more calculated columns." }, + { text: "print", hint: "Evaluates one or more scalar expressions and inserts the results (as a single-row table with as many columns as there are expressions) into the output." }, + { text: "project", hint: "Select the columns to include, rename or drop, and insert new computed columns." }, + { text: "project-away", hint: "Select what columns to exclude from the input." }, + { text: "project-rename", hint: "Renames columns in the result output." }, + { text: "range", hint: "Generates a single-column table of values." }, + { text: "reduce", hint: "Groups a set of strings together based on values similarity." }, + { text: "render", hint: "Instructs the user agent to render the results of the query in a particular way." }, + { text: "sample", hint: "Returns up to the specified number of random rows from the input table." }, + { text: "sample-distinct", hint: "Returns a single column that contains up to the specified number of distinct values of the requested column." }, + { text: "search", hint: "The search operator provides a multi-table/multi-column search experience." }, + { text: "serialize", hint: "Marks that order of the input row set is safe for window functions usage." }, + { text: "sort", hint: "Sort the rows of the input table into order by one or more columns." }, + { text: "summarize", hint: "Produces a table that aggregates the content of the input table." }, + { text: "take", hint: "Return up to the specified number of rows." }, + { text: "top", hint: "Returns the first *N* records sorted by the specified columns." }, + { text: "top-hitters", hint: "Returns an approximation of the first *N* results (assuming skewed distribution of the input)." }, + { text: "top-nested", hint: "Produces hierarchical top results, where each level is a drill-down based on previous level values." }, + { text: "union", hint: "Takes two or more tables and returns the rows of all of them." }, + { text: "where", hint: "Filters a table to the subset of rows that satisfy a predicate." }, +]; + +export const functionTokens = [ + { text: "abs", hint: "Calculates the absolute value of the input." }, + { text: "acos", hint: "Returns the angle whose cosine is the specified number (the inverse operation of [`cos()`](cosfunction.md)) ." }, + { text: "ago", hint: "Subtracts the given timespan from the current UTC clock time." }, + { text: "any", hint: "Returns random non-empty value from the specified expression values." 
}, + { text: "arg_max", hint: "Finds a row in the group that maximizes *ExprToMaximize*, and returns the value of *ExprToReturn* (or `*` to return the entire row)." }, + { text: "arg_min", hint: "Finds a row in the group that minimizes *ExprToMinimize*, and returns the value of *ExprToReturn* (or `*` to return the entire row)." }, + { text: "argmax", hint: "Finds a row in the group that maximizes *ExprToMaximize*, and returns the value of *ExprToReturn* (or `*` to return the entire row)." }, + { text: "argmin", hint: "Finds a row in the group that minimizes *ExprToMinimize*, and returns the value of *ExprToReturn* (or `*` to return the entire row)." }, + { text: "array_concat", hint: "Concatenates a number of dynamic arrays to a single array." }, + { text: "array_length", hint: "Calculates the number of elements in a dynamic array." }, + { text: "array_slice", hint: "Extracts a slice of a dynamic array." }, + { text: "array_split", hint: "Splits an array to multiple arrays according to the split indices and packs the generated array in a dynamic array." }, + { text: "asin", hint: "Returns the angle whose sine is the specified number (the inverse operation of [`sin()`](sinfunction.md)) ." }, + { text: "assert", hint: "Checks for a condition; if the condition is false, outputs error messages and fails the query." }, + { text: "atan", hint: "Returns the angle whose tangent is the specified number (the inverse operation of [`tan()`](tanfunction.md)) ." }, + { text: "atan2", hint: "Calculates the angle, in radians, between the positive x-axis and the ray from the origin to the point (y, x)." }, + { text: "avg", hint: "Calculates the average of *Expr* across the group." }, + { text: "avgif", hint: "Calculates the [average](avg-aggfunction.md) of *Expr* across the group for which *Predicate* evaluates to `true`." }, + { text: "bag_keys", hint: "Enumerates all the root keys in a dynamic property-bag object." }, + { text: "base64_decodestring", hint: "Decodes a base64 string to a UTF-8 string" }, + { text: "base64_encodestring", hint: "Encodes a string as base64 string" }, + { text: "beta_cdf", hint: "Returns the standard cumulative beta distribution function." }, + { text: "beta_inv", hint: "Returns the inverse of the beta cumulative probability beta density function." }, + { text: "beta_pdf", hint: "Returns the probability density beta function." }, + { text: "bin", hint: "Rounds values down to an integer multiple of a given bin size." }, + { text: "bin_at", hint: "Rounds values down to a fixed-size \'bin\', with control over the bin's starting point.\r\n(See also [`bin function`](./binfunction.md).)" }, + { text: "bin_auto", hint: "Rounds values down to a fixed-size \'bin\', with control over the bin size and starting point provided by a query property." }, + { text: "binary_and", hint: "Returns a result of the bitwise `and` operation between two values." }, + { text: "binary_not", hint: "Returns a bitwise negation of the input value." }, + { text: "binary_or", hint: "Returns a result of the bitwise `or` operation of the two values." }, + { text: "binary_shift_left", hint: "Returns binary shift left operation on a pair of numbers." }, + { text: "binary_shift_right", hint: "Returns binary shift right operation on a pair of numbers." }, + { text: "binary_xor", hint: "Returns a result of the bitwise `xor` operation of the two values." }, + { text: "buildschema", hint: "Returns the minimal schema that admits all values of *DynamicExpr*." 
}, + { text: "case", hint: "Evaluates a list of predicates and returns the first result expression whose predicate is satisfied." }, + { text: "ceiling", hint: "Calculates the smallest integer greater than, or equal to, the specified numeric expression." }, + { text: "cluster", hint: "Changes the reference of the query to a remote cluster." }, + { text: "coalesce", hint: "Evaluates a list of expressions and returns the first non-null (or non-empty for string) expression." }, + { text: "cos", hint: "Returns the cosine function." }, + { text: "cot", hint: "Calculates the trigonometric cotangent of the specified angle, in radians." }, + { text: "count", hint: "Returns a count of the records per summarization group (or in total if summarization is done without grouping)." }, + { text: "countif", hint: "Returns a count of rows for which *Predicate* evaluates to `true`." }, + { text: "countof", hint: "Counts occurrences of a substring in a string. Plain string matches may overlap; regex matches do not." }, + { text: "current_principal", hint: "Returns the current principal running this query." }, + { text: "cursor_after", hint: "A predicate over the records of a table to compare their ingestion time\r\nagainst a database cursor." }, + { text: "cursor_before_or_at", hint: "A predicate over the records of a table to compare their ingestion time\r\nagainst a database cursor." }, + { text: "database", hint: "Changes the reference of the query to a specific database within the cluster scope." }, + { text: "datetime_add", hint: "Calculates a new [datetime](./scalar-data-types/datetime.md) from a specified datepart multiplied by a specified amount, added to a specified [datetime](./scalar-data-types/datetime.md)." }, + { text: "datetime_diff", hint: "Calculates calendarian difference between two [datetime](./scalar-data-types/datetime.md) values." }, + { text: "datetime_part", hint: "Extracts the requested date part as an integer value." }, + { text: "dayofmonth", hint: "Returns the integer number representing the day number of the given month" }, + { text: "dayofweek", hint: "Returns the integer number of days since the preceding Sunday, as a `timespan`." }, + { text: "dayofyear", hint: "Returns the integer number represents the day number of the given year." }, + { text: "dcount", hint: "Returns an estimate of the number of distinct values of *Expr* in the group." }, + { text: "dcount_hll", hint: "Calculates the dcount from hll results (which was generated by [hll](hll-aggfunction.md) or [hll_merge](hll-merge-aggfunction.md))." }, + { text: "dcountif", hint: "Returns an estimate of the number of distinct values of *Expr* of rows for which *Predicate* evaluates to `true`." }, + { text: "degrees", hint: "Converts angle value in radians into value in degrees, using formula `degrees = (180 / PI ) * angle_in_radians`" }, + { text: "distance", hint: "Returns the distance between two points in meters." }, + { text: "endofday", hint: "Returns the end of the day containing the date, shifted by an offset, if provided." }, + { text: "endofmonth", hint: "Returns the end of the month containing the date, shifted by an offset, if provided." }, + { text: "endofweek", hint: "Returns the end of the week containing the date, shifted by an offset, if provided." }, + { text: "endofyear", hint: "Returns the end of the year containing the date, shifted by an offset, if provided." }, + { text: "estimate_data_size", hint: "Returns an estimated data size of the selected columns of the tabular expression." 
}, + { text: "exp", hint: "The base-e exponential function of x, which is e raised to the power x: e^x." }, + { text: "exp10", hint: "The base-10 exponential function of x, which is 10 raised to the power x: 10^x. \r\n**Syntax**" }, + { text: "exp2", hint: "The base-2 exponential function of x, which is 2 raised to the power x: 2^x." }, + { text: "extent_id", hint: "Returns a unique identifier that identifies the data shard (\"extent\") that the current record resides in." }, + { text: "extent_tags", hint: "Returns a dynamic array with the [tags](../management/extents-overview.md#extent-tagging) of the data shard (\"extent\") that the current record resides in." }, + { text: "extract", hint: "Get a match for a [regular expression](./re2.md) from a text string." }, + { text: "extract_all", hint: "Get all matches for a [regular expression](./re2.md) from a text string." }, + { text: "extractjson", hint: "Get a specified element out of a JSON text using a path expression." }, + { text: "floor", hint: "An alias for [`bin()`](binfunction.md)." }, + { text: "format_datetime", hint: "Formats a datetime parameter based on the format pattern parameter." }, + { text: "format_timespan", hint: "Formats a timespan parameter based on the format pattern parameter." }, + { text: "gamma", hint: "Computes [gamma function](https://en.wikipedia.org/wiki/Gamma_function)" }, + { text: "getmonth", hint: "Get the month number (1-12) from a datetime." }, + { text: "gettype", hint: "Returns the runtime type of its single argument." }, + { text: "getyear", hint: "Returns the year part of the `datetime` argument." }, + { text: "hash", hint: "Returns a hash value for the input value." }, + { text: "hash_sha256", hint: "Returns a sha256 hash value for the input value." }, + { text: "hll", hint: "Calculates the Intermediate results of [dcount](dcount-aggfunction.md) across the group." }, + { text: "hll_merge", hint: "Merges hll results (scalar version of the aggregate version [`hll_merge()`](hll-merge-aggfunction.md))." }, + { text: "hourofday", hint: "Returns the integer number representing the hour number of the given date" }, + { text: "iff", hint: "Evaluates the first argument (the predicate), and returns the value of either the second or third arguments, depending on whether the predicate evaluated to `true` (second) or `false` (third)." }, + { text: "iif", hint: "Evaluates the first argument (the predicate), and returns the value of either the second or third arguments, depending on whether the predicate evaluated to `true` (second) or `false` (third)." }, + { text: "indexof", hint: "Function reports the zero-based index of the first occurrence of a specified string within input string." }, + { text: "ingestion_time", hint: "Retrieves the record's `$IngestionTime` hidden `datetime` column, or null." }, + { text: "iscolumnexists", hint: "Returns a boolean value indicating if the given string argument exists in the schema produced by the preceding tabular operator." }, + { text: "isempty", hint: "Returns `true` if the argument is an empty string or is null." }, + { text: "isfinite", hint: "Returns whether input is a finite value (is neither infinite nor NaN)." }, + { text: "isinf", hint: "Returns whether input is an infinite (positive or negative) value." }, + { text: "isnan", hint: "Returns whether input is Not-a-Number (NaN) value." }, + { text: "isnotempty", hint: "Returns `true` if the argument is not an empty string nor it is a null." }, + { text: "isnotnull", hint: "Returns `true` if the argument is not null." 
}, + { text: "isnull", hint: "Evaluates its sole argument and returns a `bool` value indicating if the argument evaluates to a null value." }, + { text: "log", hint: "Returns the natural logarithm function." }, + { text: "log10", hint: "Returns the common (base-10) logarithm function." }, + { text: "log2", hint: "Returns the base-2 logarithm function." }, + { text: "loggamma", hint: "Computes log of absolute value of the [gamma function](https://en.wikipedia.org/wiki/Gamma_function)" }, + { text: "make_datetime", hint: "Creates a [datetime](./scalar-data-types/datetime.md) scalar value from the specified date and time." }, + { text: "make_dictionary", hint: "Returns a `dynamic` (JSON) property-bag (dictionary) of all the values of *Expr* in the group." }, + { text: "make_string", hint: "Returns the string generated by the Unicode characters." }, + { text: "make_timespan", hint: "Creates a [timespan](./scalar-data-types/timespan.md) scalar value from the specified time period." }, + { text: "makelist", hint: "Returns a `dynamic` (JSON) array of all the values of *Expr* in the group." }, + { text: "makeset", hint: "Returns a `dynamic` (JSON) array of the set of distinct values that *Expr* takes in the group." }, + { text: "materialize", hint: "Allows caching a sub-query result during the time of query execution in a way that other subqueries can reference the partial result." }, + { text: "max", hint: "Returns the maximum value across the group." }, + { text: "max_of", hint: "Returns the maximum value of several evaluated numeric expressions." }, + { text: "merge_tdigests", hint: "Merges tdigest results (scalar version of the aggregate version [`merge_tdigests()`](merge-tdigests-aggfunction.md))." }, + { text: "min", hint: "Returns the minimum value agross the group." }, + { text: "min_of", hint: "Returns the minimum value of several evaluated numeric expressions." }, + { text: "monthofyear", hint: "Returns the integer number represents the month number of the given year." }, + { text: "next", hint: "Returns the value of a column in a row that it at some offset following the\r\ncurrent row in a [serialized row set](./windowsfunctions.md#serialized-row-set)." }, + { text: "not", hint: "Reverses the value of its `bool` argument." }, + { text: "now", hint: "Returns the current UTC clock time, optionally offset by a given timespan.\r\nThis function can be used multiple times in a statement and the clock time being referenced will be the same for all instances." }, + { text: "pack", hint: "Creates a `dynamic` object (property bag) from a list of names and values." }, + { text: "pack_all", hint: "Creates a `dynamic` object (property bag) from all the columns of the tabular expression." }, + { text: "pack_array", hint: "Packs all input values into a dynamic array." }, + { text: "parse_ipv4", hint: "Converts input to integer (signed 64-bit) number representation." }, + { text: "parse_json", hint: "Interprets a `string` as a [JSON value](https://json.org/)) and returns the value as [`dynamic`](./scalar-data-types/dynamic.md). \r\nIt is superior to using [extractjson() function](./extractjsonfunction.md)\r\nwhen you need to extract more than one element of a JSON compound object." 
}, + { text: "parse_path", hint: "Parses a file path `string` and returns a [`dynamic`](./scalar-data-types/dynamic.md) object that contains the following parts of the path: \r\nScheme, RootPath, DirectoryPath, DirectoryName, FileName, Extension, AlternateDataStreamName.\r\nIn addition to the simple paths with both types of slashes, supports paths with schemas (e.g. \"file://...\"), shared paths (e.g. \"\\\\shareddrive\\users...\"), long paths (e.g \"\\\\?\\C:...\"\"), alternate data streams (e.g. \"file1.exe:file2.exe\")" }, + { text: "parse_url", hint: "Parses an absolute URL `string` and returns a [`dynamic`](./scalar-data-types/dynamic.md) object contains all parts of the URL (Scheme, Host, Port, Path, Username, Password, Query Parameters, Fragment)." }, + { text: "parse_urlquery", hint: "Parses a url query `string` and returns a [`dynamic`](./scalar-data-types/dynamic.md) object contains the Query parameters." }, + { text: "parse_user_agent", hint: "Interprets a user-agent string, which identifies the user's browser and provides certain system details to servers hosting the websites the user visits. The result is returned as [`dynamic`](./scalar-data-types/dynamic.md)." }, + { text: "parse_version", hint: "Converts input string representation of version to a comparable decimal number." }, + { text: "parse_xml", hint: "Interprets a `string` as a XML value, converts the value to a [JSON value](https://json.org/) and returns the value as [`dynamic`](./scalar-data-types/dynamic.md)." }, + { text: "percentile", hint: "Returns an estimate for the specified [nearest-rank percentile](#nearest-rank-percentile) of the population defined by *Expr*. \r\nThe accuracy depends on the density of population in the region of the percentile." }, + { text: "percentile_tdigest", hint: "Calculates the percentile result from tdigest results (which was generated by [tdigest](tdigest-aggfunction.md) or [merge-tdigests](merge-tdigests-aggfunction.md))" }, + { text: "percentrank_tdigest", hint: "Calculates the approximate rank of the value in a set where rank is expressed as percentage of set's size. \r\nThis function can be viewed as the inverse of the percentile." }, + { text: "pi", hint: "Returns the constant value of Pi (π)." }, + { text: "point", hint: "Returns a dynamic array representation of a point." }, + { text: "pow", hint: "Returns a result of raising to power" }, + { text: "prev", hint: "Returns the value of a column in a row that it at some offset prior to the\r\ncurrent row in a [serialized row set](./windowsfunctions.md#serialized-row-set)." }, + { text: "radians", hint: "Converts angle value in degrees into value in radians, using formula `radians = (PI / 180 ) * angle_in_degrees`" }, + { text: "rand", hint: "Returns a random number." }, + { text: "range", hint: "Generates a dynamic array holding a series of equally-spaced values." }, + { text: "repeat", hint: "Generates a dynamic array holding a series of equal values." }, + { text: "replace", hint: "Replace all regex matches with another string." }, + { text: "reverse", hint: "Function makes reverse of input string." }, + { text: "round", hint: "Returns the rounded source to the specified precision." }, + { text: "row_cumsum", hint: "Calculates the cumulative sum of a column in a [serialized row set](./windowsfunctions.md#serialized-row-set)." 
}, + { text: "row_number", hint: "Returns the current row's index in a [serialized row set](./windowsfunctions.md#serialized-row-set).\r\nThe row index starts by default at `1` for the first row, and is incremented by `1` for each additional row.\r\nOptionally, the row index can start at a different value than `1`.\r\nAdditionally, the row index may be reset according to some provided predicate." }, + { text: "series_add", hint: "Calculates the element-wise addition of two numeric series inputs." }, + { text: "series_decompose", hint: "Applies a decomposition transformation on a series." }, + { text: "series_decompose_anomalies", hint: "Anomaly Detection based on series decomposition (refer to [series_decompose()](series-decomposefunction.md))" }, + { text: "series_decompose_forecast", hint: "Forecast based on series decomposition." }, + { text: "series_divide", hint: "Calculates the element-wise division of two numeric series inputs." }, + { text: "series_equals", hint: "Calculates the element-wise equals (`==`) logic operation of two numeric series inputs." }, + { text: "series_fill_backward", hint: "Performs backward fill interpolation of missing values in a series." }, + { text: "series_fill_const", hint: "Replaces missing values in a series with a specified constant value." }, + { text: "series_fill_forward", hint: "Performs forward fill interpolation of missing values in a series." }, + { text: "series_fill_linear", hint: "Performs linear interpolation of missing values in a series." }, + { text: "series_fir", hint: "Applies a Finite Impulse Response filter on a series." }, + { text: "series_fit_2lines", hint: "Applies two segments linear regression on a series, returning multiple columns." }, + { text: "series_fit_2lines_dynamic", hint: "Applies two segments linear regression on a series, returning dynamic object." }, + { text: "series_fit_line", hint: "Applies linear regression on a series, returning multiple columns." }, + { text: "series_fit_line_dynamic", hint: "Applies linear regression on a series, returning dynamic object." }, + { text: "series_greater", hint: "Calculates the element-wise greater (`>`) logic operation of two numeric series inputs." }, + { text: "series_greater_equals", hint: "Calculates the element-wise greater or equals (`>=`) logic operation of two numeric series inputs." }, + { text: "series_iir", hint: "Applies a Infinite Impulse Response filter on a series." }, + { text: "series_less", hint: "Calculates the element-wise less (`<`) logic operation of two numeric series inputs." }, + { text: "series_less_equals", hint: "Calculates the element-wise less or equal (`<=`) logic operation of two numeric series inputs." }, + { text: "series_multiply", hint: "Calculates the element-wise multiplication of two numeric series inputs." }, + { text: "series_not_equals", hint: "Calculates the element-wise not equals (`!=`) logic operation of two numeric series inputs." }, + { text: "series_outliers", hint: "Scores anomaly points in a series." }, + { text: "series_periods_detect", hint: "Finds the most significant periods that exist in a time series." }, + { text: "series_periods_validate", hint: "Checks whether a time series contains periodic patterns of given lengths." }, + { text: "series_seasonal", hint: "Calculates the seasonal component of a series according to the detected or given seasonal period." }, + { text: "series_stats", hint: "Returns statistics for a series in multiple columns." 
}, + { text: "series_stats_dynamic", hint: "Returns statistics for a series in dynamic object." }, + { text: "series_subtract", hint: "Calculates the element-wise subtraction of two numeric series inputs." }, + { text: "sign", hint: "Sign of a numeric expression" }, + { text: "sin", hint: "Returns the sine function." }, + { text: "split", hint: "Splits a given string according to a given delimiter and returns a string array with the contained substrings." }, + { text: "sqrt", hint: "Returns the square root function." }, + { text: "startofday", hint: "Returns the start of the day containing the date, shifted by an offset, if provided." }, + { text: "startofmonth", hint: "Returns the start of the month containing the date, shifted by an offset, if provided." }, + { text: "startofweek", hint: "Returns the start of the week containing the date, shifted by an offset, if provided." }, + { text: "startofyear", hint: "Returns the start of the year containing the date, shifted by an offset, if provided." }, + { text: "stdev", hint: "Calculates the standard deviation of *Expr* across the group, considering the group as a [sample](https://en.wikipedia.org/wiki/Sample_%28statistics%29)." }, + { text: "stdevif", hint: "Calculates the [stdev](stdev-aggfunction.md) of *Expr* across the group for which *Predicate* evaluates to `true`." }, + { text: "stdevp", hint: "Calculates the standard deviation of *Expr* across the group, considering the group as a [population](https://en.wikipedia.org/wiki/Statistical_population)." }, + { text: "strcat", hint: "Concatenates between 1 and 64 arguments." }, + { text: "strcat_array", hint: "Creates a concatenated string of array values using specified delimiter." }, + { text: "strcat_delim", hint: "Concatenates between 2 and 64 arguments, with delimiter, provided as first argument." }, + { text: "strcmp", hint: "Compares two strings." }, + { text: "string_size", hint: "Returns the size, in bytes, of the input string." }, + { text: "strlen", hint: "Returns the length, in characters, of the input string." }, + { text: "strrep", hint: "Repeats given [string](./scalar-data-types/string.md) provided amount of times." }, + { text: "substring", hint: "Extracts a substring from a source string starting from some index to the end of the string." }, + { text: "sum", hint: "Calculates the sum of *Expr* across the group." }, + { text: "sumif", hint: "Returns a sum of *Expr* for which *Predicate* evaluates to `true`." }, + { text: "table", hint: "References specific table using an query-time evaluated string-expression." }, + { text: "tan", hint: "Returns the tangent function." }, + { text: "tdigest", hint: "Calculates the Intermediate results of [`percentiles()`](percentiles-aggfunction.md) across the group." }, + { text: "tdigest_merge", hint: "Merges tdigest results (scalar version of the aggregate version [`tdigest_merge()`](tdigest-merge-aggfunction.md))." }, + { text: "tobool", hint: "Converts input to boolean (signed 8-bit) representation." }, + { text: "todatetime", hint: "Converts input to [datetime](./scalar-data-types/datetime.md) scalar." }, + { text: "todecimal", hint: "Converts input to decimal number representation." }, + { text: "todouble", hint: "Converts the input to a value of type `real`. (`todouble()` and `toreal()` are synonyms.)" }, + { text: "todynamic", hint: "Interprets a `string` as a [JSON value](https://json.org/) and returns the value as [`dynamic`](./scalar-data-types/dynamic.md)." 
}, + { text: "toguid", hint: "Converts input to [`guid`](./scalar-data-types/guid.md) representation." }, + { text: "tohex", hint: "Converts input to a hexadecimal string." }, + { text: "toint", hint: "Converts input to integer (signed 32-bit) number representation." }, + { text: "tolong", hint: "Converts input to long (signed 64-bit) number representation." }, + { text: "tolower", hint: "Converts input string to lower case." }, + { text: "toscalar", hint: "Returns a scalar constant value of the evaluated expression." }, + { text: "tostring", hint: "Converts input to a string representation." }, + { text: "totimespan", hint: "Converts input to [timespan](./scalar-data-types/timespan.md) scalar." }, + { text: "toupper", hint: "Converts a string to upper case." }, + { text: "translate", hint: "Replaces a set of characters ('searchList') with another set of characters ('replacementList') in a given a string.\r\nThe function searches for characters in the 'searchList' and replaces them with the corresponding characters in 'replacementList'" }, + { text: "treepath", hint: "Enumerates all the path expressions that identify leaves in a dynamic object." }, + { text: "trim", hint: "Removes all leading and trailing matches of the specified regular expression." }, + { text: "trim_end", hint: "Removes trailing match of the specified regular expression." }, + { text: "trim_start", hint: "Removes leading match of the specified regular expression." }, + { text: "url_decode", hint: "The function converts encoded URL into a to regular URL representation." }, + { text: "url_encode", hint: "The function converts characters of the input URL into a format that can be transmitted over the Internet." }, + { text: "variance", hint: "Calculates the variance of *Expr* across the group, considering the group as a [sample](https://en.wikipedia.org/wiki/Sample_%28statistics%29)." }, + { text: "varianceif", hint: "Calculates the [variance](variance-aggfunction.md) of *Expr* across the group for which *Predicate* evaluates to `true`." }, + { text: "variancep", hint: "Calculates the variance of *Expr* across the group, considering the group as a [population](https://en.wikipedia.org/wiki/Statistical_population)." }, + { text: "weekofyear", hint: "Returns the integer number represents the week number." }, + { text: "welch_test", hint: "Computes the p_value of the [Welch-test function](https://en.wikipedia.org/wiki/Welch%27s_t-test)" }, + { text: "zip", hint: "The `zip` function accepts any number of `dynamic` arrays, and returns an\r\narray whose elements are each an array holding the elements of the input\r\narrays of the same index." }, +]; + +export const KEYWORDS = [ + 'by', + 'on', + 'contains', + 'notcontains', + 'containscs', + 'notcontainscs', + 'startswith', + 'has', + 'matches', + 'regex', + 'true', + 'false', + 'and', + 'or', + 'typeof', + 'int', + 'string', + 'date', + 'datetime', + 'time', + 'long', + 'real', + '​boolean', + 'bool', +]; + +export const grafanaMacros = [ + { text: '$__timeFilter', display: '$__timeFilter()', hint: 'Macro that uses the selected timerange in Grafana to filter the query.', }, + { text: '$__escapeMulti', display: '$__escapeMulti()', hint: 'Macro to escape multi-value template variables that contain illegal characters.', }, + { text: '$__contains', display: '$__contains()', hint: 'Macro for multi-value template variables.' 
}, +]; + +// Kusto operators +// export const OPERATORS = ['+', '-', '*', '/', '>', '<', '==', '<>', '<=', '>=', '~', '!~']; + +export const DURATION = ['SECONDS', 'MINUTES', 'HOURS', 'DAYS', 'WEEKS', 'MONTHS', 'YEARS']; + +const tokenizer = { + comment: { + pattern: /(^|[^\\:])\/\/.*/, + lookbehind: true, + greedy: true, + }, + 'function-context': { + pattern: /[a-z0-9_]+\([^)]*\)?/i, + inside: {}, + }, + duration: { + pattern: new RegExp(`${DURATION.join('?|')}?`, 'i'), + alias: 'number', + }, + builtin: new RegExp(`\\b(?:${functionTokens.map(f => f.text).join('|')})(?=\\s*\\()`, 'i'), + string: { + pattern: /(["'])(?:\\(?:\r\n|[\s\S])|(?!\1)[^\\\r\n])*\1/, + greedy: true, + }, + keyword: new RegExp(`\\b(?:${KEYWORDS.join('|')}|${operatorTokens.map(f => f.text).join('|')}|\\*)\\b`, 'i'), + boolean: /\b(?:true|false)\b/, + number: /\b0x[\da-f]+\b|(?:\b\d+\.?\d*|\B\.\d+)(?:e[+-]?\d+)?/i, + operator: /-|\+|\*|\/|>|<|==|<=?|>=?|<>|!~|~|=|\|/, + punctuation: /[{};(),.:]/, + variable: /(\[\[(.+?)\]\])|(\$(.+?))\b/, +}; + +tokenizer['function-context'].inside = { + argument: { + pattern: /[a-z0-9_]+(?=:)/i, + alias: 'symbol', + }, + duration: tokenizer.duration, + number: tokenizer.number, + builtin: tokenizer.builtin, + string: tokenizer.string, + variable: tokenizer.variable, +}; + +// console.log(tokenizer.builtin); + +export default tokenizer; + +// function escapeRegExp(str: string): string { +// return str.replace(/[.*+?^${}()|[\]\\]/g, '\\$&'); // $& means the whole matched string +// } diff --git a/public/app/plugins/datasource/grafana-azure-monitor-datasource/editor/query_field.tsx b/public/app/plugins/datasource/grafana-azure-monitor-datasource/editor/query_field.tsx index 1c883a40c31..f24fd9e9dd1 100644 --- a/public/app/plugins/datasource/grafana-azure-monitor-datasource/editor/query_field.tsx +++ b/public/app/plugins/datasource/grafana-azure-monitor-datasource/editor/query_field.tsx @@ -1,14 +1,11 @@ -import PluginPrism from './slate-plugins/prism'; -// import PluginPrism from 'slate-prism'; -// import Prism from 'prismjs'; - +import PluginPrism from 'app/features/explore/slate-plugins/prism'; import BracesPlugin from 'app/features/explore/slate-plugins/braces'; import ClearPlugin from 'app/features/explore/slate-plugins/clear'; -// Custom plugins (new line on Enter and run on Shift+Enter) -import NewlinePlugin from './slate-plugins/newline'; -import RunnerPlugin from './slate-plugins/runner'; +import NewlinePlugin from 'app/features/explore/slate-plugins/newline'; +import RunnerPlugin from 'app/features/explore/slate-plugins/runner'; import Typeahead from './typeahead'; +import { getKeybindingSrv, KeybindingSrv } from 'app/core/services/keybindingSrv'; import { Block, Document, Text, Value } from 'slate'; import { Editor } from 'slate-react'; @@ -61,6 +58,7 @@ class QueryField extends React.Component { menuEl: any; plugins: any; resetTimer: any; + keybindingSrv: KeybindingSrv = getKeybindingSrv(); constructor(props, context) { super(props, context); @@ -90,6 +88,7 @@ class QueryField extends React.Component { } componentWillUnmount() { + this.restoreEscapeKeyBinding(); clearTimeout(this.resetTimer); } @@ -101,11 +100,11 @@ class QueryField extends React.Component { const changed = value.document !== this.state.value.document; this.setState({ value }, () => { if (changed) { + // call typeahead only if query changed + requestAnimationFrame(() => this.onTypeahead()); this.onChangeQuery(); } }); - - window.requestAnimationFrame(this.onTypeahead); }; request = (url?) 
=> { @@ -140,7 +139,7 @@ class QueryField extends React.Component { case ' ': { if (event.ctrlKey) { event.preventDefault(); - this.onTypeahead(); + this.onTypeahead(true); return true; } break; @@ -218,6 +217,7 @@ class QueryField extends React.Component { if (onBlur) { onBlur(); } + this.restoreEscapeKeyBinding(); }; handleFocus = () => { @@ -225,8 +225,18 @@ class QueryField extends React.Component { if (onFocus) { onFocus(); } + // Don't go back to dashboard if Escape pressed inside the editor. + this.removeEscapeKeyBinding(); }; + removeEscapeKeyBinding() { + this.keybindingSrv.unbind('esc', 'keydown'); + } + + restoreEscapeKeyBinding() { + this.keybindingSrv.setupGlobal(); + } + onClickItem = item => { const { suggestions } = this.state; if (!suggestions || suggestions.length === 0) { @@ -269,12 +279,18 @@ class QueryField extends React.Component { const rect = node.parentElement.getBoundingClientRect(); const scrollX = window.scrollX; const scrollY = window.scrollY; + const screenHeight = window.innerHeight; + + const menuLeft = rect.left + scrollX - 2; + const menuTop = rect.top + scrollY + rect.height + 4; + const menuHeight = screenHeight - menuTop - 10; // Write DOM requestAnimationFrame(() => { menu.style.opacity = 1; - menu.style.top = `${rect.top + scrollY + rect.height + 4}px`; - menu.style.left = `${rect.left + scrollX - 2}px`; + menu.style.top = `${menuTop}px`; + menu.style.left = `${menuLeft}px`; + menu.style.maxHeight = `${menuHeight}px`; }); } }; diff --git a/public/app/plugins/datasource/grafana-azure-monitor-datasource/editor/slate-plugins/newline.ts b/public/app/plugins/datasource/grafana-azure-monitor-datasource/editor/slate-plugins/newline.ts deleted file mode 100644 index d484d93a542..00000000000 --- a/public/app/plugins/datasource/grafana-azure-monitor-datasource/editor/slate-plugins/newline.ts +++ /dev/null @@ -1,35 +0,0 @@ -function getIndent(text) { - let offset = text.length - text.trimLeft().length; - if (offset) { - let indent = text[0]; - while (--offset) { - indent += text[0]; - } - return indent; - } - return ''; -} - -export default function NewlinePlugin() { - return { - onKeyDown(event, change) { - const { value } = change; - if (!value.isCollapsed) { - return undefined; - } - - if (event.key === 'Enter' && !event.shiftKey) { - event.preventDefault(); - - const { startBlock } = value; - const currentLineText = startBlock.text; - const indent = getIndent(currentLineText); - - return change - .splitBlock() - .insertText(indent) - .focus(); - } - }, - }; -} diff --git a/public/app/plugins/datasource/grafana-azure-monitor-datasource/editor/slate-plugins/runner.ts b/public/app/plugins/datasource/grafana-azure-monitor-datasource/editor/slate-plugins/runner.ts deleted file mode 100644 index 068bd9f0ad1..00000000000 --- a/public/app/plugins/datasource/grafana-azure-monitor-datasource/editor/slate-plugins/runner.ts +++ /dev/null @@ -1,14 +0,0 @@ -export default function RunnerPlugin({ handler }) { - return { - onKeyDown(event) { - // Handle enter - if (handler && event.key === 'Enter' && event.shiftKey) { - // Submit on Enter - event.preventDefault(); - handler(event); - return true; - } - return undefined; - }, - }; -} diff --git a/public/app/plugins/datasource/grafana-azure-monitor-datasource/partials/query.editor.html b/public/app/plugins/datasource/grafana-azure-monitor-datasource/partials/query.editor.html index 49f02ec8355..6299947b30a 100644 --- a/public/app/plugins/datasource/grafana-azure-monitor-datasource/partials/query.editor.html +++ 
b/public/app/plugins/datasource/grafana-azure-monitor-datasource/partials/query.editor.html @@ -124,12 +124,11 @@
@@ -285,9 +284,20 @@
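For reference, a minimal sketch (not part of the diff) of the schema shape that the new App Insights `getQuerySchema()`/`parseQuerySchema()` code produces, and of how `castSchema()` in KustoQueryField wraps it into the Kusto schema the typeahead reads from. The table and column names below, and the small helper interfaces, are invented for illustration only.

```ts
// Illustration only: sample table/column names are invented; real rows come
// from the App Insights /query/schema endpoint.
interface KustoColumn { Name: string; Type: string; }
interface KustoTable { Name: string; OrderedColumns: KustoColumn[]; }

// Shape built by ResponseParser.parseQuerySchema(): one entry per table,
// with that table's columns accumulated into OrderedColumns.
const appInsightsSchema: { Type: string; Tables: { [table: string]: KustoTable } } = {
  Type: 'AppInsights',
  Tables: {
    requests: {
      Name: 'requests',
      OrderedColumns: [
        { Name: 'timestamp', Type: 'datetime' },
        { Name: 'duration', Type: 'real' },
      ],
    },
  },
};

// castSchema() then exposes it as the "Default" database of a Kusto schema,
// which getTableSuggestions()/getColumnSuggestions() read from.
const kustoSchema = { Databases: { Default: appInsightsSchema } };

// e.g. table suggestions are derived from Databases.Default.Tables:
console.log(Object.keys(kustoSchema.Databases.Default.Tables)); // ['requests']
```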
diff --git a/public/app/plugins/datasource/grafana-azure-monitor-datasource/plugin.json b/public/app/plugins/datasource/grafana-azure-monitor-datasource/plugin.json index 76a56f2baaa..e4f48c581e3 100644 --- a/public/app/plugins/datasource/grafana-azure-monitor-datasource/plugin.json +++ b/public/app/plugins/datasource/grafana-azure-monitor-datasource/plugin.json @@ -158,5 +158,6 @@ }, "metrics": true, - "annotations": true + "annotations": true, + "alerting": true } diff --git a/public/app/plugins/datasource/grafana-azure-monitor-datasource/query_ctrl.ts b/public/app/plugins/datasource/grafana-azure-monitor-datasource/query_ctrl.ts index fd42c172f11..cee67d11ab3 100644 --- a/public/app/plugins/datasource/grafana-azure-monitor-datasource/query_ctrl.ts +++ b/public/app/plugins/datasource/grafana-azure-monitor-datasource/query_ctrl.ts @@ -304,7 +304,7 @@ export class AzureMonitorQueryCtrl extends QueryCtrl { /* Azure Log Analytics */ - getWorkspaces() { + getWorkspaces = () => { return this.datasource.azureLogAnalyticsDatasource .getWorkspaces() .then(list => { @@ -316,7 +316,7 @@ export class AzureMonitorQueryCtrl extends QueryCtrl { .catch(this.handleQueryCtrlError.bind(this)); } - getAzureLogAnalyticsSchema() { + getAzureLogAnalyticsSchema = () => { return this.getWorkspaces() .then(() => { return this.datasource.azureLogAnalyticsDatasource.getSchema(this.target.azureLogAnalytics.workspace); @@ -345,6 +345,7 @@ export class AzureMonitorQueryCtrl extends QueryCtrl { } return interval; } + getAppInsightsMetricNames() { if (!this.datasource.appInsightsDatasource.isConfigured()) { return; @@ -377,6 +378,19 @@ export class AzureMonitorQueryCtrl extends QueryCtrl { .catch(this.handleQueryCtrlError.bind(this)); } + onAppInsightsQueryChange = (nextQuery: string) => { + this.target.appInsights.rawQueryString = nextQuery; + } + + onAppInsightsQueryExecute = () => { + return this.refresh(); + } + + getAppInsightsQuerySchema = () => { + return this.datasource.appInsightsDatasource.getQuerySchema() + .catch(this.handleQueryCtrlError.bind(this)); + } + getAppInsightsGroupBySegments(query) { return _.map(this.target.appInsights.groupByOptions, option => { return { text: option, value: option }; diff --git a/public/app/plugins/datasource/grafana-azure-monitor-datasource/version.test.ts b/public/app/plugins/datasource/grafana-azure-monitor-datasource/version.test.ts deleted file mode 100644 index 17a6ce9bb0b..00000000000 --- a/public/app/plugins/datasource/grafana-azure-monitor-datasource/version.test.ts +++ /dev/null @@ -1,53 +0,0 @@ -import { SemVersion, isVersionGtOrEq } from './version'; - -describe('SemVersion', () => { - let version = '1.0.0-alpha.1'; - - describe('parsing', () => { - it('should parse version properly', () => { - const semver = new SemVersion(version); - expect(semver.major).toBe(1); - expect(semver.minor).toBe(0); - expect(semver.patch).toBe(0); - expect(semver.meta).toBe('alpha.1'); - }); - }); - - describe('comparing', () => { - beforeEach(() => { - version = '3.4.5'; - }); - - it('should detect greater version properly', () => { - const semver = new SemVersion(version); - const cases = [ - { value: '3.4.5', expected: true }, - { value: '3.4.4', expected: true }, - { value: '3.4.6', expected: false }, - { value: '4', expected: false }, - { value: '3.5', expected: false }, - ]; - cases.forEach(testCase => { - expect(semver.isGtOrEq(testCase.value)).toBe(testCase.expected); - }); - }); - }); - - describe('isVersionGtOrEq', () => { - it('should compare versions properly (a >= b)', () 
=> { - const cases = [ - { values: ['3.4.5', '3.4.5'], expected: true }, - { values: ['3.4.5', '3.4.4'], expected: true }, - { values: ['3.4.5', '3.4.6'], expected: false }, - { values: ['3.4', '3.4.0'], expected: true }, - { values: ['3', '3.0.0'], expected: true }, - { values: ['3.1.1-beta1', '3.1'], expected: true }, - { values: ['3.4.5', '4'], expected: false }, - { values: ['3.4.5', '3.5'], expected: false }, - ]; - cases.forEach(testCase => { - expect(isVersionGtOrEq(testCase.values[0], testCase.values[1])).toBe(testCase.expected); - }); - }); - }); -}); diff --git a/public/app/plugins/datasource/grafana-azure-monitor-datasource/version.ts b/public/app/plugins/datasource/grafana-azure-monitor-datasource/version.ts deleted file mode 100644 index 1131e1d2ab8..00000000000 --- a/public/app/plugins/datasource/grafana-azure-monitor-datasource/version.ts +++ /dev/null @@ -1,34 +0,0 @@ -import _ from 'lodash'; - -const versionPattern = /^(\d+)(?:\.(\d+))?(?:\.(\d+))?(?:-([0-9A-Za-z\.]+))?/; - -export class SemVersion { - major: number; - minor: number; - patch: number; - meta: string; - - constructor(version: string) { - const match = versionPattern.exec(version); - if (match) { - this.major = Number(match[1]); - this.minor = Number(match[2] || 0); - this.patch = Number(match[3] || 0); - this.meta = match[4]; - } - } - - isGtOrEq(version: string): boolean { - const compared = new SemVersion(version); - return !(this.major < compared.major || this.minor < compared.minor || this.patch < compared.patch); - } - - isValid(): boolean { - return _.isNumber(this.major); - } -} - -export function isVersionGtOrEq(a: string, b: string): boolean { - const aSemver = new SemVersion(a); - return aSemver.isGtOrEq(b); -} diff --git a/public/app/routes/GrafanaCtrl.ts b/public/app/routes/GrafanaCtrl.ts index a6d97856e74..d327bc0cf7d 100644 --- a/public/app/routes/GrafanaCtrl.ts +++ b/public/app/routes/GrafanaCtrl.ts @@ -12,6 +12,7 @@ import appEvents from 'app/core/app_events'; import { BackendSrv, setBackendSrv } from 'app/core/services/backend_srv'; import { TimeSrv, setTimeSrv } from 'app/features/dashboard/services/TimeSrv'; import { DatasourceSrv, setDatasourceSrv } from 'app/features/plugins/datasource_srv'; +import { KeybindingSrv, setKeybindingSrv } from 'app/core/services/keybindingSrv'; import { AngularLoader, setAngularLoader } from 'app/core/services/AngularLoader'; import { configureStore } from 'app/store/configureStore'; @@ -30,6 +31,7 @@ export class GrafanaCtrl { backendSrv: BackendSrv, timeSrv: TimeSrv, datasourceSrv: DatasourceSrv, + keybindingSrv: KeybindingSrv, angularLoader: AngularLoader ) { // make angular loader service available to react components @@ -37,6 +39,7 @@ export class GrafanaCtrl { setBackendSrv(backendSrv); setDatasourceSrv(datasourceSrv); setTimeSrv(timeSrv); + setKeybindingSrv(keybindingSrv); configureStore(); $scope.init = () => { diff --git a/yarn.lock b/yarn.lock index df2e1cea37e..2fb4a5d3ee2 100644 --- a/yarn.lock +++ b/yarn.lock @@ -1040,6 +1040,20 @@ resolved "https://registry.yarnpkg.com/@emotion/weak-memoize/-/weak-memoize-0.1.3.tgz#b700d97385fa91affed60c71dfd51c67e9dad762" integrity sha512-QsYGKdhhuDFNq7bjm2r44y0mp5xW3uO3csuTPDWZc0OIiMQv+AIY5Cqwd4mJiC5N8estVl7qlvOx1hbtOuUWbw== +"@iamstarkov/listr-update-renderer@0.4.1": + version "0.4.1" + resolved "https://registry.yarnpkg.com/@iamstarkov/listr-update-renderer/-/listr-update-renderer-0.4.1.tgz#d7c48092a2dcf90fd672b6c8b458649cb350c77e" + integrity 
sha512-IJyxQWsYDEkf8C8QthBn5N8tIUR9V9je6j3sMIpAkonaadjbvxmRC6RAhpa3RKxndhNnU2M6iNbtJwd7usQYIA== + dependencies: + chalk "^1.1.3" + cli-truncate "^0.2.1" + elegant-spinner "^1.0.1" + figures "^1.7.0" + indent-string "^3.0.0" + log-symbols "^1.0.2" + log-update "^2.3.0" + strip-ansi "^3.0.1" + "@icons/material@^0.2.4": version "0.2.4" resolved "https://registry.yarnpkg.com/@icons/material/-/material-0.2.4.tgz#e90c9f71768b3736e76d7dd6783fc6c2afa88bc8" @@ -2468,7 +2482,7 @@ ansi-colors@^3.0.0: resolved "https://registry.yarnpkg.com/ansi-colors/-/ansi-colors-3.2.3.tgz#57d35b8686e851e2cc04c403f1c00203976a1813" integrity sha512-LEHHyuhlPY3TmuUYMh2oz89lTShfvgbmzaBcxve9t/9Wuy7Dwf4yoAKcND7KFT1HAQfqZ12qtc+DUrBMeKF9nw== -ansi-escapes@^1.0.0, ansi-escapes@^1.1.0: +ansi-escapes@^1.1.0: version "1.4.0" resolved "https://registry.yarnpkg.com/ansi-escapes/-/ansi-escapes-1.4.0.tgz#d3a8a83b319aa67793662b13e761c7911422306e" integrity sha1-06ioOzGapneTZisT52HHkRQiMG4= @@ -2525,11 +2539,6 @@ ansistyles@~0.1.3: resolved "https://registry.yarnpkg.com/ansistyles/-/ansistyles-0.1.3.tgz#5de60415bda071bb37127854c864f41b23254539" integrity sha1-XeYEFb2gcbs3EnhUyGT0GyMlRTk= -any-observable@^0.2.0: - version "0.2.0" - resolved "https://registry.yarnpkg.com/any-observable/-/any-observable-0.2.0.tgz#c67870058003579009083f54ac0abafb5c33d242" - integrity sha1-xnhwBYADV5AJCD9UrAq6+1wz0kI= - any-observable@^0.3.0: version "0.3.0" resolved "https://registry.yarnpkg.com/any-observable/-/any-observable-0.3.0.tgz#af933475e5806a67d0d7df090dd5e8bef65d119b" @@ -2548,11 +2557,6 @@ app-root-dir@^1.0.2: resolved "https://registry.yarnpkg.com/app-root-dir/-/app-root-dir-1.0.2.tgz#38187ec2dea7577fff033ffcb12172692ff6e118" integrity sha1-OBh+wt6nV3//Az/8sSFyaS/24Rg= -app-root-path@^2.0.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/app-root-path/-/app-root-path-2.1.0.tgz#98bf6599327ecea199309866e8140368fd2e646a" - integrity sha1-mL9lmTJ+zqGZMJhm6BQDaP0uZGo= - append-transform@^0.4.0: version "0.4.0" resolved "https://registry.yarnpkg.com/append-transform/-/append-transform-0.4.0.tgz#d76ebf8ca94d276e247a36bad44a4b74ab611991" @@ -4588,7 +4592,7 @@ chalk@^1.0.0, chalk@^1.1.1, chalk@^1.1.3, chalk@~1.1.1: strip-ansi "^3.0.0" supports-color "^2.0.0" -chalk@^2.0.0, chalk@^2.0.1, chalk@^2.1.0, chalk@^2.3.0, chalk@^2.3.2, chalk@^2.4.1, chalk@^2.4.2: +chalk@^2.0.0, chalk@^2.0.1, chalk@^2.1.0, chalk@^2.3.0, chalk@^2.3.1, chalk@^2.3.2, chalk@^2.4.1, chalk@^2.4.2: version "2.4.2" resolved "https://registry.yarnpkg.com/chalk/-/chalk-2.4.2.tgz#cd42541677a54333cf541a49108c1432b44c9424" integrity sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ== @@ -4783,7 +4787,7 @@ cli-columns@^3.1.2: string-width "^2.0.0" strip-ansi "^3.0.1" -cli-cursor@^1.0.1, cli-cursor@^1.0.2: +cli-cursor@^1.0.1: version "1.0.2" resolved "https://registry.yarnpkg.com/cli-cursor/-/cli-cursor-1.0.2.tgz#64da3f7d56a54412e59794bd62dc35295e8f2987" integrity sha1-ZNo/fValRBLll5S9Ytw1KV6PKYc= @@ -4797,11 +4801,6 @@ cli-cursor@^2.0.0, cli-cursor@^2.1.0: dependencies: restore-cursor "^2.0.0" -cli-spinners@^0.1.2: - version "0.1.2" - resolved "https://registry.yarnpkg.com/cli-spinners/-/cli-spinners-0.1.2.tgz#bb764d88e185fb9e1e6a2a1f19772318f605e31c" - integrity sha1-u3ZNiOGF+54eaiofGXcjGPYF4xw= - cli-table2@~0.2.0: version "0.2.0" resolved "https://registry.yarnpkg.com/cli-table2/-/cli-table2-0.2.0.tgz#2d1ef7f218a0e786e214540562d4bd177fe32d97" @@ -5075,7 +5074,7 @@ comma-separated-tokens@^1.0.0: dependencies: trim "0.0.1" 
-commander@2, commander@^2.11.0, commander@^2.12.1, commander@^2.13.0, commander@^2.19.0, commander@^2.8.1, commander@^2.9.0: +commander@2, commander@^2.12.1, commander@^2.13.0, commander@^2.14.1, commander@^2.19.0, commander@^2.8.1, commander@^2.9.0: version "2.19.0" resolved "https://registry.yarnpkg.com/commander/-/commander-2.19.0.tgz#f6198aa84e5b83c46054b94ddedbfed5ee9ff12a" integrity sha512-6tvAOO+D6OENvRAh524Dh9jcfKTYDQAqvqezbCW82xj5X0pSrcpxtvRKHLG0yBY6SD7PSDrJaj+0AiOcKVd1Xg== @@ -5312,7 +5311,7 @@ cosmiconfig@^4.0.0: parse-json "^4.0.0" require-from-string "^2.0.1" -cosmiconfig@^5.0.5, cosmiconfig@^5.0.7: +cosmiconfig@^5.0.2, cosmiconfig@^5.0.5, cosmiconfig@^5.0.7: version "5.0.7" resolved "https://registry.yarnpkg.com/cosmiconfig/-/cosmiconfig-5.0.7.tgz#39826b292ee0d78eda137dfa3173bd1c21a43b04" integrity sha512-PcLqxTKiDmNT6pSpy4N6KtuPwb53W+2tzNvwOZw0WH9N6O0vLIBq0x8aj8Oj75ere4YcGi48bDFCL+3fRJdlNA== @@ -6085,7 +6084,7 @@ debug@^3.1.0, debug@^3.2.5: dependencies: ms "^2.1.1" -debug@^4.1.0: +debug@^4.0.1, debug@^4.1.0: version "4.1.1" resolved "https://registry.yarnpkg.com/debug/-/debug-4.1.1.tgz#3b72260255109c6b589cee050f1d516139664791" integrity sha512-pYAIzeRo8J6KPEaJ0VWOh5Pzkbw/RetuzehGM7QRRX5he4fPHx2rdKMB256ehJCkX+XRQm16eZLqLNS8RSZXZw== @@ -6915,7 +6914,7 @@ escape-html@^1.0.3, escape-html@~1.0.3: resolved "https://registry.yarnpkg.com/escape-html/-/escape-html-1.0.3.tgz#0258eae4d3d0c0974de1c169188ef0051d1d1988" integrity sha1-Aljq5NPQwJdN4cFpGI7wBR0dGYg= -escape-string-regexp@1.0.5, escape-string-regexp@^1.0.2, escape-string-regexp@^1.0.5: +escape-string-regexp@1.0.5, escape-string-regexp@^1.0.2, escape-string-regexp@^1.0.4, escape-string-regexp@^1.0.5: version "1.0.5" resolved "https://registry.yarnpkg.com/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz#1b61c0562190a8dff6ae3bb2cf0200ca130b86d4" integrity sha1-G2HAViGQqN/2rjuyzwIAyhMLhtQ= @@ -7129,19 +7128,6 @@ execa@^0.7.0: signal-exit "^3.0.0" strip-eof "^1.0.0" -execa@^0.8.0: - version "0.8.0" - resolved "https://registry.yarnpkg.com/execa/-/execa-0.8.0.tgz#d8d76bbc1b55217ed190fd6dd49d3c774ecfc8da" - integrity sha1-2NdrvBtVIX7RkP1t1J08d07PyNo= - dependencies: - cross-spawn "^5.0.1" - get-stream "^3.0.0" - is-stream "^1.1.0" - npm-run-path "^2.0.0" - p-finally "^1.0.0" - signal-exit "^3.0.0" - strip-eof "^1.0.0" - execa@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/execa/-/execa-1.0.0.tgz#c6236a5bb4df6d6f15e88e7f017798216749ddd8" @@ -7631,6 +7617,11 @@ flush-write-stream@^1.0.0: inherits "^2.0.1" readable-stream "^2.0.4" +fn-name@~2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/fn-name/-/fn-name-2.0.1.tgz#5214d7537a4d06a4a301c0cc262feb84188002e7" + integrity sha1-UhTXU3pNBqSjAcDMJi/rhBiAAuc= + follow-redirects@^1.0.0, follow-redirects@^1.2.5: version "1.6.1" resolved "https://registry.yarnpkg.com/follow-redirects/-/follow-redirects-1.6.1.tgz#514973c44b5757368bad8bddfe52f81f015c94cb" @@ -7848,6 +7839,15 @@ fuse.js@^3.0.1, fuse.js@^3.3.0: resolved "https://registry.yarnpkg.com/fuse.js/-/fuse.js-3.3.0.tgz#1e4fe172a60687230fb54a5cb247eb96e2e7e885" integrity sha512-ESBRkGLWMuVkapqYCcNO1uqMg5qbCKkgb+VS6wsy17Rix0/cMS9kSOZoYkjH8Ko//pgJ/EEGu0GTjk2mjX2LGQ== +g-status@^2.0.2: + version "2.0.2" + resolved "https://registry.yarnpkg.com/g-status/-/g-status-2.0.2.tgz#270fd32119e8fc9496f066fe5fe88e0a6bc78b97" + integrity sha512-kQoE9qH+T1AHKgSSD0Hkv98bobE90ILQcXAF4wvGgsr7uFqNvwmh8j+Lq3l0RVt3E3HjSbv2B9biEGcEtpHLCA== + dependencies: + arrify "^1.0.1" + matcher "^1.0.0" + simple-git "^1.85.0" + 
gauge@~2.7.3: version "2.7.4" resolved "https://registry.yarnpkg.com/gauge/-/gauge-2.7.4.tgz#2c03405c7538c39d7eb37b317022e325fb018bf7" @@ -9497,13 +9497,6 @@ is-object@^1.0.1: resolved "https://registry.yarnpkg.com/is-object/-/is-object-1.0.1.tgz#8952688c5ec2ffd6b03ecc85e769e02903083470" integrity sha1-iVJojF7C/9awPsyF52ngKQMINHA= -is-observable@^0.2.0: - version "0.2.0" - resolved "https://registry.yarnpkg.com/is-observable/-/is-observable-0.2.0.tgz#b361311d83c6e5d726cabf5e250b0237106f5ae2" - integrity sha1-s2ExHYPG5dcmyr9eJQsCNxBvWuI= - dependencies: - symbol-observable "^0.2.2" - is-observable@^1.1.0: version "1.1.0" resolved "https://registry.yarnpkg.com/is-observable/-/is-observable-1.1.0.tgz#b3e986c8f44de950867cab5403f5a3465005975e" @@ -9917,11 +9910,6 @@ jest-environment-node@^23.4.0: jest-mock "^23.2.0" jest-util "^23.4.0" -jest-get-type@^21.2.0: - version "21.2.0" - resolved "https://registry.yarnpkg.com/jest-get-type/-/jest-get-type-21.2.0.tgz#f6376ab9db4b60d81e39f30749c6c466f40d4a23" - integrity sha512-y2fFw3C+D0yjNSDp7ab1kcd6NUYfy3waPTlD8yWkAtiocJdBRQqNoRqVfMNxgj+IjT0V5cBIHJO0z9vuSSZ43Q== - jest-get-type@^22.1.0: version "22.4.3" resolved "https://registry.yarnpkg.com/jest-get-type/-/jest-get-type-22.4.3.tgz#e3a8504d8479342dd4420236b322869f18900ce4" @@ -10094,16 +10082,6 @@ jest-util@^23.4.0: slash "^1.0.0" source-map "^0.6.0" -jest-validate@^21.1.0: - version "21.2.1" - resolved "https://registry.yarnpkg.com/jest-validate/-/jest-validate-21.2.1.tgz#cc0cbca653cd54937ba4f2a111796774530dd3c7" - integrity sha512-k4HLI1rZQjlU+EC682RlQ6oZvLrE5SCh3brseQc24vbZTxzT/k/3urar5QMCVgjadmSO7lECeGdc6YxnM3yEGg== - dependencies: - chalk "^2.0.1" - jest-get-type "^21.2.0" - leven "^2.1.0" - pretty-format "^21.2.1" - jest-validate@^23.6.0: version "23.6.0" resolved "https://registry.yarnpkg.com/jest-validate/-/jest-validate-23.6.0.tgz#36761f99d1ed33fcd425b4e4c5595d62b6597474" @@ -10566,51 +10544,42 @@ libnpx@^10.2.0: y18n "^4.0.0" yargs "^11.0.0" -lint-staged@^6.0.0: - version "6.1.1" - resolved "https://registry.yarnpkg.com/lint-staged/-/lint-staged-6.1.1.tgz#cd08c4d9b8ccc2d37198d1c47ce77d22be6cf324" - integrity sha512-M/7bwLdXbeG7ZNLcasGeLMBDg60/w6obj3KOtINwJyxAxb53XGY0yH5FSZlWklEzuVbTtqtIfAajh6jYIN90AA== +lint-staged@^8.1.3: + version "8.1.3" + resolved "https://registry.yarnpkg.com/lint-staged/-/lint-staged-8.1.3.tgz#bb069db5466c0fe16710216e633a84f2b362fa60" + integrity sha512-6TGkikL1B+6mIOuSNq2TV6oP21IhPMnV8q0cf9oYZ296ArTVNcbFh1l1pfVOHHbBIYLlziWNsQ2q45/ffmJ4AA== dependencies: - app-root-path "^2.0.0" - chalk "^2.1.0" - commander "^2.11.0" - cosmiconfig "^4.0.0" + "@iamstarkov/listr-update-renderer" "0.4.1" + chalk "^2.3.1" + commander "^2.14.1" + cosmiconfig "^5.0.2" debug "^3.1.0" dedent "^0.7.0" - execa "^0.8.0" + del "^3.0.0" + execa "^1.0.0" find-parent-dir "^0.3.0" + g-status "^2.0.2" is-glob "^4.0.0" - jest-validate "^21.1.0" - listr "^0.13.0" - lodash "^4.17.4" - log-symbols "^2.0.0" - minimatch "^3.0.0" + is-windows "^1.0.2" + listr "^0.14.2" + lodash "^4.17.5" + log-symbols "^2.2.0" + micromatch "^3.1.8" npm-which "^3.0.1" p-map "^1.1.1" path-is-inside "^1.0.2" pify "^3.0.0" - staged-git-files "1.0.0" - stringify-object "^3.2.0" + please-upgrade-node "^3.0.2" + staged-git-files "1.1.2" + string-argv "^0.0.2" + stringify-object "^3.2.2" + yup "^0.26.10" listr-silent-renderer@^1.1.1: version "1.1.1" resolved "https://registry.yarnpkg.com/listr-silent-renderer/-/listr-silent-renderer-1.1.1.tgz#924b5a3757153770bf1a8e3fbf74b8bbf3f9242e" integrity sha1-kktaN1cVN3C/Go4/v3S4u/P5JC4= 
-listr-update-renderer@^0.4.0: - version "0.4.0" - resolved "https://registry.yarnpkg.com/listr-update-renderer/-/listr-update-renderer-0.4.0.tgz#344d980da2ca2e8b145ba305908f32ae3f4cc8a7" - integrity sha1-NE2YDaLKLosUW6MFkI8yrj9MyKc= - dependencies: - chalk "^1.1.3" - cli-truncate "^0.2.1" - elegant-spinner "^1.0.1" - figures "^1.7.0" - indent-string "^3.0.0" - log-symbols "^1.0.2" - log-update "^1.0.2" - strip-ansi "^3.0.1" - listr-update-renderer@^0.5.0: version "0.5.0" resolved "https://registry.yarnpkg.com/listr-update-renderer/-/listr-update-renderer-0.5.0.tgz#4ea8368548a7b8aecb7e06d8c95cb45ae2ede6a2" @@ -10625,16 +10594,6 @@ listr-update-renderer@^0.5.0: log-update "^2.3.0" strip-ansi "^3.0.1" -listr-verbose-renderer@^0.4.0: - version "0.4.1" - resolved "https://registry.yarnpkg.com/listr-verbose-renderer/-/listr-verbose-renderer-0.4.1.tgz#8206f4cf6d52ddc5827e5fd14989e0e965933a35" - integrity sha1-ggb0z21S3cWCfl/RSYng6WWTOjU= - dependencies: - chalk "^1.1.3" - cli-cursor "^1.0.2" - date-fns "^1.27.2" - figures "^1.7.0" - listr-verbose-renderer@^0.5.0: version "0.5.0" resolved "https://registry.yarnpkg.com/listr-verbose-renderer/-/listr-verbose-renderer-0.5.0.tgz#f1132167535ea4c1261102b9f28dac7cba1e03db" @@ -10645,30 +10604,7 @@ listr-verbose-renderer@^0.5.0: date-fns "^1.27.2" figures "^2.0.0" -listr@^0.13.0: - version "0.13.0" - resolved "https://registry.yarnpkg.com/listr/-/listr-0.13.0.tgz#20bb0ba30bae660ee84cc0503df4be3d5623887d" - integrity sha1-ILsLowuuZg7oTMBQPfS+PVYjiH0= - dependencies: - chalk "^1.1.3" - cli-truncate "^0.2.1" - figures "^1.7.0" - indent-string "^2.1.0" - is-observable "^0.2.0" - is-promise "^2.1.0" - is-stream "^1.1.0" - listr-silent-renderer "^1.1.1" - listr-update-renderer "^0.4.0" - listr-verbose-renderer "^0.4.0" - log-symbols "^1.0.2" - log-update "^1.0.2" - ora "^0.2.3" - p-map "^1.1.1" - rxjs "^5.4.2" - stream-to-observable "^0.2.0" - strip-ansi "^3.0.1" - -listr@^0.14.1: +listr@^0.14.1, listr@^0.14.2: version "0.14.3" resolved "https://registry.yarnpkg.com/listr/-/listr-0.14.3.tgz#2fea909604e434be464c50bddba0d496928fa586" integrity sha512-RmAl7su35BFd/xoMamRjpIE4j3v+L28o8CT5YhAXQJm1fD+1l9ngXY8JAQRJ+tFK2i5njvi0iRUKV09vPwA0iA== @@ -10949,14 +10885,6 @@ log-symbols@^2.0.0, log-symbols@^2.1.0, log-symbols@^2.2.0: dependencies: chalk "^2.0.1" -log-update@^1.0.2: - version "1.0.2" - resolved "https://registry.yarnpkg.com/log-update/-/log-update-1.0.2.tgz#19929f64c4093d2d2e7075a1dad8af59c296b8d1" - integrity sha1-GZKfZMQJPS0ucHWh2tivWcKWuNE= - dependencies: - ansi-escapes "^1.0.0" - cli-cursor "^1.0.2" - log-update@^2.3.0: version "2.3.0" resolved "https://registry.yarnpkg.com/log-update/-/log-update-2.3.0.tgz#88328fd7d1ce7938b29283746f0b1bc126b24708" @@ -11154,6 +11082,13 @@ marksy@^6.1.0: he "^1.1.1" marked "^0.3.12" +matcher@^1.0.0: + version "1.1.1" + resolved "https://registry.yarnpkg.com/matcher/-/matcher-1.1.1.tgz#51d8301e138f840982b338b116bb0c09af62c1c2" + integrity sha512-+BmqxWIubKTRKNWx/ahnCkk3mG8m7OturVlqq6HiojGJTd5hVYbgZm6WzcYPCoB+KBT4Vd6R7WSRG2OADNaCjg== + dependencies: + escape-string-regexp "^1.0.4" + material-colors@^1.2.1: version "1.2.6" resolved "https://registry.yarnpkg.com/material-colors/-/material-colors-1.2.6.tgz#6d1958871126992ceecc72f4bcc4d8f010865f46" @@ -12512,16 +12447,6 @@ optionator@^0.8.1: type-check "~0.3.2" wordwrap "~1.0.0" -ora@^0.2.3: - version "0.2.3" - resolved "https://registry.yarnpkg.com/ora/-/ora-0.2.3.tgz#37527d220adcd53c39b73571d754156d5db657a4" - integrity sha1-N1J9Igrc1Tw5tzVx11QVbV22V6Q= - dependencies: - 
chalk "^1.1.1" - cli-cursor "^1.0.2" - cli-spinners "^0.1.2" - object-assign "^4.0.1" - ordered-ast-traverse@~1.1.1: version "1.1.1" resolved "https://registry.yarnpkg.com/ordered-ast-traverse/-/ordered-ast-traverse-1.1.1.tgz#6843a170bc0eee8b520cc8ddc1ddd3aa30fa057c" @@ -13023,6 +12948,13 @@ pkg-up@^1.0.0: dependencies: find-up "^1.0.0" +please-upgrade-node@^3.0.2: + version "3.1.1" + resolved "https://registry.yarnpkg.com/please-upgrade-node/-/please-upgrade-node-3.1.1.tgz#ed320051dfcc5024fae696712c8288993595e8ac" + integrity sha512-KY1uHnQ2NlQHqIJQpnh/i54rKkuxCEBx+voJIS/Mvb+L2iYd2NMotwduhKTMjfC1uKoX3VXOxLjIYG66dfJTVQ== + dependencies: + semver-compare "^1.0.0" + pluralize@^1.2.1: version "1.2.1" resolved "https://registry.yarnpkg.com/pluralize/-/pluralize-1.2.1.tgz#d1a21483fd22bb41e58a12fa3421823140897c45" @@ -13560,14 +13492,6 @@ pretty-error@^2.0.2, pretty-error@^2.1.1: renderkid "^2.0.1" utila "~0.4" -pretty-format@^21.2.1: - version "21.2.1" - resolved "https://registry.yarnpkg.com/pretty-format/-/pretty-format-21.2.1.tgz#ae5407f3cf21066cd011aa1ba5fce7b6a2eddb36" - integrity sha512-ZdWPGYAnYfcVP8yKA3zFjCn8s4/17TeYH28MXuC8vTp0o21eXjbFGcOAXZEaDaOFJjc3h2qa7HQNHNshhvoh2A== - dependencies: - ansi-regex "^3.0.0" - ansi-styles "^3.2.0" - pretty-format@^23.6.0: version "23.6.0" resolved "https://registry.yarnpkg.com/pretty-format/-/pretty-format-23.6.0.tgz#5eaac8eeb6b33b987b7fe6097ea6a8a146ab5760" @@ -13670,6 +13594,11 @@ prop-types@15.x, prop-types@^15.5.10, prop-types@^15.5.4, prop-types@^15.5.8, pr loose-envify "^1.3.1" object-assign "^4.1.1" +property-expr@^1.5.0: + version "1.5.1" + resolved "https://registry.yarnpkg.com/property-expr/-/property-expr-1.5.1.tgz#22e8706894a0c8e28d58735804f6ba3a3673314f" + integrity sha512-CGuc0VUTGthpJXL36ydB6jnbyOf/rAHFvmVrJlH+Rg0DqqLFQGAP6hIaxD/G0OAmBJPhXDHuEJigrp0e0wFV6g== + property-information@^5.0.0, property-information@^5.0.1: version "5.0.1" resolved "https://registry.yarnpkg.com/property-information/-/property-information-5.0.1.tgz#c3b09f4f5750b1634c0b24205adbf78f18bdf94f" @@ -15078,7 +15007,7 @@ rx-lite@^3.1.2: resolved "https://registry.yarnpkg.com/rx-lite/-/rx-lite-3.1.2.tgz#19ce502ca572665f3b647b10939f97fd1615f102" integrity sha1-Gc5QLKVyZl87ZHsQk5+X/RYV8QI= -rxjs@^5.4.2, rxjs@^5.5.2: +rxjs@^5.5.2: version "5.5.12" resolved "https://registry.yarnpkg.com/rxjs/-/rxjs-5.5.12.tgz#6fa61b8a77c3d793dbaf270bee2f43f652d741cc" integrity sha512-xx2itnL5sBbqeeiVgNPVuQQ1nC8Jp2WfNJhXWHmElW9YmrpS9UVnNzhP3EH3HFqexO5Tlp8GhYY+WEcqcVMvGw== @@ -15247,6 +15176,11 @@ selfsigned@^1.9.1: dependencies: node-forge "0.7.5" +semver-compare@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/semver-compare/-/semver-compare-1.0.0.tgz#0dee216a1c941ab37e9efb1788f6afc5ff5537fc" + integrity sha1-De4hahyUGrN+nvsXiPavxf9VN/w= + semver-diff@^2.0.0: version "2.1.0" resolved "https://registry.yarnpkg.com/semver-diff/-/semver-diff-2.1.0.tgz#4bbb8437c8d37e4b0cf1a68fd726ec6d645d6d36" @@ -15482,6 +15416,13 @@ simple-get@^2.7.0: once "^1.3.1" simple-concat "^1.0.0" +simple-git@^1.85.0: + version "1.107.0" + resolved "https://registry.yarnpkg.com/simple-git/-/simple-git-1.107.0.tgz#12cffaf261c14d6f450f7fdb86c21ccee968b383" + integrity sha512-t4OK1JRlp4ayKRfcW6owrWcRVLyHRUlhGd0uN6ZZTqfDq8a5XpcUdOKiGRNobHEuMtNqzp0vcJNvhYWwh5PsQA== + dependencies: + debug "^4.0.1" + simple-is@~0.2.0: version "0.2.0" resolved "https://registry.yarnpkg.com/simple-is/-/simple-is-0.2.0.tgz#2abb75aade39deb5cc815ce10e6191164850baf0" @@ -15929,10 +15870,10 @@ stack-utils@^1.0.1: resolved 
"https://registry.yarnpkg.com/stack-utils/-/stack-utils-1.0.2.tgz#33eba3897788558bebfc2db059dc158ec36cebb8" integrity sha512-MTX+MeG5U994cazkjd/9KNAapsHnibjMLnfXodlkXw76JEea0UiNzrqidzo1emMwk7w5Qhc9jd4Bn9TBb1MFwA== -staged-git-files@1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/staged-git-files/-/staged-git-files-1.0.0.tgz#cdb847837c1fcc52c08a872d4883cc0877668a80" - integrity sha1-zbhHg3wfzFLAioctSIPMCHdmioA= +staged-git-files@1.1.2: + version "1.1.2" + resolved "https://registry.yarnpkg.com/staged-git-files/-/staged-git-files-1.1.2.tgz#4326d33886dc9ecfa29a6193bf511ba90a46454b" + integrity sha512-0Eyrk6uXW6tg9PYkhi/V/J4zHp33aNyi2hOCmhFLqLTIhbgqWn5jlSzI+IU0VqrZq6+DbHcabQl/WP6P3BG0QA== static-extend@^0.1.1: version "0.1.2" @@ -16009,13 +15950,6 @@ stream-shift@^1.0.0: resolved "https://registry.yarnpkg.com/stream-shift/-/stream-shift-1.0.0.tgz#d5c752825e5367e786f78e18e445ea223a155952" integrity sha1-1cdSgl5TZ+eG944Y5EXqIjoVWVI= -stream-to-observable@^0.2.0: - version "0.2.0" - resolved "https://registry.yarnpkg.com/stream-to-observable/-/stream-to-observable-0.2.0.tgz#59d6ea393d87c2c0ddac10aa0d561bc6ba6f0e10" - integrity sha1-WdbqOT2HwsDdrBCqDVYbxrpvDhA= - dependencies: - any-observable "^0.2.0" - strict-uri-encode@^1.0.0: version "1.1.0" resolved "https://registry.yarnpkg.com/strict-uri-encode/-/strict-uri-encode-1.1.0.tgz#279b225df1d582b1f54e65addd4352e18faa0713" @@ -16026,6 +15960,11 @@ strict-uri-encode@^2.0.0: resolved "https://registry.yarnpkg.com/strict-uri-encode/-/strict-uri-encode-2.0.0.tgz#b9c7330c7042862f6b142dc274bbcc5866ce3546" integrity sha1-ucczDHBChi9rFC3CdLvMWGbONUY= +string-argv@^0.0.2: + version "0.0.2" + resolved "https://registry.yarnpkg.com/string-argv/-/string-argv-0.0.2.tgz#dac30408690c21f3c3630a3ff3a05877bdcbd736" + integrity sha1-2sMECGkMIfPDYwo/86BYd73L1zY= + string-length@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/string-length/-/string-length-2.0.0.tgz#d40dbb686a3ace960c1cffca562bf2c45f8363ed" @@ -16131,7 +16070,7 @@ stringifier@^1.3.0: traverse "^0.6.6" type-name "^2.0.1" -stringify-object@^3.2.0: +stringify-object@^3.2.2: version "3.3.0" resolved "https://registry.yarnpkg.com/stringify-object/-/stringify-object-3.3.0.tgz#703065aefca19300d3ce88af4f5b3956d7556629" integrity sha512-rHqiFh1elqCQ9WPLIC8I0Q/g/wj5J1eMkyoiD6eoQApWHP0FtlK7rqnhmabL5VUY9JQCcqwwvlOaSuutekgyrw== @@ -16336,11 +16275,6 @@ symbol-observable@1.0.1: resolved "https://registry.yarnpkg.com/symbol-observable/-/symbol-observable-1.0.1.tgz#8340fc4702c3122df5d22288f88283f513d3fdd4" integrity sha1-g0D8RwLDEi310iKI+IKD9RPT/dQ= -symbol-observable@^0.2.2: - version "0.2.4" - resolved "https://registry.yarnpkg.com/symbol-observable/-/symbol-observable-0.2.4.tgz#95a83db26186d6af7e7a18dbd9760a2f86d08f40" - integrity sha1-lag9smGG1q9+ehjb2XYKL4bQj0A= - symbol-observable@^1.1.0, symbol-observable@^1.2.0: version "1.2.0" resolved "https://registry.yarnpkg.com/symbol-observable/-/symbol-observable-1.2.0.tgz#c22688aed4eab3cdc2dfeacbb561660560a00804" @@ -16358,6 +16292,11 @@ symbol.prototype.description@^1.0.0: dependencies: has-symbols "^1.0.0" +synchronous-promise@^2.0.5: + version "2.0.6" + resolved "https://registry.yarnpkg.com/synchronous-promise/-/synchronous-promise-2.0.6.tgz#de76e0ea2b3558c1e673942e47e714a930fa64aa" + integrity sha512-TyOuWLwkmtPL49LHCX1caIwHjRzcVd62+GF6h8W/jHOeZUFHpnd2XJDVuUlaTaLPH1nuu2M69mfHr5XbQJnf/g== + systemjs-plugin-css@^0.1.36: version "0.1.37" resolved 
"https://registry.yarnpkg.com/systemjs-plugin-css/-/systemjs-plugin-css-0.1.37.tgz#684847252ca69b7da24a1201094c86274324e82f" @@ -16649,6 +16588,11 @@ toposort@^1.0.0: resolved "https://registry.yarnpkg.com/toposort/-/toposort-1.0.7.tgz#2e68442d9f64ec720b8cc89e6443ac6caa950029" integrity sha1-LmhELZ9k7HILjMieZEOsbKqVACk= +toposort@^2.0.2: + version "2.0.2" + resolved "https://registry.yarnpkg.com/toposort/-/toposort-2.0.2.tgz#ae21768175d1559d48bef35420b2f4962f09c330" + integrity sha1-riF2gXXRVZ1IvvNUILL0li8JwzA= + touch@^2.0.1: version "2.0.2" resolved "https://registry.yarnpkg.com/touch/-/touch-2.0.2.tgz#ca0b2a3ae3211246a61b16ba9e6cbf1596287164" @@ -18075,6 +18019,18 @@ yeoman-generator@^2.0.5: through2 "^2.0.0" yeoman-environment "^2.0.5" +yup@^0.26.10: + version "0.26.10" + resolved "https://registry.yarnpkg.com/yup/-/yup-0.26.10.tgz#3545839663289038faf25facfc07e11fd67c0cb1" + integrity sha512-keuNEbNSnsOTOuGCt3UJW69jDE3O4P+UHAakO7vSeFMnjaitcmlbij/a3oNb9g1Y1KvSKH/7O1R2PQ4m4TRylw== + dependencies: + "@babel/runtime" "7.0.0" + fn-name "~2.0.1" + lodash "^4.17.10" + property-expr "^1.5.0" + synchronous-promise "^2.0.5" + toposort "^2.0.2" + zip-stream@^1.1.0: version "1.2.0" resolved "https://registry.yarnpkg.com/zip-stream/-/zip-stream-1.2.0.tgz#a8bc45f4c1b49699c6b90198baacaacdbcd4ba04"