From be2fa54459bd6aa9305ec6ee83be8599370f9ffe Mon Sep 17 00:00:00 2001 From: Martin Molnar Date: Tue, 20 Feb 2018 11:15:31 +0100 Subject: [PATCH 001/488] feat(ldap): Allow use of DN in user attribute filter (#3132) --- pkg/login/ldap.go | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/pkg/login/ldap.go b/pkg/login/ldap.go index be3babac02e..12e10557ffc 100644 --- a/pkg/login/ldap.go +++ b/pkg/login/ldap.go @@ -408,6 +408,10 @@ func (a *ldapAuther) searchForUser(username string) (*LdapUserInfo, error) { if a.server.GroupSearchFilterUserAttribute == "" { filter_replace = getLdapAttr(a.server.Attr.Username, searchResult) } + if a.server.GroupSearchFilterUserAttribute == "dn" { + filter_replace = searchResult.Entries[0].DN + } + filter := strings.Replace(a.server.GroupSearchFilter, "%s", ldap.EscapeFilter(filter_replace), -1) a.log.Info("Searching for user's groups", "filter", filter) @@ -430,7 +434,11 @@ func (a *ldapAuther) searchForUser(username string) (*LdapUserInfo, error) { if len(groupSearchResult.Entries) > 0 { for i := range groupSearchResult.Entries { - memberOf = append(memberOf, getLdapAttrN(a.server.Attr.MemberOf, groupSearchResult, i)) + if a.server.Attr.MemberOf == "dn" { + memberOf = append(memberOf, groupSearchResult.Entries[i].DN) + } else { + memberOf = append(memberOf, getLdapAttrN(a.server.Attr.MemberOf, groupSearchResult, i)) + } } break } From af63a26be0a16acbada306fa053386f93e342af7 Mon Sep 17 00:00:00 2001 From: flopp999 <21694965+flopp999@users.noreply.github.com> Date: Tue, 13 Mar 2018 22:11:58 +0100 Subject: [PATCH 002/488] Added W/m2(energy) and l/h(flow) both as .fixedUnit --- public/app/core/utils/kbn.ts | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/public/app/core/utils/kbn.ts b/public/app/core/utils/kbn.ts index 3b78ccfc001..3f2f0ad9419 100644 --- a/public/app/core/utils/kbn.ts +++ b/public/app/core/utils/kbn.ts @@ -496,6 +496,7 @@ kbn.valueFormats.watt = kbn.formatBuilders.decimalSIPrefix('W'); kbn.valueFormats.kwatt = kbn.formatBuilders.decimalSIPrefix('W', 1); kbn.valueFormats.mwatt = kbn.formatBuilders.decimalSIPrefix('W', -1); kbn.valueFormats.kwattm = kbn.formatBuilders.decimalSIPrefix('W/Min', 1); +kbn.valueFormats.Wm2 = kbn.formatBuilders.fixedUnit('W/m2'); kbn.valueFormats.voltamp = kbn.formatBuilders.decimalSIPrefix('VA'); kbn.valueFormats.kvoltamp = kbn.formatBuilders.decimalSIPrefix('VA', 1); kbn.valueFormats.voltampreact = kbn.formatBuilders.decimalSIPrefix('var'); @@ -576,6 +577,7 @@ kbn.valueFormats.flowgpm = kbn.formatBuilders.fixedUnit('gpm'); kbn.valueFormats.flowcms = kbn.formatBuilders.fixedUnit('cms'); kbn.valueFormats.flowcfs = kbn.formatBuilders.fixedUnit('cfs'); kbn.valueFormats.flowcfm = kbn.formatBuilders.fixedUnit('cfm'); +kbn.valueFormats.litreh = kbn.formatBuilders.fixedUnit('l/h'); // Angle kbn.valueFormats.degree = kbn.formatBuilders.fixedUnit('°'); @@ -1007,6 +1009,7 @@ kbn.getUnitFormats = function() { { text: 'Watt (W)', value: 'watt' }, { text: 'Kilowatt (kW)', value: 'kwatt' }, { text: 'Milliwatt (mW)', value: 'mwatt' }, + { text: 'Watt per square metre (W/m2)', value: 'Wm2' }, { text: 'Volt-ampere (VA)', value: 'voltamp' }, { text: 'Kilovolt-ampere (kVA)', value: 'kvoltamp' }, { text: 'Volt-ampere reactive (var)', value: 'voltampreact' }, @@ -1062,6 +1065,7 @@ kbn.getUnitFormats = function() { { text: 'Cubic meters/sec (cms)', value: 'flowcms' }, { text: 'Cubic feet/sec (cfs)', value: 'flowcfs' }, { text: 'Cubic feet/min (cfm)', value: 'flowcfm' }, + { text: 'Litre/hour', value: 'litreh' 
}, ], }, { From 08461408a279afa9af2bd8b746d474de373fd6fa Mon Sep 17 00:00:00 2001 From: flopp999 <21694965+flopp999@users.noreply.github.com> Date: Tue, 13 Mar 2018 22:17:56 +0100 Subject: [PATCH 003/488] Added Kilopascals(kPa) under pressure --- public/app/core/utils/kbn.ts | 2 ++ 1 file changed, 2 insertions(+) diff --git a/public/app/core/utils/kbn.ts b/public/app/core/utils/kbn.ts index 3f2f0ad9419..87075e0de2e 100644 --- a/public/app/core/utils/kbn.ts +++ b/public/app/core/utils/kbn.ts @@ -526,6 +526,7 @@ kbn.valueFormats.pressurebar = kbn.formatBuilders.decimalSIPrefix('bar'); kbn.valueFormats.pressurembar = kbn.formatBuilders.decimalSIPrefix('bar', -1); kbn.valueFormats.pressurekbar = kbn.formatBuilders.decimalSIPrefix('bar', 1); kbn.valueFormats.pressurehpa = kbn.formatBuilders.fixedUnit('hPa'); +kbn.valueFormats.pressurekpa = kbn.formatBuilders.fixedUnit('kPa'); kbn.valueFormats.pressurehg = kbn.formatBuilders.fixedUnit('"Hg'); kbn.valueFormats.pressurepsi = kbn.formatBuilders.scaledUnits(1000, [' psi', ' ksi', ' Mpsi']); @@ -1045,6 +1046,7 @@ kbn.getUnitFormats = function() { { text: 'Bars', value: 'pressurebar' }, { text: 'Kilobars', value: 'pressurekbar' }, { text: 'Hectopascals', value: 'pressurehpa' }, + { text: 'Kilopascals', value: 'pressurekpa' }, { text: 'Inches of mercury', value: 'pressurehg' }, { text: 'PSI', value: 'pressurepsi' }, ], From 8e7d23cdebc3df236d519777e3e4485d5ad32d12 Mon Sep 17 00:00:00 2001 From: wph95 Date: Fri, 23 Mar 2018 23:50:16 +0800 Subject: [PATCH 004/488] wip Signed-off-by: wph95 --- pkg/cmd/grafana-server/main.go | 1 + pkg/tsdb/elasticsearch/elasticsearch.go | 131 +++++++++++ pkg/tsdb/elasticsearch/model_parser.go | 97 +++++++++ pkg/tsdb/elasticsearch/models.go | 131 +++++++++++ pkg/tsdb/elasticsearch/query.go | 204 ++++++++++++++++++ pkg/tsdb/elasticsearch/response_parser.go | 111 ++++++++++ .../datasource/elasticsearch/plugin.json | 1 + 7 files changed, 676 insertions(+) create mode 100644 pkg/tsdb/elasticsearch/elasticsearch.go create mode 100644 pkg/tsdb/elasticsearch/model_parser.go create mode 100644 pkg/tsdb/elasticsearch/models.go create mode 100644 pkg/tsdb/elasticsearch/query.go create mode 100644 pkg/tsdb/elasticsearch/response_parser.go diff --git a/pkg/cmd/grafana-server/main.go b/pkg/cmd/grafana-server/main.go index ab0e12f2d9f..21090153bc0 100644 --- a/pkg/cmd/grafana-server/main.go +++ b/pkg/cmd/grafana-server/main.go @@ -21,6 +21,7 @@ import ( _ "github.com/grafana/grafana/pkg/services/alerting/conditions" _ "github.com/grafana/grafana/pkg/services/alerting/notifiers" _ "github.com/grafana/grafana/pkg/tsdb/cloudwatch" + _ "github.com/grafana/grafana/pkg/tsdb/elasticsearch" _ "github.com/grafana/grafana/pkg/tsdb/graphite" _ "github.com/grafana/grafana/pkg/tsdb/influxdb" _ "github.com/grafana/grafana/pkg/tsdb/mysql" diff --git a/pkg/tsdb/elasticsearch/elasticsearch.go b/pkg/tsdb/elasticsearch/elasticsearch.go new file mode 100644 index 00000000000..d67b4ad902d --- /dev/null +++ b/pkg/tsdb/elasticsearch/elasticsearch.go @@ -0,0 +1,131 @@ +package elasticsearch + +import ( + "context" + "encoding/json" + "errors" + "fmt" + "github.com/davecgh/go-spew/spew" + "github.com/grafana/grafana/pkg/log" + "github.com/grafana/grafana/pkg/models" + "github.com/grafana/grafana/pkg/setting" + "github.com/grafana/grafana/pkg/tsdb" + "golang.org/x/net/context/ctxhttp" + "net/http" + "net/url" + "path" + "strings" + "time" +) + +type ElasticsearchExecutor struct { + Transport *http.Transport +} + +var ( + glog log.Logger + intervalCalculator 
tsdb.IntervalCalculator
+)
+
+func NewElasticsearchExecutor(dsInfo *models.DataSource) (tsdb.TsdbQueryEndpoint, error) {
+	transport, err := dsInfo.GetHttpTransport()
+	if err != nil {
+		return nil, err
+	}
+
+	return &ElasticsearchExecutor{
+		Transport: transport,
+	}, nil
+}
+
+func init() {
+	glog = log.New("tsdb.elasticsearch")
+	tsdb.RegisterTsdbQueryEndpoint("elasticsearch", NewElasticsearchExecutor)
+	intervalCalculator = tsdb.NewIntervalCalculator(&tsdb.IntervalOptions{MinInterval: time.Millisecond * 1})
+}
+
+func (e *ElasticsearchExecutor) Query(ctx context.Context, dsInfo *models.DataSource, tsdbQuery *tsdb.TsdbQuery) (*tsdb.Response, error) {
+	result := &tsdb.Response{}
+	result.Results = make(map[string]*tsdb.QueryResult)
+
+	queryParser := ElasticSearchQueryParser{
+		dsInfo,
+		tsdbQuery.TimeRange,
+		tsdbQuery.Queries,
+		glog,
+	}
+
+	glog.Warn(spew.Sdump(dsInfo))
+	glog.Warn(spew.Sdump(tsdbQuery))
+
+	payload, err := queryParser.Parse()
+	if err != nil {
+		return nil, err
+	}
+
+	if setting.Env == setting.DEV {
+		glog.Debug("Elasticsearch payload", "raw payload", payload)
+	}
+	glog.Info("Elasticsearch payload", "raw payload", payload)
+
+	req, err := e.createRequest(dsInfo, payload)
+	if err != nil {
+		return nil, err
+	}
+
+	httpClient, err := dsInfo.GetHttpClient()
+	if err != nil {
+		return nil, err
+	}
+
+	resp, err := ctxhttp.Do(ctx, httpClient, req)
+	if err != nil {
+		return nil, err
+	}
+
+	if resp.StatusCode/100 != 2 {
+		return nil, fmt.Errorf("elasticsearch returned invalid status code: %v", resp.Status)
+	}
+
+	var responses Responses
+	dec := json.NewDecoder(resp.Body)
+	defer resp.Body.Close()
+	dec.UseNumber()
+	err = dec.Decode(&responses)
+	if err != nil {
+		return nil, err
+	}
+
+	glog.Warn(spew.Sdump(responses))
+	for _, res := range responses.Responses {
+		if res.Err != nil {
+			return nil, errors.New(res.getErrMsg())
+		}
+
+	}
+
+	return result, nil
+}
+
+func (e *ElasticsearchExecutor) createRequest(dsInfo *models.DataSource, query string) (*http.Request, error) {
+	u, _ := url.Parse(dsInfo.Url)
+	u.Path = path.Join(u.Path, "_msearch")
+	req, err := http.NewRequest(http.MethodPost, u.String(), strings.NewReader(query))
+	if err != nil {
+		return nil, err
+	}
+	req.Header.Set("User-Agent", "Grafana")
+	req.Header.Set("Content-Type", "application/json")
+
+	if dsInfo.BasicAuth {
+		req.SetBasicAuth(dsInfo.BasicAuthUser, dsInfo.BasicAuthPassword)
+	}
+
+	if !dsInfo.BasicAuth && dsInfo.User != "" {
+		req.SetBasicAuth(dsInfo.User, dsInfo.Password)
+	}
+
+	glog.Debug("Elasticsearch request", "url", req.URL.String())
+	glog.Debug("Elasticsearch request", "body", query)
+	return req, nil
+}
diff --git a/pkg/tsdb/elasticsearch/model_parser.go b/pkg/tsdb/elasticsearch/model_parser.go
new file mode 100644
index 00000000000..136db6baed7
--- /dev/null
+++ b/pkg/tsdb/elasticsearch/model_parser.go
@@ -0,0 +1,97 @@
+package elasticsearch
+
+import (
+	"bytes"
+	"encoding/json"
+	"fmt"
+	"github.com/grafana/grafana/pkg/components/simplejson"
+	"github.com/grafana/grafana/pkg/log"
+	"github.com/grafana/grafana/pkg/models"
+	"github.com/grafana/grafana/pkg/tsdb"
+	"src/github.com/davecgh/go-spew/spew"
+	"strconv"
+	"strings"
+	"time"
+)
+
+type ElasticSearchQueryParser struct {
+	DsInfo    *models.DataSource
+	TimeRange *tsdb.TimeRange
+	Queries   []*tsdb.Query
+	glog      log.Logger
+}
+
+func (qp *ElasticSearchQueryParser) Parse() (string, error) {
+	payload := bytes.Buffer{}
+	queryHeader := qp.getQueryHeader()
+
+	for _, q := range qp.Queries {
+		timeField, err :=
q.Model.Get("timeField").String() + if err != nil { + return "", err + } + rawQuery := q.Model.Get("query").MustString("") + bucketAggs := q.Model.Get("bucketAggs").MustArray() + metrics := q.Model.Get("metrics").MustArray() + alias := q.Model.Get("alias").MustString("") + builder := QueryBuilder{timeField, rawQuery, bucketAggs, metrics, alias} + + query, err := builder.Build() + if err != nil { + return "", err + } + queryBytes, err := json.Marshal(query) + if err != nil { + return "", err + } + + payload.WriteString(queryHeader.String() + "\n") + payload.WriteString(string(queryBytes) + "\n") + } + + return qp.payloadReplace(payload.String(), qp.DsInfo.JsonData) + +} + +func (qp *ElasticSearchQueryParser) getQueryHeader() *QueryHeader { + var header QueryHeader + esVersion := qp.DsInfo.JsonData.Get("esVersion").MustInt() + + searchType := "query_then_fetch" + if esVersion < 5 { + searchType = "count" + } + header.SearchType = searchType + header.IgnoreUnavailable = true + header.Index = qp.getIndexList() + + if esVersion >= 56 { + header.MaxConcurrentShardRequests = qp.DsInfo.JsonData.Get("maxConcurrentShardRequests").MustInt() + } + return &header +} +func (qp *ElasticSearchQueryParser) payloadReplace(payload string, model *simplejson.Json) (string, error) { + parsedInterval, err := tsdb.GetIntervalFrom(qp.DsInfo, model, time.Millisecond) + if err != nil { + return "", nil + } + + interval := intervalCalculator.Calculate(qp.TimeRange, parsedInterval) + glog.Warn(spew.Sdump(interval)) + payload = strings.Replace(payload, "$timeFrom", fmt.Sprintf("%d", qp.TimeRange.GetFromAsMsEpoch()), -1) + payload = strings.Replace(payload, "$timeTo", fmt.Sprintf("%d", qp.TimeRange.GetToAsMsEpoch()), -1) + payload = strings.Replace(payload, "$interval", interval.Text, -1) + payload = strings.Replace(payload, "$__interval_ms", strconv.FormatInt(interval.Value.Nanoseconds()/int64(time.Millisecond), 10), -1) + payload = strings.Replace(payload, "$__interval", interval.Text, -1) + + return payload, nil +} + +func (qp *ElasticSearchQueryParser) getIndexList() string { + _, err := qp.DsInfo.JsonData.Get("interval").String() + if err != nil { + return qp.DsInfo.Database + } + // todo: support interval + return qp.DsInfo.Database +} diff --git a/pkg/tsdb/elasticsearch/models.go b/pkg/tsdb/elasticsearch/models.go new file mode 100644 index 00000000000..8662f6efbd3 --- /dev/null +++ b/pkg/tsdb/elasticsearch/models.go @@ -0,0 +1,131 @@ +package elasticsearch + +import ( + "github.com/grafana/grafana/pkg/components/simplejson" + "bytes" + "fmt" + "encoding/json" +) + +type QueryHeader struct { + SearchType string `json:"search_type"` + IgnoreUnavailable bool `json:"ignore_unavailable"` + Index interface{} `json:"index"` + MaxConcurrentShardRequests int `json:"max_concurrent_shard_requests"` +} + +func (q *QueryHeader) String() (string) { + r, _ := json.Marshal(q) + return string(r) +} + +type Query struct { + Query map[string]interface{} `json:"query"` + Aggs Aggs `json:"aggs"` + Size int `json:"size"` +} + +type Aggs map[string]interface{} + +type HistogramAgg struct { + Interval string `json:"interval,omitempty"` + Field string `json:"field"` + MinDocCount int `json:"min_doc_count"` + Missing string `json:"missing,omitempty"` +} + +type DateHistogramAgg struct { + HistogramAgg + ExtendedBounds ExtendedBounds `json:"extended_bounds"` + Format string `json:"format"` +} + +type FiltersAgg struct { + Filter map[string]interface{} `json:"filter"` +} + +type TermsAggSetting struct { + Field string `json:"field"` + Size 
int                    `json:"size"`
+	Order       map[string]interface{} `json:"order"`
+	MinDocCount int                    `json:"min_doc_count"`
+	Missing     string                 `json:"missing"`
+}
+
+type TermsAgg struct {
+	Terms TermsAggSetting `json:"terms"`
+	Aggs  Aggs            `json:"aggs"`
+}
+
+type ExtendedBounds struct {
+	Min string `json:"min"`
+	Max string `json:"max"`
+}
+
+type RangeFilter struct {
+	Range map[string]RangeFilterSetting `json:"range"`
+}
+type RangeFilterSetting struct {
+	Gte    string `json:"gte"`
+	Lte    string `json:"lte"`
+	Format string `json:"format"`
+}
+
+func newRangeFilter(field string, rangeFilterSetting RangeFilterSetting) *RangeFilter {
+	return &RangeFilter{
+		map[string]RangeFilterSetting{field: rangeFilterSetting}}
+}
+
+type QueryStringFilter struct {
+	QueryString QueryStringFilterSetting `json:"query_string"`
+}
+type QueryStringFilterSetting struct {
+	AnalyzeWildcard bool   `json:"analyze_wildcard"`
+	Query           string `json:"query"`
+}
+
+func newQueryStringFilter(analyzeWildcard bool, query string) *QueryStringFilter {
+	return &QueryStringFilter{QueryStringFilterSetting{AnalyzeWildcard: analyzeWildcard, Query: query}}
+}
+
+type BoolQuery struct {
+	Filter []interface{} `json:"filter"`
+}
+
+type Metric map[string]interface{}
+
+type Responses struct {
+	Responses []Response `json:"responses"`
+}
+
+type Response struct {
+	Status       int                    `json:"status"`
+	Err          map[string]interface{} `json:"error"`
+	Aggregations map[string]interface{} `json:"aggregations"`
+}
+
+func (r *Response) getErrMsg() (string) {
+	var msg bytes.Buffer
+	errJson := simplejson.NewFromAny(r.Err)
+	errType, err := errJson.Get("type").String()
+	if err == nil {
+		msg.WriteString(fmt.Sprintf("type:%s", errType))
+	}
+
+	reason, err := errJson.Get("reason").String()
+	if err == nil {
+		msg.WriteString(fmt.Sprintf("reason:%s", reason))
+	}
+	return msg.String()
+}
+
+type PercentilesResult struct {
+	Buckets struct {
+		map[string]struct {
+			Values map[string]string `json:"values"`
+		}
+		KeyAsString string `json:"key_as_string"`
+		Key         int64  `json:"key"`
+		DocCount    int    `json:"doc_count"`
+	} `json:"buckets"`
+}
diff --git a/pkg/tsdb/elasticsearch/query.go b/pkg/tsdb/elasticsearch/query.go
new file mode 100644
index 00000000000..69dd5caa3b4
--- /dev/null
+++ b/pkg/tsdb/elasticsearch/query.go
@@ -0,0 +1,204 @@
+package elasticsearch
+
+import (
+	"errors"
+	"github.com/grafana/grafana/pkg/components/simplejson"
+)
+
+var rangeFilterSetting = RangeFilterSetting{Gte: "$timeFrom",
+	Lte: "$timeTo",
+	Format: "epoch_millis"}
+
+type QueryBuilder struct {
+	TimeField  string
+	RawQuery   string
+	BucketAggs []interface{}
+	Metrics    []interface{}
+	Alias      string
+}
+
+func (b *QueryBuilder) Build() (Query, error) {
+	var err error
+	var res Query
+	res.Query = make(map[string]interface{})
+	res.Size = 0
+
+	if err != nil {
+		return res, err
+	}
+
+	boolQuery := BoolQuery{}
+	boolQuery.Filter = append(boolQuery.Filter, newRangeFilter(b.TimeField, rangeFilterSetting))
+	boolQuery.Filter = append(boolQuery.Filter, newQueryStringFilter(true, b.RawQuery))
+	res.Query["bool"] = boolQuery
+
+	// handle document query
+	if len(b.BucketAggs) == 0 {
+		if len(b.Metrics) > 0 {
+			metric := simplejson.NewFromAny(b.Metrics[0])
+			if metric.Get("type").MustString("") == "raw_document" {
+				return res, errors.New("alert does not support Raw_Document")
+			}
+		}
+	}
+	aggs, err := b.parseAggs(b.BucketAggs, b.Metrics)
+	res.Aggs = aggs["aggs"].(Aggs)
+
+	return res, err
+}
+
+func (b *QueryBuilder) parseAggs(bucketAggs []interface{}, metrics []interface{}) (Aggs, error) {
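+	// A sketch of the nested structure this function builds for a typical
+	// panel, assuming one date_histogram bucket agg with id "2" and one avg
+	// metric with id "1" (the ids and field names are illustrative):
+	//
+	//   "aggs": {
+	//     "2": {
+	//       "date_histogram": { "field": "@timestamp", "interval": "$__interval", ... },
+	//       "aggs": { "1": { "avg": { "field": "value" } } }
+	//     }
+	//   }
+	//
+	// Each bucket agg nests the next one under its "aggs" key, and the
+	// metrics are attached to the innermost bucket agg.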
+	query := make(Aggs)
+	nestedAggs := query
+	for _, aggRaw := range bucketAggs {
+		esAggs := make(Aggs)
+		aggJson := simplejson.NewFromAny(aggRaw)
+		aggType, err := aggJson.Get("type").String()
+		if err != nil {
+			return nil, err
+		}
+		id, err := aggJson.Get("id").String()
+		if err != nil {
+			return nil, err
+		}
+
+		switch aggType {
+		case "date_histogram":
+			esAggs["date_histogram"] = b.getDateHistogramAgg(aggJson)
+		case "histogram":
+			esAggs["histogram"] = b.getHistogramAgg(aggJson)
+		case "filters":
+			esAggs["filters"] = b.getFilters(aggJson)
+		case "terms":
+			terms := b.getTerms(aggJson)
+			esAggs["terms"] = terms.Terms
+			esAggs["aggs"] = terms.Aggs
+		case "geohash_grid":
+			return nil, errors.New("alert does not support Geo_Hash_Grid")
+		}
+
+		if _, ok := nestedAggs["aggs"]; !ok {
+			nestedAggs["aggs"] = make(Aggs)
+		}
+
+		if aggs, ok := (nestedAggs["aggs"]).(Aggs); ok {
+			aggs[id] = esAggs
+		}
+		nestedAggs = esAggs
+
+	}
+	nestedAggs["aggs"] = make(Aggs)
+
+	for _, metricRaw := range metrics {
+		metric := make(Metric)
+		metricJson := simplejson.NewFromAny(metricRaw)
+
+		id, err := metricJson.Get("id").String()
+		if err != nil {
+			return nil, err
+		}
+		metricType, err := metricJson.Get("type").String()
+		if err != nil {
+			return nil, err
+		}
+		if metricType == "count" {
+			continue
+		}
+
+		// todo support pipeline Agg
+
+		settings := metricJson.Get("settings").MustMap()
+		settings["field"] = metricJson.Get("field").MustString()
+		metric[metricType] = settings
+		nestedAggs["aggs"].(Aggs)[id] = metric
+	}
+	return query, nil
+}
+
+func (b *QueryBuilder) getDateHistogramAgg(model *simplejson.Json) DateHistogramAgg {
+	agg := &DateHistogramAgg{}
+	settings := simplejson.NewFromAny(model.Get("settings").Interface())
+	interval, err := settings.Get("interval").String()
+	if err == nil {
+		agg.Interval = interval
+	}
+	agg.Field = b.TimeField
+	agg.MinDocCount = settings.Get("min_doc_count").MustInt(0)
+	agg.ExtendedBounds = ExtendedBounds{"$timeFrom", "$timeTo"}
+	agg.Format = "epoch_millis"
+
+	if agg.Interval == "auto" {
+		agg.Interval = "$__interval"
+	}
+
+	missing, err := settings.Get("missing").String()
+	if err == nil {
+		agg.Missing = missing
+	}
+	return *agg
+}
+
+func (b *QueryBuilder) getHistogramAgg(model *simplejson.Json) HistogramAgg {
+	agg := &HistogramAgg{}
+	settings := simplejson.NewFromAny(model.Get("settings").Interface())
+	interval, err := settings.Get("interval").String()
+	if err == nil {
+		agg.Interval = interval
+	}
+	field, err := model.Get("field").String()
+	if err == nil {
+		agg.Field = field
+	}
+	agg.MinDocCount = settings.Get("min_doc_count").MustInt(0)
+	missing, err := settings.Get("missing").String()
+	if err == nil {
+		agg.Missing = missing
+	}
+	return *agg
+}
+
+func (b *QueryBuilder) getFilters(model *simplejson.Json) FiltersAgg {
+	agg := &FiltersAgg{Filter: make(map[string]interface{})}
+	settings := simplejson.NewFromAny(model.Get("settings").Interface())
+	for _, filter := range settings.Get("filters").MustArray() {
+		filterJson := simplejson.NewFromAny(filter)
+		query := filterJson.Get("query").MustString("")
+		label := filterJson.Get("label").MustString("")
+		if label == "" {
+			label = query
+		}
+		agg.Filter[label] = newQueryStringFilter(true, query)
+	}
+	return *agg
+}
+
+func (b *QueryBuilder) getTerms(model *simplejson.Json) TermsAgg {
+	agg := &TermsAgg{}
+	settings := simplejson.NewFromAny(model.Get("settings").Interface())
+	agg.Terms.Field = model.Get("field").MustString()
+	if settings == nil {
+		return *agg
+	}
+	agg.Terms.Size = settings.Get("size").MustInt(0)
+	if agg.Terms.Size == 0 {
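+		// No explicit size configured on the panel; fall back to a generous
+		// default rather than Elasticsearch's own terms default of 10.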
agg.Terms.Size = 500 + } + orderBy := settings.Get("orderBy").MustString("") + if orderBy != "" { + agg.Terms.Order[orderBy] = settings.Get("order").MustString("") + // if orderBy is a int, means this fields is metric result value + // TODO set subAggs + } + + minDocCount, err := settings.Get("min_doc_count").Int() + if err == nil { + agg.Terms.MinDocCount = minDocCount + } + + missing, err := settings.Get("missing").String() + if err == nil { + agg.Terms.Missing = missing + } + + return *agg +} diff --git a/pkg/tsdb/elasticsearch/response_parser.go b/pkg/tsdb/elasticsearch/response_parser.go new file mode 100644 index 00000000000..bc47a3f935e --- /dev/null +++ b/pkg/tsdb/elasticsearch/response_parser.go @@ -0,0 +1,111 @@ +package elasticsearch + +import ( + "errors" + "fmt" + "github.com/grafana/grafana/pkg/components/null" + "github.com/grafana/grafana/pkg/components/simplejson" + "github.com/grafana/grafana/pkg/tsdb" + "strconv" +) + +type ElasticsearchResponseParser struct { + Responses []Response + Targets []QueryBuilder +} + +func (rp *ElasticsearchResponseParser) getTimeSeries() []interface{} { + for i, res := range rp.Responses { + var series []interface{} + target := rp.Targets[i] + props := make(map[string]interface{}) + rp.processBuckets(res.Aggregations, target, &series, props, 0) + } +} + +func findAgg(target QueryBuilder, aggId string) (*simplejson.Json, error) { + for _, v := range target.BucketAggs { + aggDef := simplejson.NewFromAny(v) + if aggId == aggDef.Get("id").MustString() { + return aggDef, nil + } + } + return nil, errors.New("can't found aggDef, aggID:" + aggId) +} + +func (rp *ElasticsearchResponseParser) processBuckets(aggs map[string]interface{}, target QueryBuilder, series *[]interface{}, props map[string]interface{}, depth int) error { + maxDepth := len(target.BucketAggs) - 1 + for aggId, v := range aggs { + aggDef, _ := findAgg(target, aggId) + esAgg := simplejson.NewFromAny(v) + if aggDef == nil { + continue + } + + if depth == maxDepth { + if aggDef.Get("type").MustString() == "date_histogram" { + rp.processMetrics(esAgg, target, series, props) + } + } + + } + +} + +func mapCopy(originalMap, newMap *map[string]string) { + for k, v := range originalMap { + newMap[k] = v + } + +} + +func (rp *ElasticsearchResponseParser) processMetrics(esAgg *simplejson.Json, target QueryBuilder, props map[string]string) ([]*tsdb.TimeSeries, error) { + var series []*tsdb.TimeSeries + for _, v := range target.Metrics { + metric := simplejson.NewFromAny(v) + if metric.Get("hide").MustBool(false) { + continue + } + metricId := fmt.Sprintf("%d", metric.Get("id").MustInt()) + metricField := metric.Get("field").MustString() + + switch metric.Get("type").MustString() { + case "count": + newSeries := tsdb.TimeSeries{} + for _, v := range esAgg.Get("buckets").MustMap() { + bucket := simplejson.NewFromAny(v) + value := bucket.Get("doc_count").MustFloat64() + key := bucket.Get("key").MustFloat64() + newSeries.Points = append(newSeries.Points, tsdb.TimePoint{null.FloatFromPtr(&value), null.FloatFromPtr(&key)}) + } + newSeries.Tags = props + newSeries.Tags["metric"] = "count" + series = append(series, &newSeries) + + case "percentiles": + buckets := esAgg.Get("buckets").MustArray() + if len(buckets) == 0 { + break + } + + firstBucket := simplejson.NewFromAny(buckets[0]) + percentiles := firstBucket.GetPath(metricId, "values").MustMap() + + for percentileName := range percentiles { + newSeries := tsdb.TimeSeries{} + newSeries.Tags = props + newSeries.Tags["metric"] = "p" + percentileName 
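+					// For reference, a percentiles metric (id "3" here is illustrative)
+					// comes back from Elasticsearch roughly as:
+					//
+					//   "buckets": [{ "key": 1521120000000, "doc_count": 10,
+					//                 "3": { "values": { "75.0": 3.5, "99.0": 4.8 } } }]
+					//
+					// so each requested percentile becomes its own series.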
+ newSeries.Tags["field"] = metricField + for _, v := range buckets { + bucket := simplejson.NewFromAny(v) + valueStr := bucket.GetPath(metricId, "values", percentileName).MustString() + value, _ := strconv.ParseFloat(valueStr, 64) + key := bucket.Get("key").MustFloat64() + newSeries.Points = append(newSeries.Points, tsdb.TimePoint{null.FloatFromPtr(&value), null.FloatFromPtr(&key)}) + } + series = append(series, &newSeries) + } + } + } + return series +} diff --git a/public/app/plugins/datasource/elasticsearch/plugin.json b/public/app/plugins/datasource/elasticsearch/plugin.json index 59d26b785ac..89cca1251d5 100644 --- a/public/app/plugins/datasource/elasticsearch/plugin.json +++ b/public/app/plugins/datasource/elasticsearch/plugin.json @@ -20,6 +20,7 @@ "version": "5.0.0" }, + "alerting": true, "annotations": true, "metrics": true, From bc5b59737c2f6f99b64b395de2e20b888d043c97 Mon Sep 17 00:00:00 2001 From: wph95 Date: Sat, 24 Mar 2018 13:06:21 +0800 Subject: [PATCH 005/488] finished CODING PHASE 1 Signed-off-by: wph95 --- pkg/tsdb/elasticsearch/elasticsearch.go | 13 ++++--------- pkg/tsdb/elasticsearch/model_parser.go | 16 ++++++++-------- pkg/tsdb/elasticsearch/models.go | 11 ----------- 3 files changed, 12 insertions(+), 28 deletions(-) diff --git a/pkg/tsdb/elasticsearch/elasticsearch.go b/pkg/tsdb/elasticsearch/elasticsearch.go index d67b4ad902d..8fd82a179e8 100644 --- a/pkg/tsdb/elasticsearch/elasticsearch.go +++ b/pkg/tsdb/elasticsearch/elasticsearch.go @@ -5,7 +5,6 @@ import ( "encoding/json" "errors" "fmt" - "github.com/davecgh/go-spew/spew" "github.com/grafana/grafana/pkg/log" "github.com/grafana/grafana/pkg/models" "github.com/grafana/grafana/pkg/setting" @@ -52,13 +51,9 @@ func (e *ElasticsearchExecutor) Query(ctx context.Context, dsInfo *models.DataSo dsInfo, tsdbQuery.TimeRange, tsdbQuery.Queries, - glog, } - glog.Warn(spew.Sdump(dsInfo)) - glog.Warn(spew.Sdump(tsdbQuery)) - - payload, err := queryParser.Parse() + payload, targets, err := queryParser.Parse() if err != nil { return nil, err } @@ -96,14 +91,14 @@ func (e *ElasticsearchExecutor) Query(ctx context.Context, dsInfo *models.DataSo return nil, err } - glog.Warn(spew.Sdump(responses)) for _, res := range responses.Responses { if res.Err != nil { return nil, errors.New(res.getErrMsg()) } - } - + responseParser := ElasticsearchResponseParser{responses.Responses, targets} + queryRes := responseParser.getTimeSeries() + result.Results["A"] = queryRes return result, nil } diff --git a/pkg/tsdb/elasticsearch/model_parser.go b/pkg/tsdb/elasticsearch/model_parser.go index 136db6baed7..233a35efdc6 100644 --- a/pkg/tsdb/elasticsearch/model_parser.go +++ b/pkg/tsdb/elasticsearch/model_parser.go @@ -5,7 +5,6 @@ import ( "encoding/json" "fmt" "github.com/grafana/grafana/pkg/components/simplejson" - "github.com/grafana/grafana/pkg/log" "github.com/grafana/grafana/pkg/models" "github.com/grafana/grafana/pkg/tsdb" "src/github.com/davecgh/go-spew/spew" @@ -18,38 +17,39 @@ type ElasticSearchQueryParser struct { DsInfo *models.DataSource TimeRange *tsdb.TimeRange Queries []*tsdb.Query - glog log.Logger } -func (qp *ElasticSearchQueryParser) Parse() (string, error) { +func (qp *ElasticSearchQueryParser) Parse() (string, []*QueryBuilder, error) { payload := bytes.Buffer{} queryHeader := qp.getQueryHeader() - + targets := make([]*QueryBuilder, 0) for _, q := range qp.Queries { timeField, err := q.Model.Get("timeField").String() if err != nil { - return "", err + return "", nil, err } rawQuery := q.Model.Get("query").MustString("") 
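		// For orientation: each query q contributes two newline-terminated lines
		// to the final _msearch payload, a header and a body, roughly (sketch;
		// the index name is illustrative and comes from the datasource):
		//
		//   {"ignore_unavailable":true,"index":"logstash-*","search_type":"query_then_fetch"}
		//   {"size":0,"query":{"bool":{"filter":[...]}},"aggs":{...}}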
bucketAggs := q.Model.Get("bucketAggs").MustArray() metrics := q.Model.Get("metrics").MustArray() alias := q.Model.Get("alias").MustString("") builder := QueryBuilder{timeField, rawQuery, bucketAggs, metrics, alias} + targets = append(targets, &builder) query, err := builder.Build() if err != nil { - return "", err + return "", nil, err } queryBytes, err := json.Marshal(query) if err != nil { - return "", err + return "", nil, err } payload.WriteString(queryHeader.String() + "\n") payload.WriteString(string(queryBytes) + "\n") } + p, err := qp.payloadReplace(payload.String(), qp.DsInfo.JsonData) - return qp.payloadReplace(payload.String(), qp.DsInfo.JsonData) + return p, targets, err } diff --git a/pkg/tsdb/elasticsearch/models.go b/pkg/tsdb/elasticsearch/models.go index 8662f6efbd3..d758e2159de 100644 --- a/pkg/tsdb/elasticsearch/models.go +++ b/pkg/tsdb/elasticsearch/models.go @@ -118,14 +118,3 @@ func (r *Response) getErrMsg() (string) { } return msg.String() } - -type PercentilesResult struct { - Buckets struct { - map[string]struct { - Values map[string]string `json:"values"` - } - KeyAsString string `json:"key_as_string"` - Key int64 `json:"key"` - DocCount int `json:"doc_count"` - } `json:"buckets"` -} From 1e275d0cd1ff976f44dfce6affe8661160cdd873 Mon Sep 17 00:00:00 2001 From: wph95 Date: Sun, 25 Mar 2018 02:18:28 +0800 Subject: [PATCH 006/488] set right series name Signed-off-by: wph95 --- pkg/tsdb/elasticsearch/query.go | 14 +- pkg/tsdb/elasticsearch/query_def.go | 26 +++ pkg/tsdb/elasticsearch/response_parser.go | 219 ++++++++++++++++++---- 3 files changed, 215 insertions(+), 44 deletions(-) create mode 100644 pkg/tsdb/elasticsearch/query_def.go diff --git a/pkg/tsdb/elasticsearch/query.go b/pkg/tsdb/elasticsearch/query.go index 69dd5caa3b4..d6d70e79a2a 100644 --- a/pkg/tsdb/elasticsearch/query.go +++ b/pkg/tsdb/elasticsearch/query.go @@ -3,10 +3,11 @@ package elasticsearch import ( "errors" "github.com/grafana/grafana/pkg/components/simplejson" + "strconv" ) var rangeFilterSetting = RangeFilterSetting{Gte: "$timeFrom", - Lte: "$timeTo", + Lte: "$timeTo", Format: "epoch_millis"} type QueryBuilder struct { @@ -173,18 +174,21 @@ func (b *QueryBuilder) getFilters(model *simplejson.Json) FiltersAgg { } func (b *QueryBuilder) getTerms(model *simplejson.Json) TermsAgg { - agg := &TermsAgg{} + agg := &TermsAgg{Aggs: make(Aggs)} settings := simplejson.NewFromAny(model.Get("settings").Interface()) agg.Terms.Field = model.Get("field").MustString() if settings == nil { return *agg } - agg.Terms.Size = settings.Get("size").MustInt(0) - if agg.Terms.Size == 0 { - agg.Terms.Size = 500 + sizeStr := settings.Get("size").MustString("") + size, err := strconv.Atoi(sizeStr) + if err != nil { + size = 500 } + agg.Terms.Size = size orderBy := settings.Get("orderBy").MustString("") if orderBy != "" { + agg.Terms.Order = make(map[string]interface{}) agg.Terms.Order[orderBy] = settings.Get("order").MustString("") // if orderBy is a int, means this fields is metric result value // TODO set subAggs diff --git a/pkg/tsdb/elasticsearch/query_def.go b/pkg/tsdb/elasticsearch/query_def.go new file mode 100644 index 00000000000..5dc02aa359e --- /dev/null +++ b/pkg/tsdb/elasticsearch/query_def.go @@ -0,0 +1,26 @@ +package elasticsearch + +var metricAggType = map[string]string{ + "count": "Count", + "avg": "Average", + "sum": "Sum", + "max": "Max", + "min": "Min", + "extended_stats": "Extended Stats", + "percentiles": "Percentiles", + "cardinality": "Unique Count", + "moving_avg": "Moving Average", + 
"derivative": "Derivative", + "raw_document": "Raw Document", +} + +var extendedStats = map[string]string{ + "avg": "Avg", + "min": "Min", + "max": "Max", + "sum": "Sum", + "count": "Count", + "std_deviation": "Std Dev", + "std_deviation_bounds_upper": "Std Dev Upper", + "std_deviation_bounds_lower": "Std Dev Lower", +} diff --git a/pkg/tsdb/elasticsearch/response_parser.go b/pkg/tsdb/elasticsearch/response_parser.go index bc47a3f935e..a2a8565641f 100644 --- a/pkg/tsdb/elasticsearch/response_parser.go +++ b/pkg/tsdb/elasticsearch/response_parser.go @@ -7,33 +7,30 @@ import ( "github.com/grafana/grafana/pkg/components/simplejson" "github.com/grafana/grafana/pkg/tsdb" "strconv" + "regexp" + "strings" ) type ElasticsearchResponseParser struct { Responses []Response - Targets []QueryBuilder + Targets []*QueryBuilder } -func (rp *ElasticsearchResponseParser) getTimeSeries() []interface{} { +func (rp *ElasticsearchResponseParser) getTimeSeries() *tsdb.QueryResult { + queryRes := tsdb.NewQueryResult() for i, res := range rp.Responses { - var series []interface{} target := rp.Targets[i] - props := make(map[string]interface{}) + props := make(map[string]string) + series := make([]*tsdb.TimeSeries, 0) rp.processBuckets(res.Aggregations, target, &series, props, 0) + rp.nameSeries(&series, target) + queryRes.Series = append(queryRes.Series, series...) } + return queryRes } -func findAgg(target QueryBuilder, aggId string) (*simplejson.Json, error) { - for _, v := range target.BucketAggs { - aggDef := simplejson.NewFromAny(v) - if aggId == aggDef.Get("id").MustString() { - return aggDef, nil - } - } - return nil, errors.New("can't found aggDef, aggID:" + aggId) -} - -func (rp *ElasticsearchResponseParser) processBuckets(aggs map[string]interface{}, target QueryBuilder, series *[]interface{}, props map[string]interface{}, depth int) error { +func (rp *ElasticsearchResponseParser) processBuckets(aggs map[string]interface{}, target *QueryBuilder, series *[]*tsdb.TimeSeries, props map[string]string, depth int) (error) { + var err error maxDepth := len(target.BucketAggs) - 1 for aggId, v := range aggs { aggDef, _ := findAgg(target, aggId) @@ -44,43 +41,59 @@ func (rp *ElasticsearchResponseParser) processBuckets(aggs map[string]interface{ if depth == maxDepth { if aggDef.Get("type").MustString() == "date_histogram" { - rp.processMetrics(esAgg, target, series, props) + err = rp.processMetrics(esAgg, target, series, props) + if err != nil { + return err + } + } else { + return fmt.Errorf("not support type:%s", aggDef.Get("type").MustString()) + } + } else { + for i, b := range esAgg.Get("buckets").MustArray() { + field := aggDef.Get("field").MustString() + bucket := simplejson.NewFromAny(b) + newProps := props + if key, err := bucket.Get("key").String(); err == nil { + newProps[field] = key + } else { + props["filter"] = strconv.Itoa(i) + } + + if key, err := bucket.Get("key_as_string").String(); err == nil { + props[field] = key + } + rp.processBuckets(bucket.MustMap(), target, series, newProps, depth+1) } } } + return nil } -func mapCopy(originalMap, newMap *map[string]string) { - for k, v := range originalMap { - newMap[k] = v - } - -} - -func (rp *ElasticsearchResponseParser) processMetrics(esAgg *simplejson.Json, target QueryBuilder, props map[string]string) ([]*tsdb.TimeSeries, error) { - var series []*tsdb.TimeSeries +func (rp *ElasticsearchResponseParser) processMetrics(esAgg *simplejson.Json, target *QueryBuilder, series *[]*tsdb.TimeSeries, props map[string]string) (error) { for _, v := range 
target.Metrics { metric := simplejson.NewFromAny(v) if metric.Get("hide").MustBool(false) { continue } - metricId := fmt.Sprintf("%d", metric.Get("id").MustInt()) - metricField := metric.Get("field").MustString() - switch metric.Get("type").MustString() { + metricId := metric.Get("id").MustString() + metricField := metric.Get("field").MustString() + metricType := metric.Get("type").MustString() + + switch metricType { case "count": newSeries := tsdb.TimeSeries{} - for _, v := range esAgg.Get("buckets").MustMap() { + for _, v := range esAgg.Get("buckets").MustArray() { bucket := simplejson.NewFromAny(v) - value := bucket.Get("doc_count").MustFloat64() - key := bucket.Get("key").MustFloat64() - newSeries.Points = append(newSeries.Points, tsdb.TimePoint{null.FloatFromPtr(&value), null.FloatFromPtr(&key)}) + value := castToNullFloat(bucket.Get("doc_count")) + key := castToNullFloat(bucket.Get("key")) + newSeries.Points = append(newSeries.Points, tsdb.TimePoint{value, key}) } newSeries.Tags = props newSeries.Tags["metric"] = "count" - series = append(series, &newSeries) + *series = append(*series, &newSeries) case "percentiles": buckets := esAgg.Get("buckets").MustArray() @@ -98,14 +111,142 @@ func (rp *ElasticsearchResponseParser) processMetrics(esAgg *simplejson.Json, ta newSeries.Tags["field"] = metricField for _, v := range buckets { bucket := simplejson.NewFromAny(v) - valueStr := bucket.GetPath(metricId, "values", percentileName).MustString() - value, _ := strconv.ParseFloat(valueStr, 64) - key := bucket.Get("key").MustFloat64() - newSeries.Points = append(newSeries.Points, tsdb.TimePoint{null.FloatFromPtr(&value), null.FloatFromPtr(&key)}) + value := castToNullFloat(bucket.GetPath(metricId, "values", percentileName)) + key := castToNullFloat(bucket.Get("key")) + newSeries.Points = append(newSeries.Points, tsdb.TimePoint{value, key}) } - series = append(series, &newSeries) + *series = append(*series, &newSeries) + } + default: + newSeries := tsdb.TimeSeries{} + newSeries.Tags = props + newSeries.Tags["metric"] = metricType + newSeries.Tags["field"] = metricField + for _, v := range esAgg.Get("buckets").MustArray() { + bucket := simplejson.NewFromAny(v) + key := castToNullFloat(bucket.Get("key")) + valueObj, err := bucket.Get(metricId).Map() + if err != nil { + break + } + var value null.Float + if _, ok := valueObj["normalized_value"]; ok { + value = castToNullFloat(bucket.GetPath(metricId, "normalized_value")) + } else { + value = castToNullFloat(bucket.GetPath(metricId, "value")) + } + newSeries.Points = append(newSeries.Points, tsdb.TimePoint{value, key}) + } + *series = append(*series, &newSeries) + } + } + return nil +} + +func (rp *ElasticsearchResponseParser) nameSeries(seriesList *[]*tsdb.TimeSeries, target *QueryBuilder) { + set := make(map[string]string) + for _, v := range *seriesList { + if metricType, exists := v.Tags["metric"]; exists { + if _, ok := set[metricType]; !ok { + set[metricType] = "" } } } - return series + metricTypeCount := len(set) + for _, series := range *seriesList { + series.Name = rp.getSeriesName(series, target, metricTypeCount) + } + +} + +func (rp *ElasticsearchResponseParser) getSeriesName(series *tsdb.TimeSeries, target *QueryBuilder, metricTypeCount int) (string) { + metricName := rp.getMetricName(series.Tags["metric"]) + delete(series.Tags, "metric") + + field := "" + if v, ok := series.Tags["field"]; ok { + field = v + delete(series.Tags, "field") + } + + if target.Alias != "" { + var re = regexp.MustCompile(`{{([\s\S]+?)}}`) + for _, match := 
range re.FindAllString(target.Alias, -1) { + group := match[2:len(match)-2] + + if strings.HasPrefix(group, "term ") { + if term, ok := series.Tags["term "]; ok { + strings.Replace(target.Alias, match, term, 1) + } + } + if v, ok := series.Tags[group]; ok { + strings.Replace(target.Alias, match, v, 1) + } + + switch group { + case "metric": + strings.Replace(target.Alias, match, metricName, 1) + case "field": + strings.Replace(target.Alias, match, field, 1) + } + + } + } + // todo, if field and pipelineAgg + if field != "" { + metricName += " " + field + } + + if len(series.Tags) == 0 { + return metricName + } + + name := "" + for _, v := range series.Tags { + name += v + " " + } + + if metricTypeCount == 1 { + return strings.TrimSpace(name) + } + + return strings.TrimSpace(name) + " " + metricName + +} + +func (rp *ElasticsearchResponseParser) getMetricName(metric string) string { + if text, ok := metricAggType[metric]; ok { + return text + } + + if text, ok := extendedStats[metric]; ok { + return text + } + + return metric +} + +func castToNullFloat(j *simplejson.Json) null.Float { + f, err := j.Float64() + if err == nil { + return null.FloatFrom(f) + } + + s, err := j.String() + if err == nil { + v, _ := strconv.ParseFloat(s, 64) + return null.FloatFromPtr(&v) + } + + return null.NewFloat(0, false) +} + +func findAgg(target *QueryBuilder, aggId string) (*simplejson.Json, error) { + for _, v := range target.BucketAggs { + aggDef := simplejson.NewFromAny(v) + if aggId == aggDef.Get("id").MustString() { + return aggDef, nil + } + } + return nil, errors.New("can't found aggDef, aggID:" + aggId) } From d6cdc2497c929039f93830dd8b7a61661046ae57 Mon Sep 17 00:00:00 2001 From: wph95 Date: Mon, 26 Mar 2018 16:13:14 +0800 Subject: [PATCH 007/488] Handle Interval Date Format similar to the JS variant https://github.com/grafana/grafana/pull/10343/commits/7e14e272fa37df5b4d412c16845d1e525711f726 --- Gopkg.lock | 8 +- Gopkg.toml | 4 + pkg/tsdb/elasticsearch/model_parser.go | 46 +- pkg/tsdb/elasticsearch/model_parser_test.go | 49 + vendor/github.com/leibowitz/moment/diff.go | 75 ++ vendor/github.com/leibowitz/moment/moment.go | 1185 +++++++++++++++++ .../leibowitz/moment/moment_parser.go | 100 ++ .../github.com/leibowitz/moment/parse_day.go | 32 + .../leibowitz/moment/strftime_parser.go | 68 + 9 files changed, 1559 insertions(+), 8 deletions(-) create mode 100644 pkg/tsdb/elasticsearch/model_parser_test.go create mode 100644 vendor/github.com/leibowitz/moment/diff.go create mode 100644 vendor/github.com/leibowitz/moment/moment.go create mode 100644 vendor/github.com/leibowitz/moment/moment_parser.go create mode 100644 vendor/github.com/leibowitz/moment/parse_day.go create mode 100644 vendor/github.com/leibowitz/moment/strftime_parser.go diff --git a/Gopkg.lock b/Gopkg.lock index ebadad8331b..78316b77664 100644 --- a/Gopkg.lock +++ b/Gopkg.lock @@ -295,6 +295,12 @@ packages = ["."] revision = "7cafcd837844e784b526369c9bce262804aebc60" +[[projects]] + branch = "master" + name = "github.com/leibowitz/moment" + packages = ["."] + revision = "8548108dcca204a1110b99e5fec966817499fe84" + [[projects]] branch = "master" name = "github.com/lib/pq" @@ -642,6 +648,6 @@ [solve-meta] analyzer-name = "dep" analyzer-version = 1 - inputs-digest = "5e65aeace832f1b4be17e7ff5d5714513c40f31b94b885f64f98f2332968d7c6" + inputs-digest = "9895ff7b1516b9639d0fc280ca155c8958486656a2086fc45e91f727fccea0d2" solver-name = "gps-cdcl" solver-version = 1 diff --git a/Gopkg.toml b/Gopkg.toml index df163e01ed3..1f8cbba6e11 100644 --- 
a/Gopkg.toml +++ b/Gopkg.toml @@ -201,3 +201,7 @@ ignored = [ [[constraint]] name = "github.com/denisenkom/go-mssqldb" revision = "270bc3860bb94dd3a3ffd047377d746c5e276726" + +[[constraint]] + branch = "master" + name = "github.com/leibowitz/moment" diff --git a/pkg/tsdb/elasticsearch/model_parser.go b/pkg/tsdb/elasticsearch/model_parser.go index 233a35efdc6..7da6765e06c 100644 --- a/pkg/tsdb/elasticsearch/model_parser.go +++ b/pkg/tsdb/elasticsearch/model_parser.go @@ -7,6 +7,7 @@ import ( "github.com/grafana/grafana/pkg/components/simplejson" "github.com/grafana/grafana/pkg/models" "github.com/grafana/grafana/pkg/tsdb" + "github.com/leibowitz/moment" "src/github.com/davecgh/go-spew/spew" "strconv" "strings" @@ -63,7 +64,7 @@ func (qp *ElasticSearchQueryParser) getQueryHeader() *QueryHeader { } header.SearchType = searchType header.IgnoreUnavailable = true - header.Index = qp.getIndexList() + header.Index = getIndexList(qp.DsInfo.Database, qp.DsInfo.JsonData.Get("interval").MustString(""), qp.TimeRange) if esVersion >= 56 { header.MaxConcurrentShardRequests = qp.DsInfo.JsonData.Get("maxConcurrentShardRequests").MustInt() @@ -87,11 +88,42 @@ func (qp *ElasticSearchQueryParser) payloadReplace(payload string, model *simple return payload, nil } -func (qp *ElasticSearchQueryParser) getIndexList() string { - _, err := qp.DsInfo.JsonData.Get("interval").String() - if err != nil { - return qp.DsInfo.Database +func getIndexList(pattern string, interval string, timeRange *tsdb.TimeRange) string { + if interval == "" { + return pattern } - // todo: support interval - return qp.DsInfo.Database + + var indexes []string + indexParts := strings.Split(strings.TrimLeft(pattern, "["), "]") + indexBase := indexParts[0] + if len(indexParts) <= 1 { + return pattern + } + + indexDateFormat := indexParts[1] + + start := moment.NewMoment(timeRange.MustGetFrom()) + end := moment.NewMoment(timeRange.MustGetTo()) + + indexes = append(indexes, fmt.Sprintf("%s%s", indexBase, start.Format(indexDateFormat))) + for start.IsBefore(*end) { + switch interval { + case "Hourly": + start = start.AddHours(1) + + case "Daily": + start = start.AddDay() + + case "Weekly": + start = start.AddWeeks(1) + + case "Monthly": + start = start.AddMonths(1) + + case "Yearly": + start = start.AddYears(1) + } + indexes = append(indexes, fmt.Sprintf("%s%s", indexBase, start.Format(indexDateFormat))) + } + return strings.Join(indexes, ",") } diff --git a/pkg/tsdb/elasticsearch/model_parser_test.go b/pkg/tsdb/elasticsearch/model_parser_test.go new file mode 100644 index 00000000000..aa7336fb69b --- /dev/null +++ b/pkg/tsdb/elasticsearch/model_parser_test.go @@ -0,0 +1,49 @@ +package elasticsearch + +import ( + "github.com/grafana/grafana/pkg/tsdb" + . 
"github.com/smartystreets/goconvey/convey" + "strconv" + "strings" + "testing" +) + +func makeTime(hour int) string { + //unixtime 1500000000 == 2017-07-14T02:40:00+00:00 + return strconv.Itoa((1500000000 + hour*60*60) * 1000) +} + +func getIndexListByTime(pattern string, interval string, hour int) string { + timeRange := &tsdb.TimeRange{ + From: makeTime(0), + To: makeTime(hour), + } + return getIndexList(pattern, interval, timeRange) +} + +func TestElasticsearchGetIndexList(t *testing.T) { + Convey("Test Elasticsearch getIndex ", t, func() { + + Convey("Parse Interval Formats", func() { + So(getIndexListByTime("[logstash-]YYYY.MM.DD", "Daily", 48), + ShouldEqual, "logstash-2017.07.14,logstash-2017.07.15,logstash-2017.07.16") + + So(len(strings.Split(getIndexListByTime("[logstash-]YYYY.MM.DD.HH", "Hourly", 3), ",")), + ShouldEqual, 4) + + So(getIndexListByTime("[logstash-]YYYY.W", "Weekly", 100), + ShouldEqual, "logstash-2017.28,logstash-2017.29") + + So(getIndexListByTime("[logstash-]YYYY.MM", "Monthly", 700), + ShouldEqual, "logstash-2017.07,logstash-2017.08") + + So(getIndexListByTime("[logstash-]YYYY", "Yearly", 10000), + ShouldEqual, "logstash-2017,logstash-2018,logstash-2019") + }) + + Convey("No Interval", func() { + index := getIndexListByTime("logstash-test", "", 1) + So(index, ShouldEqual, "logstash-test") + }) + }) +} diff --git a/vendor/github.com/leibowitz/moment/diff.go b/vendor/github.com/leibowitz/moment/diff.go new file mode 100644 index 00000000000..0d6b3935adf --- /dev/null +++ b/vendor/github.com/leibowitz/moment/diff.go @@ -0,0 +1,75 @@ +package moment + +import ( + "fmt" + "math" + "time" +) + +// @todo In months/years requires the old and new to calculate correctly, right? +// @todo decide how to handle rounding (i.e. always floor?) +type Diff struct { + duration time.Duration +} + +func (d *Diff) InSeconds() int { + return int(d.duration.Seconds()) +} + +func (d *Diff) InMinutes() int { + return int(d.duration.Minutes()) +} + +func (d *Diff) InHours() int { + return int(d.duration.Hours()) +} + +func (d *Diff) InDays() int { + return int(math.Floor(float64(d.InSeconds()) / 86400)) +} + +// This depends on where the weeks fall? 
+func (d *Diff) InWeeks() int { + return int(math.Floor(float64(d.InDays() / 7))) +} + +func (d *Diff) InMonths() int { + return 0 +} + +func (d *Diff) InYears() int { + return 0 +} + +// http://momentjs.com/docs/#/durations/humanize/ +func (d *Diff) Humanize() string { + diffInSeconds := d.InSeconds() + + if diffInSeconds <= 45 { + return fmt.Sprintf("%d seconds ago", diffInSeconds) + } else if diffInSeconds <= 90 { + return "a minute ago" + } + + diffInMinutes := d.InMinutes() + + if diffInMinutes <= 45 { + return fmt.Sprintf("%d minutes ago", diffInMinutes) + } else if diffInMinutes <= 90 { + return "an hour ago" + } + + diffInHours := d.InHours() + + if diffInHours <= 22 { + return fmt.Sprintf("%d hours ago", diffInHours) + } else if diffInHours <= 36 { + return "a day ago" + } + + return "diff is in days" +} + +// In Months + +// In years diff --git a/vendor/github.com/leibowitz/moment/moment.go b/vendor/github.com/leibowitz/moment/moment.go new file mode 100644 index 00000000000..13c8ef7dbef --- /dev/null +++ b/vendor/github.com/leibowitz/moment/moment.go @@ -0,0 +1,1185 @@ +package moment + +import ( + "fmt" + "regexp" + "strconv" + "strings" + "time" +) + +// links +// http://en.wikipedia.org/wiki/ISO_week_date +// http://golang.org/src/pkg/time/format.go +// http://www.php.net/manual/en/class.datetime.php#datetime.constants.rfc822 +// http://php.net/manual/en/function.date.php +// http://www.php.net/manual/en/datetime.formats.relative.php + +// @todo are these constants needed if they are in the time package? +// There are a lot of extras here, and RFC822 doesn't match up. Why? +// Also, is timezone usage wrong? Double-check +const ( + ATOM = "2006-01-02T15:04:05Z07:00" + COOKIE = "Monday, 02-Jan-06 15:04:05 MST" + ISO8601 = "2006-01-02T15:04:05Z0700" + RFC822 = "Mon, 02 Jan 06 15:04:05 Z0700" + RFC850 = "Monday, 02-Jan-06 15:04:05 MST" + RFC1036 = "Mon, 02 Jan 06 15:04:05 Z0700" + RFC1123 = "Mon, 02 Jan 2006 15:04:05 Z0700" + RFC2822 = "Mon, 02 Jan 2006 15:04:05 Z0700" + RFC3339 = "2006-01-02T15:04:05Z07:00" + RSS = "Mon, 02 Jan 2006 15:04:05 Z0700" + W3C = "2006-01-02T15:04:05Z07:00" +) + +var ( + regex_days = "monday|mon|tuesday|tues|wednesday|wed|thursday|thurs|friday|fri|saturday|sat|sunday|sun" + regex_period = "second|minute|hour|day|week|month|year" + regex_numbers = "one|two|three|four|five|six|seven|eight|nine|ten" +) + +// regexp +var ( + compiled = regexp.MustCompile(`\s{2,}`) + relativeday = regexp.MustCompile(`(yesterday|today|tomorrow)`) + //relative1 = regexp.MustCompile(`(first|last) day of (this|next|last|previous) (week|month|year)`) + //relative2 = regexp.MustCompile(`(first|last) day of (` + "jan|january|feb|february|mar|march|apr|april|may|jun|june|jul|july|aug|august|sep|september|oct|october|nov|november|dec|december" + `)(?:\s(\d{4,4}))?`) + relative3 = regexp.MustCompile(`((?Pthis|next|last|previous) )?(` + regex_days + `)`) + //relativeval = regexp.MustCompile(`([0-9]+) (day|week|month|year)s? ago`) + ago = regexp.MustCompile(`([0-9]+) (` + regex_period + `)s? ago`) + ordinal = regexp.MustCompile("([0-9]+)(st|nd|rd|th)") + written = regexp.MustCompile(regex_numbers) + relativediff = regexp.MustCompile(`([\+\-])?([0-9]+),? 
?(` + regex_period + `)s?`) + relativetime = regexp.MustCompile(`(?P\d\d?):(?P\d\d?)(:(?P\d\d?))?\s?(?Pam|pm)?\s?(?P[a-z]{3,3})?|(?Pnoon|midnight)`) + yearmonthday = regexp.MustCompile(`(?P\d{4})-(?P\d{1,2})-(?P\d{1,2})`) + relativeperiod = regexp.MustCompile(`(?Pthis|next|last) (week|month|year)`) + numberRegex = regexp.MustCompile("([0-9]+)(?:)") +) + +// http://golang.org/src/pkg/time/format.go?s=12686:12728#L404 + +// Timezone implementation +// https://groups.google.com/forum/#!topic/golang-nuts/XEVN4QwTvHw +// http://en.wikipedia.org/wiki/Zone.tab + +// Support ISO8601 Duration Parsing? +// http://en.wikipedia.org/wiki/ISO_8601 + +// Differences +// Months are NOT zero-index, MOmentJS they are +// Weeks are 0 indexed +// -- Sunday being the last day of the week ISO-8601 - is that diff from Moment? +// From/FromNow Return a Diff object rather than strings + +// Support for locale and languages with English as default + +// Support for strftime +// https://github.com/benjaminoakes/moment-strftime +// Format: https://php.net/strftime + +type Moment struct { + time time.Time + + Parser +} + +type Parser interface { + Convert(string) string +} + +func New() *Moment { + m := &Moment{time.Now(), new(MomentParser)} + + return m +} + +func NewMoment(t time.Time) *Moment { + m := &Moment{t, new(MomentParser)} + + return m +} + +func (m *Moment) GetTime() time.Time { + return m.time +} + +func (m *Moment) Now() *Moment { + m.time = time.Now().In(m.GetTime().Location()) + + return m +} + +func (m *Moment) Moment(layout string, datetime string) *Moment { + return m.MomentGo(m.Convert(layout), datetime) +} + +func (m *Moment) MomentGo(layout string, datetime string) *Moment { + time, _ := time.Parse(layout, datetime) + + m.time = time + + return m +} + +// This method is nowhere near done - requires lots of work. +func (m *Moment) Strtotime(str string) *Moment { + str = strings.ToLower(strings.TrimSpace(str)) + str = compiled.ReplaceAllString(str, " ") + + // Replace written numbers (i.e. nine, ten) with actual numbers (9, 10) + str = written.ReplaceAllStringFunc(str, func(n string) string { + switch n { + case "one": + return "1" + case "two": + return "2" + case "three": + return "3" + case "four": + return "4" + case "five": + return "5" + case "six": + return "6" + case "seven": + return "7" + case "eight": + return "8" + case "nine": + return "9" + case "ten": + return "10" + } + + return "" + }) + + // Remove ordinal suffixes st, nd, rd, th + str = ordinal.ReplaceAllString(str, "$1") + + // Replace n second|minute|hour... ago to -n second|minute|hour... 
to consolidate parsing + str = ago.ReplaceAllString(str, "-$1 $2") + + // Look for relative +1day, +3 days 5 hours 15 minutes + if match := relativediff.FindAllStringSubmatch(str, -1); match != nil { + for i := range match { + switch match[i][1] { + case "-": + number, _ := strconv.Atoi(match[i][2]) + m.Subtract(match[i][3], number) + default: + number, _ := strconv.Atoi(match[i][2]) + m.Add(match[i][3], number) + } + + str = strings.Replace(str, match[i][0], "", 1) + } + } + + // Remove any words that aren't needed for consistency + str = strings.Replace(str, " at ", " ", -1) + str = strings.Replace(str, " on ", " ", -1) + + // Support for interchangeable previous/last + str = strings.Replace(str, "previous", "last", -1) + + var dateDefaults = map[string]int{ + "year": 0, + "month": 0, + "day": 0, + } + + dateMatches := dateDefaults + if match := yearmonthday.FindStringSubmatch(str); match != nil { + for i, name := range yearmonthday.SubexpNames() { + if i == 0 { + str = strings.Replace(str, match[i], "", 1) + continue + } + + if match[i] == "" { + continue + } + + if name == "year" || name == "month" || name == "day" { + dateMatches[name], _ = strconv.Atoi(match[i]) + } + + } + + defer m.strtotimeSetDate(dateMatches) + if str == "" { + // Nothing left to parse + return m + } + + str = strings.TrimSpace(str) + } + + // Try to parse out time from the string + var timeDefaults = map[string]int{ + "hour": 0, + "minutes": 0, + "seconds": 0, + } + + timeMatches := timeDefaults + var zone string + if match := relativetime.FindStringSubmatch(str); match != nil { + for i, name := range relativetime.SubexpNames() { + if i == 0 { + str = strings.Replace(str, match[i], "", 1) + continue + } + + if match[i] == "" { + continue + } + + // Midnight is all zero's so nothing to do + if name == "relativetime" && match[i] == "noon" { + timeDefaults["hour"] = 12 + } + + if name == "zone" { + zone = match[i] + } + + if name == "meridiem" && match[i] == "pm" && timeMatches["hour"] < 12 { + timeMatches["hour"] += 12 + } + + if name == "hour" || name == "minutes" || name == "seconds" { + timeMatches[name], _ = strconv.Atoi(match[i]) + } + } + + // Processing time is always last + defer m.strtotimeSetTime(timeMatches, zone) + + if str == "" { + // Nothing left to parse + return m + } + + str = strings.TrimSpace(str) + } + + // m.StartOf("month", "January").GoTo(time.Sunday) + + if match := relativeperiod.FindStringSubmatch(str); match != nil { + period := match[1] + unit := match[2] + + str = strings.Replace(str, match[0], "", 1) + + switch period { + case "next": + if unit == "year" { + m.AddYears(1) + } + if unit == "month" { + m.AddMonths(1) + } + if unit == "week" { + m.AddWeeks(1) + } + case "last": + if unit == "year" { + m.SubYears(1) + } + if unit == "month" { + m.SubMonths(1) + } + if unit == "week" { + m.SubWeeks(1) + } + } + + str = strings.TrimSpace(str) + + // first := regexp.MustCompile("(?Pfirst|last)?") + } + + /* + + relativeday: first day of + relativeperiod: this, last, next + relativeperiodunit week, month, year + day: monday, tues, wednesday + month: january, feb + + + YYYY-MM-DD (HH:MM:SS MST)? + MM-DD-YYYY (HH:MM:SS MST) + 10 September 2015 (HH:MM:SS MST)? + September, 10 2015 (HH:MM:SS MST)? 
+ September 10 2015 (HH:MM:SS M + + this year 2014 + next year 2015 + last year 2013 + + this month April + next month May + last month Mar + + first day of April + last day of April + + + DONE 3PM + DONE 3:00 PM + DONE 3:00:05 MST + 3PM on January 5th + January 5th at 3:00PM + first saturday _of_ next month + first saturday _of_ next month _at_ 3:00PM + saturday of next week + saturday of last week + saturday next week + monday next week + saturday of this week + saturday at 3:00pm + saturday at 4:00PM + saturday at midn + first of january + last of january + january of next year + first day of january + last day of january + first day of February + + DONE midnight + DONE noon + DONE 3 days ago + DONE ten days + DONE 9 weeks ago // Convert to -9 weeks + DONE -9 weeks + + */ + + if match := relativeday.FindStringSubmatch(str); match != nil && len(match) > 1 { + day := match[1] + + str = strings.Replace(str, match[0], "", 1) + + switch day { + case "today": + m.Today() + case "yesterday": + m.Yesterday() + case "tomorrow": + m.Tomorrow() + } + } + + if match := relative3.FindStringSubmatch(str); match != nil { + var when string + for i, name := range relative3.SubexpNames() { + if name == "relperiod" { + when = match[i] + } + } + weekDay := match[len(match)-1] + + str = strings.Replace(str, match[0], "", 1) + + wDay, err := ParseWeekDay(weekDay) + if err == nil { + switch when { + case "last", "previous": + m.GoBackTo(wDay, true) + + case "next": + m.GoTo(wDay, true) + + case "", "this": + m.GoTo(wDay, false) + default: + m.GoTo(wDay, false) + } + } + } + + /* + + + yesterday 11:00 + today 11:00 + tomorrow 11:00 + midnight + noon + DONE +n (second|day|week|month|year)s? + DONE -n (second|day|week|month|year)s? + next (monday|tuesday|wednesday|thursday|friday|saturday|sunday) 11:00 + last (monday|tuesday|wednesday|thursday|friday|saturday|sunday) 11:00 + next (month|year) + last (month|year) + first day of (january|february|march...|december) 2014 + last day of (january|february|march...|december) 2014 + first day of (this|next|last) (week|month|year) + last day of (this|next|last) (week|month|year) + first (monday|tuesday|wednesday) of July 2014 + last (monday|tuesday|wednesday) of July 2014 + n (day|week|month|year)s? 
ago + Monday|Tuesday|Wednesday|Thursday|Friday + Monday (last|this|next) week + + DONE +1 week 2 days 3 hours 4 minutes 5 seconds + */ + + return m +} + +// @todo deal with timezone +func (m *Moment) strtotimeSetTime(time map[string]int, zone string) { + m.SetHour(time["hour"]).SetMinute(time["minutes"]).SetSecond(time["seconds"]) +} + +func (m *Moment) strtotimeSetDate(date map[string]int) { + m.SetYear(date["year"]).SetMonth(time.Month(date["month"])).SetDay(date["day"]) +} + +func (m Moment) Clone() *Moment { + copy := New() + copy.time = m.GetTime() + + return copy +} + +/** + * Getters + * + */ +// https://groups.google.com/forum/#!topic/golang-nuts/pret7hjDc70 +func (m *Moment) Millisecond() { + +} + +func (m *Moment) Second() int { + return m.GetTime().Second() +} + +func (m *Moment) Minute() int { + return m.GetTime().Minute() +} + +func (m *Moment) Hour() int { + return m.GetTime().Hour() +} + +// Day of month +func (m *Moment) Date() int { + return m.DayOfMonth() +} + +// Carbon convenience method +func (m *Moment) DayOfMonth() int { + return m.GetTime().Day() +} + +// Day of week (int or string) +func (m *Moment) Day() time.Weekday { + return m.DayOfWeek() +} + +// Carbon convenience method +func (m *Moment) DayOfWeek() time.Weekday { + return m.GetTime().Weekday() +} + +func (m *Moment) DayOfWeekISO() int { + day := m.GetTime().Weekday() + + if day == time.Sunday { + return 7 + } + + return int(day) +} + +func (m *Moment) DayOfYear() int { + return m.GetTime().YearDay() +} + +// Day of Year with zero padding +func (m *Moment) dayOfYearZero() string { + day := m.GetTime().YearDay() + + if day < 10 { + return fmt.Sprintf("00%d", day) + } + + if day < 100 { + return fmt.Sprintf("0%d", day) + } + + return fmt.Sprintf("%d", day) +} + +// todo panic? +func (m *Moment) Weekday(index int) string { + if index > 6 { + panic("Weekday index must be between 0 and 6") + } + + return time.Weekday(index).String() +} + +func (m *Moment) Week() int { + return 0 +} + +// Is this the week number where as ISOWeekYear is the number of weeks in the year? 
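+// For example, time.Date(2016, 1, 1, 0, 0, 0, 0, time.UTC).ISOWeek() - the
+// standard library call backing this method - returns (2015, 53), because
+// 2016-01-01 still falls in the last ISO week of 2015.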
+// @see http://stackoverflow.com/questions/18478741/get-weeks-in-year +func (m *Moment) ISOWeek() int { + _, week := m.GetTime().ISOWeek() + + return week +} + +// @todo Consider language support +func (m *Moment) Month() time.Month { + return m.GetTime().Month() +} + +func (m *Moment) Quarter() (quarter int) { + quarter = 4 + + switch m.Month() { + case time.January, time.February, time.March: + quarter = 1 + case time.April, time.May, time.June: + quarter = 2 + case time.July, time.August, time.September: + quarter = 3 + } + + return +} + +func (m *Moment) Year() int { + return m.GetTime().Year() +} + +// @see comments for ISOWeek +func (m *Moment) WeekYear() { + +} + +func (m *Moment) ISOWeekYear() { + +} + +/** + * Manipulate + * + */ +func (m *Moment) Add(key string, value int) *Moment { + switch key { + case "years", "year", "y": + m.AddYears(value) + case "months", "month", "M": + m.AddMonths(value) + case "weeks", "week", "w": + m.AddWeeks(value) + case "days", "day", "d": + m.AddDays(value) + case "hours", "hour", "h": + m.AddHours(value) + case "minutes", "minute", "m": + m.AddMinutes(value) + case "seconds", "second", "s": + m.AddSeconds(value) + case "milliseconds", "millisecond", "ms": + + } + + return m +} + +// Carbon +func (m *Moment) AddSeconds(seconds int) *Moment { + return m.addTime(time.Second * time.Duration(seconds)) +} + +// Carbon +func (m *Moment) AddMinutes(minutes int) *Moment { + return m.addTime(time.Minute * time.Duration(minutes)) +} + +// Carbon +func (m *Moment) AddHours(hours int) *Moment { + return m.addTime(time.Hour * time.Duration(hours)) +} + +// Carbon +func (m *Moment) AddDay() *Moment { + return m.AddDays(1) +} + +// Carbon +func (m *Moment) AddDays(days int) *Moment { + m.time = m.GetTime().AddDate(0, 0, days) + + return m +} + +// Carbon +func (m *Moment) AddWeeks(weeks int) *Moment { + return m.AddDays(weeks * 7) +} + +// Carbon +func (m *Moment) AddMonths(months int) *Moment { + m.time = m.GetTime().AddDate(0, months, 0) + + return m +} + +// Carbon +func (m *Moment) AddYears(years int) *Moment { + m.time = m.GetTime().AddDate(years, 0, 0) + + return m +} + +func (m *Moment) addTime(d time.Duration) *Moment { + m.time = m.GetTime().Add(d) + + return m +} + +func (m *Moment) Subtract(key string, value int) *Moment { + switch key { + case "years", "year", "y": + m.SubYears(value) + case "months", "month", "M": + m.SubMonths(value) + case "weeks", "week", "w": + m.SubWeeks(value) + case "days", "day", "d": + m.SubDays(value) + case "hours", "hour", "h": + m.SubHours(value) + case "minutes", "minute", "m": + m.SubMinutes(value) + case "seconds", "second", "s": + m.SubSeconds(value) + case "milliseconds", "millisecond", "ms": + + } + + return m +} + +// Carbon +func (m *Moment) SubSeconds(seconds int) *Moment { + return m.addTime(time.Second * time.Duration(seconds*-1)) +} + +// Carbon +func (m *Moment) SubMinutes(minutes int) *Moment { + return m.addTime(time.Minute * time.Duration(minutes*-1)) +} + +// Carbon +func (m *Moment) SubHours(hours int) *Moment { + return m.addTime(time.Hour * time.Duration(hours*-1)) +} + +// Carbon +func (m *Moment) SubDay() *Moment { + return m.SubDays(1) +} + +// Carbon +func (m *Moment) SubDays(days int) *Moment { + return m.AddDays(days * -1) +} + +func (m *Moment) SubWeeks(weeks int) *Moment { + return m.SubDays(weeks * 7) +} + +// Carbon +func (m *Moment) SubMonths(months int) *Moment { + return m.AddMonths(months * -1) +} + +// Carbon +func (m *Moment) SubYears(years int) *Moment { + return m.AddYears(years * 
-1) +} + +// Carbon +func (m *Moment) Today() *Moment { + return m.Now() +} + +// Carbon +func (m *Moment) Tomorrow() *Moment { + return m.Today().AddDay() +} + +// Carbon +func (m *Moment) Yesterday() *Moment { + return m.Today().SubDay() +} + +func (m *Moment) StartOf(key string) *Moment { + switch key { + case "year", "y": + m.StartOfYear() + case "month", "M": + m.StartOfMonth() + case "week", "w": + m.StartOfWeek() + case "day", "d": + m.StartOfDay() + case "hour", "h": + if m.Minute() > 0 { + m.SubMinutes(m.Minute()) + } + + if m.Second() > 0 { + m.SubSeconds(m.Second()) + } + case "minute", "m": + if m.Second() > 0 { + m.SubSeconds(m.Second()) + } + case "second", "s": + + } + + return m +} + +// Carbon +func (m *Moment) StartOfDay() *Moment { + if m.Hour() > 0 { + _, timeOffset := m.GetTime().Zone() + m.SubHours(m.Hour()) + + _, newTimeOffset := m.GetTime().Zone() + diffOffset := timeOffset - newTimeOffset + if diffOffset != 0 { + // we need to adjust for time zone difference + m.AddSeconds(diffOffset) + } + } + + return m.StartOf("hour") +} + +// @todo ISO8601 Starts on Monday +func (m *Moment) StartOfWeek() *Moment { + return m.GoBackTo(time.Monday, false).StartOfDay() +} + +// Carbon +func (m *Moment) StartOfMonth() *Moment { + return m.SetDay(1).StartOfDay() +} + +// Carbon +func (m *Moment) StartOfYear() *Moment { + return m.SetMonth(time.January).SetDay(1).StartOfDay() +} + +// Carbon +func (m *Moment) EndOf(key string) *Moment { + switch key { + case "year", "y": + m.EndOfYear() + case "month", "M": + m.EndOfMonth() + case "week", "w": + m.EndOfWeek() + case "day", "d": + m.EndOfDay() + case "hour", "h": + if m.Minute() < 59 { + m.AddMinutes(59 - m.Minute()) + } + case "minute", "m": + if m.Second() < 59 { + m.AddSeconds(59 - m.Second()) + } + case "second", "s": + + } + + return m +} + +// Carbon +func (m *Moment) EndOfDay() *Moment { + if m.Hour() < 23 { + _, timeOffset := m.GetTime().Zone() + m.AddHours(23 - m.Hour()) + + _, newTimeOffset := m.GetTime().Zone() + diffOffset := newTimeOffset - timeOffset + if diffOffset != 0 { + // we need to adjust for time zone difference + m.SubSeconds(diffOffset) + } + } + + return m.EndOf("hour") +} + +// @todo ISO8601 Ends on Sunday +func (m *Moment) EndOfWeek() *Moment { + return m.GoTo(time.Sunday, false).EndOfDay() +} + +// Carbon +func (m *Moment) EndOfMonth() *Moment { + return m.SetDay(m.DaysInMonth()).EndOfDay() +} + +// Carbon +func (m *Moment) EndOfYear() *Moment { + return m.GoToMonth(time.December, false).EndOfMonth() +} + +// Custom +func (m *Moment) GoTo(day time.Weekday, next bool) *Moment { + if m.Day() == day { + if !next { + return m + } else { + m.AddDay() + } + } + + var diff int + if diff = int(day) - int(m.Day()); diff > 0 { + return m.AddDays(diff) + } + + return m.AddDays(7 + diff) +} + +// Custom +func (m *Moment) GoBackTo(day time.Weekday, previous bool) *Moment { + if m.Day() == day { + if !previous { + return m + } else { + m.SubDay() + } + } + + var diff int + if diff = int(day) - int(m.Day()); diff > 0 { + return m.SubDays(7 - diff) + } + + return m.SubDays(diff * -1) +} + +// Custom +func (m *Moment) GoToMonth(month time.Month, next bool) *Moment { + if m.Month() == month { + if !next { + return m + } else { + m.AddMonths(1) + } + } + + var diff int + if diff = int(month - m.Month()); diff > 0 { + return m.AddMonths(diff) + } + + return m.AddMonths(12 + diff) +} + +// Custom +func (m *Moment) GoBackToMonth(month time.Month, previous bool) *Moment { + if m.Month() == month { + if !previous { + return m + } 
else {
+			m.SubMonths(1)
+		}
+	}
+
+	var diff int
+	if diff = int(month) - int(m.Month()); diff > 0 {
+		return m.SubMonths(12 - diff)
+	}
+
+	return m.SubMonths(diff * -1)
+}
+
+func (m *Moment) SetSecond(seconds int) *Moment {
+	if seconds >= 0 && seconds <= 60 {
+		return m.AddSeconds(seconds - m.Second())
+	}
+
+	return m
+}
+
+func (m *Moment) SetMinute(minute int) *Moment {
+	if minute >= 0 && minute <= 60 {
+		return m.AddMinutes(minute - m.Minute())
+	}
+
+	return m
+}
+
+func (m *Moment) SetHour(hour int) *Moment {
+	if hour >= 0 && hour <= 23 {
+		return m.AddHours(hour - m.Hour())
+	}
+
+	return m
+}
+
+// Custom
+func (m *Moment) SetDay(day int) *Moment {
+	if m.DayOfMonth() == day {
+		return m
+	}
+
+	return m.AddDays(day - m.DayOfMonth())
+}
+
+// Custom
+func (m *Moment) SetMonth(month time.Month) *Moment {
+	if m.Month() > month {
+		return m.GoBackToMonth(month, false)
+	}
+
+	return m.GoToMonth(month, false)
+}
+
+// Custom
+func (m *Moment) SetYear(year int) *Moment {
+	if m.Year() == year {
+		return m
+	}
+
+	return m.AddYears(year - m.Year())
+}
+
+// UTC Mode. @see http://momentjs.com/docs/#/parsing/utc/
+func (m *Moment) UTC() *Moment {
+	return m
+}
+
+// http://momentjs.com/docs/#/manipulating/timezone-offset/
+func (m *Moment) Zone() int {
+	_, offset := m.GetTime().Zone()
+
+	return (offset / 60) * -1
+}
+
+/**
+ * Display
+ *
+ */
+func (m *Moment) Format(layout string) string {
+	format := m.Convert(layout)
+	hasCustom := false
+
+	formatted := m.GetTime().Format(format)
+
+	if strings.Contains(formatted, "<std") {
+		hasCustom = true
+		formatted = strings.Replace(formatted, "<stdUnix>", fmt.Sprintf("%d", m.Unix()), -1)
+		formatted = strings.Replace(formatted, "<stdWeekOfYear>", fmt.Sprintf("%d", m.ISOWeek()), -1)
+		formatted = strings.Replace(formatted, "<stdDayOfWeek>", fmt.Sprintf("%d", m.DayOfWeek()), -1)
+		formatted = strings.Replace(formatted, "<stdDayOfWeekISO>", fmt.Sprintf("%d", m.DayOfWeekISO()), -1)
+		formatted = strings.Replace(formatted, "<stdDayOfYear>", fmt.Sprintf("%d", m.DayOfYear()), -1)
+		formatted = strings.Replace(formatted, "<stdQuarter>", fmt.Sprintf("%d", m.Quarter()), -1)
+		formatted = strings.Replace(formatted, "<stdDayOfYearZero>", m.dayOfYearZero(), -1)
+		formatted = strings.Replace(formatted, "<stdHourNoZero>", fmt.Sprintf("%d", m.Hour()), -1)
+	}
+
+	// This has to happen after time.Format
+	if hasCustom && strings.Contains(formatted, "<stdOrdinal>") {
+		formatted = numberRegex.ReplaceAllStringFunc(formatted, func(n string) string {
+			ordinal, _ := strconv.Atoi(strings.Replace(n, "<stdOrdinal>", "", 1))
+			return m.ordinal(ordinal)
+		})
+	}
+
+	return formatted
+}
+
+func (m *Moment) FormatGo(layout string) string {
+	return m.GetTime().Format(layout)
+}
+
+// From Dmytro Shteflyuk @https://groups.google.com/forum/#!topic/golang-nuts/l8NhI74jl-4
+func (m *Moment) ordinal(x int) string {
+	suffix := "th"
+	switch x % 10 {
+	case 1:
+		if x%100 != 11 {
+			suffix = "st"
+		}
+	case 2:
+		if x%100 != 12 {
+			suffix = "nd"
+		}
+	case 3:
+		if x%100 != 13 {
+			suffix = "rd"
+		}
+	}
+
+	return strconv.Itoa(x) + suffix
+}
+
+func (m *Moment) FromNow() Diff {
+	now := new(Moment)
+	now.Now()
+
+	return m.From(now)
+}
+
+// Carbon
+func (m *Moment) From(f *Moment) Diff {
+	return m.GetDiff(f)
+}
+
+/**
+ * Difference
+ *
+ */
+func (m *Moment) Diff(t *Moment, unit string) int {
+	diff := m.GetDiff(t)
+
+	switch unit {
+	case "years":
+		return diff.InYears()
+	case "months":
+		return diff.InMonths()
+	case "weeks":
+		return diff.InWeeks()
+	case "days":
+		return diff.InDays()
+	case "hours":
+		return diff.InHours()
+	case "minutes":
+		return diff.InMinutes()
+	case "seconds":
+		return diff.InSeconds()
+	}
+
+	return 0
+}
+
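+// For example, given moments a and b where a is one hour after b,
+// a.Diff(b, "minutes") returns 60 and a.Diff(b, "hours") returns 1,
+// assuming the Diff helpers (InMinutes, InHours, ...) truncate toward
+// zero as their names suggest.
+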
+// Custom
+func (m *Moment) GetDiff(t *Moment) Diff {
+	duration := m.GetTime().Sub(t.GetTime())
+
+	return Diff{duration}
+}
+
+/**
+ * Display
+ *
+ */
+func (m *Moment) ValueOf() int64 {
+	return m.Unix() * 1000
+}
+
+func (m *Moment) Unix() int64 {
+	return m.GetTime().Unix()
+}
+
+func (m *Moment) DaysInMonth() int {
+	days := 31
+	switch m.Month() {
+	case time.April, time.June, time.September, time.November:
+		days = 30
+		break
+	case time.February:
+		days = 28
+		if m.IsLeapYear() {
+			days = 29
+		}
+		break
+	}
+
+	return days
+}
+
+// or ToSlice?
+func (m *Moment) ToArray() []int {
+	return []int{
+		m.Year(),
+		int(m.Month()),
+		m.DayOfMonth(),
+		m.Hour(),
+		m.Minute(),
+		m.Second(),
+	}
+}
+
+/**
+ * Query
+ *
+ */
+func (m *Moment) IsBefore(t Moment) bool {
+	return m.GetTime().Before(t.GetTime())
+}
+
+func (m *Moment) IsSame(t *Moment, layout string) bool {
+	return m.Format(layout) == t.Format(layout)
+}
+
+func (m *Moment) IsAfter(t Moment) bool {
+	return m.GetTime().After(t.GetTime())
+}
+
+// Carbon
+func (m *Moment) IsToday() bool {
+	today := m.Clone().Today()
+
+	return m.Year() == today.Year() && m.Month() == today.Month() && m.Day() == today.Day()
+}
+
+// Carbon
+func (m *Moment) IsTomorrow() bool {
+	tomorrow := m.Clone().Tomorrow()
+
+	return m.Year() == tomorrow.Year() && m.Month() == tomorrow.Month() && m.Day() == tomorrow.Day()
+}
+
+// Carbon
+func (m *Moment) IsYesterday() bool {
+	yesterday := m.Clone().Yesterday()
+
+	return m.Year() == yesterday.Year() && m.Month() == yesterday.Month() && m.Day() == yesterday.Day()
+}
+
+// Carbon
+func (m *Moment) IsWeekday() bool {
+	return !m.IsWeekend()
+}
+
+// Carbon
+func (m *Moment) IsWeekend() bool {
+	return m.DayOfWeek() == time.Sunday || m.DayOfWeek() == time.Saturday
+}
+
+func (m *Moment) IsLeapYear() bool {
+	year := m.Year()
+	return year%4 == 0 && (year%100 != 0 || year%400 == 0)
+}
+
+// Custom
+func (m *Moment) Range(start Moment, end Moment) bool {
+	return m.IsAfter(start) && m.IsBefore(end)
+}
diff --git a/vendor/github.com/leibowitz/moment/moment_parser.go b/vendor/github.com/leibowitz/moment/moment_parser.go
new file mode 100644
index 00000000000..3361cfba113
--- /dev/null
+++ b/vendor/github.com/leibowitz/moment/moment_parser.go
@@ -0,0 +1,100 @@
+package moment
+
+import (
+	"regexp"
+	"strings"
+)
+
+type MomentParser struct{}
+
+var (
+	date_pattern = regexp.MustCompile("(LT|LL?L?L?|l{1,4}|Mo|MM?M?M?|Do|DDDo|DD?D?D?|ddd?d?|do?|w[o|w]?|W[o|W]?|YYYYY|YYYY|YY|gg(ggg?)?|GG(GGG?)?|e|E|a|A|hh?|HH?|mm?|ss?|SS?S?|X|zz?|ZZ?|Q)")
+)
+
+/*
+
+	<stdOrdinal> S (makes any number before it ordinal)
+
+	stdDayOfYear 1,2,365
+
+	stdDayOfYearZero 001, 002, 365
+
+	stdDayOfWeek w 0, 1, 2 numeric day of the week (0 = sunday)
+
+	stdDayOfWeekISO N 1 = Monday
+
+	stdWeekOfYear W ISO week number of year
+
+	stdUnix U
+
+	stdQuarter
+*/
+
+// Thanks to https://github.com/fightbulc/moment.php for replacement keys and regex
+var moment_replacements = map[string]string{
+	"M":    "1",                          // stdNumMonth 1 2 ... 11 12
+	"Mo":   "1",                          // stdNumMonth 1st 2nd ... 11th 12th
+	"MM":   "01",                         // stdZeroMonth 01 02 ... 11 12
+	"MMM":  "Jan",                        // stdMonth Jan Feb ... Nov Dec
+	"MMMM": "January",                    // stdLongMonth January February ... November December
+	"D":    "2",                          // stdDay 1 2 ... 30 30
+	"Do":   "2",                          // stdDay 1st 2nd ... 30th 31st @todo support st nd th etc.
+	"DD":   "02",                         // stdZeroDay 01 02 ... 30 31
+	"DDD":  "<stdDayOfYear>",             // Day of the year 1 2 ... 364 365
+	"DDDo": "<stdDayOfYear><stdOrdinal>", // Day of the year 1st 2nd ... 364th 365th
+	"DDDD": "<stdDayOfYearZero>",         // Day of the year 001 002 ... 364 365 @todo****
+	"d":    "<stdDayOfWeek>",             // Numeric representation of day of the week 0 1 ... 5 6
+	"do":   "<stdDayOfWeek><stdOrdinal>", // 0th 1st ... 5th 6th
+	"dd":   "Mon",                        // ***Su Mo ... Fr Sa @todo
+	"ddd":  "Mon",                        // Sun Mon ... Fri Sat
+	"dddd": "Monday",                     // stdLongWeekDay Sunday Monday ... Friday Saturday
+	"e":    "<stdDayOfWeek>",             // Numeric representation of day of the week 0 1 ... 5 6 @todo
+	"E":    "<stdDayOfWeekISO>",          // ISO-8601 numeric representation of the day of the week (added in PHP 5.1.0) 1 2 ... 6 7 @todo
+	"w":    "<stdWeekOfYear>",            // 1 2 ... 52 53
+	"wo":   "<stdWeekOfYear><stdOrdinal>", // 1st 2nd ... 52nd 53rd
+	"ww":   "<stdWeekOfYear>",            // ***01 02 ... 52 53 @todo
+	"W":    "<stdWeekOfYear>",            // 1 2 ... 52 53
+	"Wo":   "<stdWeekOfYear><stdOrdinal>", // 1st 2nd ... 52nd 53rd
+	"WW":   "<stdWeekOfYear>",            // ***01 02 ... 52 53 @todo
+	"YY":   "06",                         // stdYear 70 71 ... 29 30
+	"YYYY": "2006",                       // stdLongYear 1970 1971 ... 2029 2030
+	// "gg" : "o", // ISO-8601 year number 70 71 ... 29 30 @todo
+	// "gggg" : "o", // ***1970 1971 ... 2029 2030 @todo
+	// "GG" : "o", //70 71 ... 29 30 @todo
+	// "GGGG" : "o", // ***1970 1971 ... 2029 2030 @todo
+	"Q":  "<stdQuarter>",
+	"A":  "PM",              // stdPM AM PM
+	"a":  "pm",              // stdpm am pm
+	"H":  "<stdHourNoZero>", // stdHour 0 1 ... 22 23
+	"HH": "15",              // 00 01 ... 22 23
+	"h":  "3",               // stdHour12 1 2 ... 11 12
+	"hh": "03",              // stdZeroHour12 01 02 ... 11 12
+	"m":  "4",               // stdZeroMinute 0 1 ... 58 59
+	"mm": "04",              // stdZeroMinute 00 01 ... 58 59
+	"s":  "5",               // stdSecond 0 1 ... 58 59
+	"ss": "05",              // stdZeroSecond ***00 01 ... 58 59
+	// "S" : "", //0 1 ... 8 9
+	// "SS" : "", //0 1 ... 98 99
+	// "SSS" : "", //0 1 ... 998 999
+	"z":    "MST",       //EST CST ... MST PST
+	"zz":   "MST",       //EST CST ... MST PST
+	"Z":    "Z07:00",    // stdNumColonTZ -07:00 -06:00 ... +06:00 +07:00
+	"ZZ":   "-0700",     // stdNumTZ -0700 -0600 ... +0600 +0700
+	"X":    "<stdUnix>", // Seconds since unix epoch 1360013296
+	"LT":   "3:04 PM",                        // 8:30 PM
+	"L":    "01/02/2006",                     //09/04/1986
+	"l":    "1/2/2006",                       //9/4/1986
+	"LL":   "January 2 2006",                 //September 4th 1986 the php s flag isn't supported
+	"ll":   "Jan 2 2006",                     //Sep 4 1986
+	"LLL":  "January 2 2006 3:04 PM",         //September 4th 1986 8:30 PM @todo the php s flag isn't supported
+	"lll":  "Jan 2 2006 3:04 PM",             //Sep 4 1986 8:30 PM
+	"LLLL": "Monday, January 2 2006 3:04 PM", //Thursday, September 4th 1986 8:30 PM the php s flag isn't supported
+	"llll": "Mon, Jan 2 2006 3:04 PM",        //Thu, Sep 4 1986 8:30 PM
+}
+
+func (p *MomentParser) Convert(layout string) string {
+	var match [][]string
+	if match = date_pattern.FindAllStringSubmatch(layout, -1); match == nil {
+		return layout
+	}
+
+	for i := range match {
+		if replace, ok := moment_replacements[match[i][0]]; ok {
+			layout = strings.Replace(layout, match[i][0], replace, 1)
+		}
+	}
+
+	return layout
+}
diff --git a/vendor/github.com/leibowitz/moment/parse_day.go b/vendor/github.com/leibowitz/moment/parse_day.go
new file mode 100644
index 00000000000..e8e890a462e
--- /dev/null
+++ b/vendor/github.com/leibowitz/moment/parse_day.go
@@ -0,0 +1,32 @@
+package moment
+
+import (
+	"fmt"
+	"strings"
+	"time"
+)
+
+var (
+	days = []time.Weekday{
+		time.Sunday,
+		time.Monday,
+		time.Tuesday,
+		time.Wednesday,
+		time.Thursday,
+		time.Friday,
+		time.Saturday,
+	}
+)
+
+func ParseWeekDay(day string) (time.Weekday, error) {
+
+	day = strings.ToLower(day)
+
+	for _, d := range days {
+		if day == strings.ToLower(d.String()) {
+			return d, nil
+		}
+	}
+
+	return -1, fmt.Errorf("Unable to parse %s as week day", day)
+}
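+
+// For example, ParseWeekDay("Monday") and ParseWeekDay("monday") both return
+// time.Monday with a nil error, while an unrecognized name such as "lundi"
+// returns -1 and a non-nil error.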
diff --git a/vendor/github.com/leibowitz/moment/strftime_parser.go b/vendor/github.com/leibowitz/moment/strftime_parser.go
new file mode 100644
index 00000000000..3c024376535
--- /dev/null
+++ b/vendor/github.com/leibowitz/moment/strftime_parser.go
@@ -0,0 +1,68 @@
+package moment
+
+import (
+	"regexp"
+	"strings"
+)
+
+type StrftimeParser struct{}
+
+var (
+	replacements_pattern = regexp.MustCompile("%[mbhBedjwuaAVgyGYpPkHlIMSZzsTrRTDFXx]")
+)
+
+// Not implemented
+// U
+// C
+
+var strftime_replacements = map[string]string{
+	"%m": "01",                 // stdZeroMonth 01 02 ... 11 12
+	"%b": "Jan",                // stdMonth Jan Feb ... Nov Dec
+	"%h": "Jan",
+	"%B": "January",            // stdLongMonth January February ... November December
+	"%e": "2",                  // stdDay 1 2 ... 30 30
+	"%d": "02",                 // stdZeroDay 01 02 ... 30 31
+	"%j": "<stdDayOfYearZero>", // Day of the year ***001 002 ... 364 365 @todo****
+	"%w": "<stdDayOfWeek>",     // Numeric representation of day of the week 0 1 ... 5 6
+	"%u": "<stdDayOfWeekISO>",  // ISO-8601 numeric representation of the day of the week (added in PHP 5.1.0) 1 2 ... 6 7 @todo
+	"%a": "Mon",                // Sun Mon ... Fri Sat
+	"%A": "Monday",             // stdLongWeekDay Sunday Monday ... Friday Saturday
+	"%V": "<stdWeekOfYear>",    // ***01 02 ... 52 53 @todo begin with zeros
+	"%g": "06",                 // stdYear 70 71 ... 29 30
+	"%y": "06",
+	"%G": "2006",               // stdLongYear 1970 1971 ... 2029 2030
+	"%Y": "2006",
+	"%p": "PM",                 // stdPM AM PM
+	"%P": "pm",                 // stdpm am pm
+	"%k": "15",                 // stdHour 0 1 ... 22 23
+	"%H": "15",                 // 00 01 ... 22 23
+	"%l": "3",                  // stdHour12 1 2 ... 11 12
+	"%I": "03",                 // stdZeroHour12 01 02 ... 11 12
+	"%M": "04",                 // stdZeroMinute 00 01 ... 58 59
+	"%S": "05",                 // stdZeroSecond ***00 01 ... 58 59
+	"%Z": "MST",                //EST CST ... MST PST
+	"%z": "-0700",              // stdNumTZ -0700 -0600 ... +0600 +0700
+	"%s": "<stdUnix>",          // Seconds since unix epoch 1360013296
+	"%r": "03:04:05 PM",
+	"%R": "15:04",
+	"%T": "15:04:05",
+	"%D": "01/02/06",
+	"%F": "2006-01-02",
+	"%X": "15:04:05",
+	"%x": "01/02/06",
+}
+
+func (p *StrftimeParser) Convert(layout string) string {
+	var match [][]string
+	if match = replacements_pattern.FindAllStringSubmatch(layout, -1); match == nil {
+		return layout
+	}
+
+	for i := range match {
+		if replace, ok := strftime_replacements[match[i][0]]; ok {
+			layout = strings.Replace(layout, match[i][0], replace, 1)
+		}
+	}
+
+	return layout
+}
From 63a200686e065a79fdd7ade563fd942236c4feda Mon Sep 17 00:00:00 2001
From: wph95
Date: Mon, 26 Mar 2018 19:48:57 +0800
Subject: [PATCH 008/488] - pipeline aggs support - add some test

---
 pkg/tsdb/elasticsearch/elasticsearch.go   |  42 ++-
 pkg/tsdb/elasticsearch/model_parser.go    |  81 ++----
 pkg/tsdb/elasticsearch/models.go          |  21 +-
 pkg/tsdb/elasticsearch/query.go           | 182 +++++++-----
 pkg/tsdb/elasticsearch/query_def.go       |  18 ++
 pkg/tsdb/elasticsearch/query_test.go      | 331 ++++++++++++++++++++++
 pkg/tsdb/elasticsearch/response_parser.go |  34 ++-
 7 files changed, 557 insertions(+), 152 deletions(-)
 create mode 100644 pkg/tsdb/elasticsearch/query_test.go

diff --git a/pkg/tsdb/elasticsearch/elasticsearch.go b/pkg/tsdb/elasticsearch/elasticsearch.go
index 8fd82a179e8..0ce9eca0972 100644
--- a/pkg/tsdb/elasticsearch/elasticsearch.go
+++ b/pkg/tsdb/elasticsearch/elasticsearch.go
@@ -1,6 +1,7 @@
 package elasticsearch
 
 import (
+	"bytes"
 	"context"
 	"encoding/json"
 	"errors"
@@ -18,7 +19,8 @@ import (
 )
 
 type ElasticsearchExecutor struct {
-	Transport *http.Transport
+	QueryParser *ElasticSearchQueryParser
+	Transport   *http.Transport
 }
 
 var (
@@ -47,17 +49,21 @@ func (e *ElasticsearchExecutor) Query(ctx context.Context, dsInfo *models.DataSo
 	result := &tsdb.Response{}
 	result.Results = make(map[string]*tsdb.QueryResult)
 
-	queryParser := ElasticSearchQueryParser{
-		dsInfo,
-		tsdbQuery.TimeRange,
-		tsdbQuery.Queries,
-	}
-
-	payload, targets, err := 
queryParser.Parse() + queries, err := e.getQuery(dsInfo, tsdbQuery) if err != nil { return nil, err } + buff := bytes.Buffer{} + for _, q := range queries { + s, err := q.Build(tsdbQuery, dsInfo) + if err != nil { + return nil, err + } + buff.WriteString(s) + } + payload := buff.String() + if setting.Env == setting.DEV { glog.Debug("Elasticsearch playload", "raw playload", payload) } @@ -96,12 +102,30 @@ func (e *ElasticsearchExecutor) Query(ctx context.Context, dsInfo *models.DataSo return nil, errors.New(res.getErrMsg()) } } - responseParser := ElasticsearchResponseParser{responses.Responses, targets} + responseParser := ElasticsearchResponseParser{responses.Responses, queries} queryRes := responseParser.getTimeSeries() result.Results["A"] = queryRes return result, nil } +func (e *ElasticsearchExecutor) getQuery(dsInfo *models.DataSource, context *tsdb.TsdbQuery) ([]*Query, error) { + queries := make([]*Query, 0) + if len(context.Queries) == 0 { + return nil, fmt.Errorf("query request contains no queries") + } + for _, v := range context.Queries { + + query, err := e.QueryParser.Parse(v.Model, dsInfo) + if err != nil { + return nil, err + } + queries = append(queries, query) + + } + return queries, nil + +} + func (e *ElasticsearchExecutor) createRequest(dsInfo *models.DataSource, query string) (*http.Request, error) { u, _ := url.Parse(dsInfo.Url) u.Path = path.Join(u.Path, "_msearch") diff --git a/pkg/tsdb/elasticsearch/model_parser.go b/pkg/tsdb/elasticsearch/model_parser.go index 7da6765e06c..0d016dc58a5 100644 --- a/pkg/tsdb/elasticsearch/model_parser.go +++ b/pkg/tsdb/elasticsearch/model_parser.go @@ -1,62 +1,45 @@ package elasticsearch import ( - "bytes" - "encoding/json" "fmt" "github.com/grafana/grafana/pkg/components/simplejson" "github.com/grafana/grafana/pkg/models" "github.com/grafana/grafana/pkg/tsdb" "github.com/leibowitz/moment" - "src/github.com/davecgh/go-spew/spew" - "strconv" "strings" "time" ) type ElasticSearchQueryParser struct { - DsInfo *models.DataSource - TimeRange *tsdb.TimeRange - Queries []*tsdb.Query } -func (qp *ElasticSearchQueryParser) Parse() (string, []*QueryBuilder, error) { - payload := bytes.Buffer{} - queryHeader := qp.getQueryHeader() - targets := make([]*QueryBuilder, 0) - for _, q := range qp.Queries { - timeField, err := q.Model.Get("timeField").String() - if err != nil { - return "", nil, err - } - rawQuery := q.Model.Get("query").MustString("") - bucketAggs := q.Model.Get("bucketAggs").MustArray() - metrics := q.Model.Get("metrics").MustArray() - alias := q.Model.Get("alias").MustString("") - builder := QueryBuilder{timeField, rawQuery, bucketAggs, metrics, alias} - targets = append(targets, &builder) - - query, err := builder.Build() - if err != nil { - return "", nil, err - } - queryBytes, err := json.Marshal(query) - if err != nil { - return "", nil, err - } - - payload.WriteString(queryHeader.String() + "\n") - payload.WriteString(string(queryBytes) + "\n") +func (qp *ElasticSearchQueryParser) Parse(model *simplejson.Json, dsInfo *models.DataSource) (*Query, error) { + //payload := bytes.Buffer{} + //queryHeader := qp.getQueryHeader() + timeField, err := model.Get("timeField").String() + if err != nil { + return nil, err + } + rawQuery := model.Get("query").MustString("") + bucketAggs := model.Get("bucketAggs").MustArray() + metrics := model.Get("metrics").MustArray() + alias := model.Get("alias").MustString("") + parsedInterval, err := tsdb.GetIntervalFrom(dsInfo, model, time.Millisecond) + if err != nil { + return nil, err } - p, err := 
qp.payloadReplace(payload.String(), qp.DsInfo.JsonData) - - return p, targets, err + return &Query{timeField, + rawQuery, + bucketAggs, + metrics, + alias, + parsedInterval}, nil } -func (qp *ElasticSearchQueryParser) getQueryHeader() *QueryHeader { +func getRequestHeader(timeRange *tsdb.TimeRange, dsInfo *models.DataSource) *QueryHeader { var header QueryHeader - esVersion := qp.DsInfo.JsonData.Get("esVersion").MustInt() + esVersion := dsInfo.JsonData.Get("esVersion").MustInt() searchType := "query_then_fetch" if esVersion < 5 { @@ -64,29 +47,13 @@ func (qp *ElasticSearchQueryParser) getQueryHeader() *QueryHeader { } header.SearchType = searchType header.IgnoreUnavailable = true - header.Index = getIndexList(qp.DsInfo.Database, qp.DsInfo.JsonData.Get("interval").MustString(""), qp.TimeRange) + header.Index = getIndexList(dsInfo.Database, dsInfo.JsonData.Get("interval").MustString(""), timeRange) if esVersion >= 56 { - header.MaxConcurrentShardRequests = qp.DsInfo.JsonData.Get("maxConcurrentShardRequests").MustInt() + header.MaxConcurrentShardRequests = dsInfo.JsonData.Get("maxConcurrentShardRequests").MustInt() } return &header } -func (qp *ElasticSearchQueryParser) payloadReplace(payload string, model *simplejson.Json) (string, error) { - parsedInterval, err := tsdb.GetIntervalFrom(qp.DsInfo, model, time.Millisecond) - if err != nil { - return "", nil - } - - interval := intervalCalculator.Calculate(qp.TimeRange, parsedInterval) - glog.Warn(spew.Sdump(interval)) - payload = strings.Replace(payload, "$timeFrom", fmt.Sprintf("%d", qp.TimeRange.GetFromAsMsEpoch()), -1) - payload = strings.Replace(payload, "$timeTo", fmt.Sprintf("%d", qp.TimeRange.GetToAsMsEpoch()), -1) - payload = strings.Replace(payload, "$interval", interval.Text, -1) - payload = strings.Replace(payload, "$__interval_ms", strconv.FormatInt(interval.Value.Nanoseconds()/int64(time.Millisecond), 10), -1) - payload = strings.Replace(payload, "$__interval", interval.Text, -1) - - return payload, nil -} func getIndexList(pattern string, interval string, timeRange *tsdb.TimeRange) string { if interval == "" { diff --git a/pkg/tsdb/elasticsearch/models.go b/pkg/tsdb/elasticsearch/models.go index d758e2159de..822df2dd4d1 100644 --- a/pkg/tsdb/elasticsearch/models.go +++ b/pkg/tsdb/elasticsearch/models.go @@ -1,25 +1,25 @@ package elasticsearch import ( - "github.com/grafana/grafana/pkg/components/simplejson" "bytes" - "fmt" "encoding/json" + "fmt" + "github.com/grafana/grafana/pkg/components/simplejson" ) type QueryHeader struct { SearchType string `json:"search_type"` IgnoreUnavailable bool `json:"ignore_unavailable"` Index interface{} `json:"index"` - MaxConcurrentShardRequests int `json:"max_concurrent_shard_requests"` + MaxConcurrentShardRequests int `json:"max_concurrent_shard_requests,omitempty"` } -func (q *QueryHeader) String() (string) { +func (q *QueryHeader) String() string { r, _ := json.Marshal(q) return string(r) } -type Query struct { +type Request struct { Query map[string]interface{} `json:"query"` Aggs Aggs `json:"aggs"` Size int `json:"size"` @@ -45,11 +45,10 @@ type FiltersAgg struct { } type TermsAggSetting struct { - Field string `json:"field"` - Size int `json:"size"` - Order map[string]interface{} `json:"order"` - MinDocCount int `json:"min_doc_count"` - Missing string `json:"missing"` + Field string `json:"field"` + Size int `json:"size"` + Order map[string]interface{} `json:"order"` + Missing string `json:"missing,omitempty"` } type TermsAgg struct { @@ -104,7 +103,7 @@ type Response struct { Aggregations 
map[string]interface{} `json:"aggregations"` } -func (r *Response) getErrMsg() (string) { +func (r *Response) getErrMsg() string { var msg bytes.Buffer errJson := simplejson.NewFromAny(r.Err) errType, err := errJson.Get("type").String() diff --git a/pkg/tsdb/elasticsearch/query.go b/pkg/tsdb/elasticsearch/query.go index d6d70e79a2a..51f1ebb5d7a 100644 --- a/pkg/tsdb/elasticsearch/query.go +++ b/pkg/tsdb/elasticsearch/query.go @@ -1,81 +1,103 @@ package elasticsearch import ( + "bytes" + "encoding/json" "errors" + "fmt" "github.com/grafana/grafana/pkg/components/simplejson" + "github.com/grafana/grafana/pkg/models" + "github.com/grafana/grafana/pkg/tsdb" "strconv" + "strings" + "time" ) var rangeFilterSetting = RangeFilterSetting{Gte: "$timeFrom", - Lte: "$timeTo", + Lte: "$timeTo", Format: "epoch_millis"} -type QueryBuilder struct { - TimeField string - RawQuery string - BucketAggs []interface{} - Metrics []interface{} - Alias string +type Query struct { + TimeField string `json:"timeField"` + RawQuery string `json:"query"` + BucketAggs []interface{} `json:"bucketAggs"` + Metrics []interface{} `json:"metrics"` + Alias string `json:"Alias"` + Interval time.Duration } -func (b *QueryBuilder) Build() (Query, error) { - var err error - var res Query - res.Query = make(map[string]interface{}) - res.Size = 0 +func (q *Query) Build(queryContext *tsdb.TsdbQuery, dsInfo *models.DataSource) (string, error) { + var req Request + payload := bytes.Buffer{} - if err != nil { - return res, err - } - - boolQuery := BoolQuery{} - boolQuery.Filter = append(boolQuery.Filter, newRangeFilter(b.TimeField, rangeFilterSetting)) - boolQuery.Filter = append(boolQuery.Filter, newQueryStringFilter(true, b.RawQuery)) - res.Query["bool"] = boolQuery + req.Size = 0 + q.renderReqQuery(&req) // handle document query - if len(b.BucketAggs) == 0 { - if len(b.Metrics) > 0 { - metric := simplejson.NewFromAny(b.Metrics[0]) + if q.isRawDocumentQuery() { + return "", errors.New("alert not support Raw_Document") + } + + err := q.parseAggs(&req) + if err != nil { + return "", err + } + + reqBytes, err := json.Marshal(req) + reqHeader := getRequestHeader(queryContext.TimeRange, dsInfo) + payload.WriteString(reqHeader.String() + "\n") + payload.WriteString(string(reqBytes) + "\n") + return q.renderTemplate(payload.String(), queryContext) +} + +func (q *Query) isRawDocumentQuery() bool { + if len(q.BucketAggs) == 0 { + if len(q.Metrics) > 0 { + metric := simplejson.NewFromAny(q.Metrics[0]) if metric.Get("type").MustString("") == "raw_document" { - return res, errors.New("alert not support Raw_Document") + return true } } } - aggs, err := b.parseAggs(b.BucketAggs, b.Metrics) - res.Aggs = aggs["aggs"].(Aggs) - - return res, err + return false } -func (b *QueryBuilder) parseAggs(bucketAggs []interface{}, metrics []interface{}) (Aggs, error) { - query := make(Aggs) - nestedAggs := query - for _, aggRaw := range bucketAggs { +func (q *Query) renderReqQuery(req *Request) { + req.Query = make(map[string]interface{}) + boolQuery := BoolQuery{} + boolQuery.Filter = append(boolQuery.Filter, newRangeFilter(q.TimeField, rangeFilterSetting)) + boolQuery.Filter = append(boolQuery.Filter, newQueryStringFilter(true, q.RawQuery)) + req.Query["bool"] = boolQuery +} + +func (q *Query) parseAggs(req *Request) error { + aggs := make(Aggs) + nestedAggs := aggs + for _, aggRaw := range q.BucketAggs { esAggs := make(Aggs) aggJson := simplejson.NewFromAny(aggRaw) aggType, err := aggJson.Get("type").String() if err != nil { - return nil, err + return err } 
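 		// the panel-assigned agg id keys this aggregation in the request body;
 		// the response parser later walks the same ids to map result buckets
 		// back to their bucket agg definitions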
id, err := aggJson.Get("id").String() if err != nil { - return nil, err + return err } switch aggType { case "date_histogram": - esAggs["date_histogram"] = b.getDateHistogramAgg(aggJson) + esAggs["date_histogram"] = q.getDateHistogramAgg(aggJson) case "histogram": - esAggs["histogram"] = b.getHistogramAgg(aggJson) + esAggs["histogram"] = q.getHistogramAgg(aggJson) case "filters": - esAggs["filters"] = b.getFilters(aggJson) + esAggs["filters"] = q.getFilters(aggJson) case "terms": - terms := b.getTerms(aggJson) + terms := q.getTerms(aggJson) esAggs["terms"] = terms.Terms esAggs["aggs"] = terms.Aggs case "geohash_grid": - return nil, errors.New("alert not support Geo_Hash_Grid") + return errors.New("alert not support Geo_Hash_Grid") } if _, ok := nestedAggs["aggs"]; !ok { @@ -90,40 +112,51 @@ func (b *QueryBuilder) parseAggs(bucketAggs []interface{}, metrics []interface{} } nestedAggs["aggs"] = make(Aggs) - for _, metricRaw := range metrics { + for _, metricRaw := range q.Metrics { metric := make(Metric) metricJson := simplejson.NewFromAny(metricRaw) id, err := metricJson.Get("id").String() if err != nil { - return nil, err + return err } metricType, err := metricJson.Get("type").String() if err != nil { - return nil, err + return err } if metricType == "count" { continue } - // todo support pipeline Agg + settings := metricJson.Get("settings").MustMap(map[string]interface{}{}) + + if isPipelineAgg(metricType) { + pipelineAgg := metricJson.Get("pipelineAgg").MustString("") + if _, err := strconv.Atoi(pipelineAgg); err == nil { + settings["buckets_path"] = pipelineAgg + } else { + continue + } + + } else { + settings["field"] = metricJson.Get("field").MustString() + } - settings := metricJson.Get("settings").MustMap() - settings["field"] = metricJson.Get("field").MustString() metric[metricType] = settings nestedAggs["aggs"].(Aggs)[id] = metric } - return query, nil + req.Aggs = aggs["aggs"].(Aggs) + return nil } -func (b *QueryBuilder) getDateHistogramAgg(model *simplejson.Json) DateHistogramAgg { +func (q *Query) getDateHistogramAgg(model *simplejson.Json) *DateHistogramAgg { agg := &DateHistogramAgg{} settings := simplejson.NewFromAny(model.Get("settings").Interface()) interval, err := settings.Get("interval").String() if err == nil { agg.Interval = interval } - agg.Field = b.TimeField + agg.Field = q.TimeField agg.MinDocCount = settings.Get("min_doc_count").MustInt(0) agg.ExtendedBounds = ExtendedBounds{"$timeFrom", "$timeTo"} agg.Format = "epoch_millis" @@ -136,10 +169,10 @@ func (b *QueryBuilder) getDateHistogramAgg(model *simplejson.Json) DateHistogram if err == nil { agg.Missing = missing } - return *agg + return agg } -func (b *QueryBuilder) getHistogramAgg(model *simplejson.Json) HistogramAgg { +func (q *Query) getHistogramAgg(model *simplejson.Json) *HistogramAgg { agg := &HistogramAgg{} settings := simplejson.NewFromAny(model.Get("settings").Interface()) interval, err := settings.Get("interval").String() @@ -155,10 +188,10 @@ func (b *QueryBuilder) getHistogramAgg(model *simplejson.Json) HistogramAgg { if err == nil { agg.Missing = missing } - return *agg + return agg } -func (b *QueryBuilder) getFilters(model *simplejson.Json) FiltersAgg { +func (q *Query) getFilters(model *simplejson.Json) *FiltersAgg { agg := &FiltersAgg{} settings := simplejson.NewFromAny(model.Get("settings").Interface()) for filter := range settings.Get("filters").MustArray() { @@ -170,15 +203,15 @@ func (b *QueryBuilder) getFilters(model *simplejson.Json) FiltersAgg { } agg.Filter[label] = 
newQueryStringFilter(true, query) } - return *agg + return agg } -func (b *QueryBuilder) getTerms(model *simplejson.Json) TermsAgg { +func (q *Query) getTerms(model *simplejson.Json) *TermsAgg { agg := &TermsAgg{Aggs: make(Aggs)} settings := simplejson.NewFromAny(model.Get("settings").Interface()) agg.Terms.Field = model.Get("field").MustString() if settings == nil { - return *agg + return agg } sizeStr := settings.Get("size").MustString("") size, err := strconv.Atoi(sizeStr) @@ -186,17 +219,25 @@ func (b *QueryBuilder) getTerms(model *simplejson.Json) TermsAgg { size = 500 } agg.Terms.Size = size - orderBy := settings.Get("orderBy").MustString("") - if orderBy != "" { + orderBy, err := settings.Get("orderBy").String() + if err == nil { agg.Terms.Order = make(map[string]interface{}) agg.Terms.Order[orderBy] = settings.Get("order").MustString("") - // if orderBy is a int, means this fields is metric result value - // TODO set subAggs - } - - minDocCount, err := settings.Get("min_doc_count").Int() - if err == nil { - agg.Terms.MinDocCount = minDocCount + if _, err := strconv.Atoi(orderBy); err != nil { + for _, metricI := range q.Metrics { + metric := simplejson.NewFromAny(metricI) + metricId := metric.Get("id").MustString() + if metricId == orderBy { + subAggs := make(Aggs) + metricField := metric.Get("field").MustString() + metricType := metric.Get("type").MustString() + subAggs[metricType] = map[string]string{"field": metricField} + agg.Aggs = make(Aggs) + agg.Aggs[metricId] = subAggs + break + } + } + } } missing, err := settings.Get("missing").String() @@ -204,5 +245,16 @@ func (b *QueryBuilder) getTerms(model *simplejson.Json) TermsAgg { agg.Terms.Missing = missing } - return *agg + return agg +} + +func (q *Query) renderTemplate(payload string, queryContext *tsdb.TsdbQuery) (string, error) { + timeRange := queryContext.TimeRange + interval := intervalCalculator.Calculate(timeRange, q.Interval) + payload = strings.Replace(payload, "$timeFrom", fmt.Sprintf("%d", timeRange.GetFromAsMsEpoch()), -1) + payload = strings.Replace(payload, "$timeTo", fmt.Sprintf("%d", timeRange.GetToAsMsEpoch()), -1) + payload = strings.Replace(payload, "$interval", interval.Text, -1) + payload = strings.Replace(payload, "$__interval_ms", strconv.FormatInt(interval.Value.Nanoseconds()/int64(time.Millisecond), 10), -1) + payload = strings.Replace(payload, "$__interval", interval.Text, -1) + return payload, nil } diff --git a/pkg/tsdb/elasticsearch/query_def.go b/pkg/tsdb/elasticsearch/query_def.go index 5dc02aa359e..6f78f02f346 100644 --- a/pkg/tsdb/elasticsearch/query_def.go +++ b/pkg/tsdb/elasticsearch/query_def.go @@ -24,3 +24,21 @@ var extendedStats = map[string]string{ "std_deviation_bounds_upper": "Std Dev Upper", "std_deviation_bounds_lower": "Std Dev Lower", } + +var pipelineOptions = map[string]string{ + "moving_avg": "moving_avg", + "derivative": "derivative", +} + +func isPipelineAgg(metricType string) bool { + if _, ok := pipelineOptions[metricType]; ok { + return true + } + return false +} + +func describeMetric(metricType, field string) string { + text := metricAggType[metricType] + return text + " " + field + +} diff --git a/pkg/tsdb/elasticsearch/query_test.go b/pkg/tsdb/elasticsearch/query_test.go new file mode 100644 index 00000000000..992469175b6 --- /dev/null +++ b/pkg/tsdb/elasticsearch/query_test.go @@ -0,0 +1,331 @@ +package elasticsearch + +import ( + "encoding/json" + "fmt" + "github.com/grafana/grafana/pkg/components/simplejson" + "github.com/grafana/grafana/pkg/models" + 
"github.com/grafana/grafana/pkg/tsdb" + . "github.com/smartystreets/goconvey/convey" + "reflect" + "strconv" + "strings" + "testing" +) + +func testElasticSearchResponse(requestJSON string, expectedElasticSearchRequestJSON string) { + var queryExpectedJSONInterface, queryJSONInterface interface{} + parser := ElasticSearchQueryParser{} + model := &Query{} + + err := json.Unmarshal([]byte(requestJSON), model) + So(err, ShouldBeNil) + jsonDate, _ := simplejson.NewJson([]byte(`{"esVersion":2}`)) + dsInfo := &models.DataSource{ + Database: "grafana-test", + JsonData: jsonDate, + } + + testTimeRange := tsdb.NewTimeRange("5m", "now") + + req, _ := simplejson.NewJson([]byte(requestJSON)) + query, err := parser.Parse(req, dsInfo) + s, err := query.Build(&tsdb.TsdbQuery{TimeRange: testTimeRange}, dsInfo) + + queryJSON := strings.Split(s, "\n")[1] + err = json.Unmarshal([]byte(queryJSON), &queryJSONInterface) + So(err, ShouldBeNil) + + expectedElasticSearchRequestJSON = strings.Replace( + expectedElasticSearchRequestJSON, + "", + strconv.FormatInt(testTimeRange.GetFromAsMsEpoch(), 10), + -1, + ) + + expectedElasticSearchRequestJSON = strings.Replace( + expectedElasticSearchRequestJSON, + "", + strconv.FormatInt(testTimeRange.GetToAsMsEpoch(), 10), + -1, + ) + + err = json.Unmarshal([]byte(expectedElasticSearchRequestJSON), &queryExpectedJSONInterface) + So(err, ShouldBeNil) + + result := reflect.DeepEqual(queryExpectedJSONInterface, queryJSONInterface) + if !result { + fmt.Printf("ERROR: %s \n != \n %s", expectedElasticSearchRequestJSON, queryJSON) + } + So(result, ShouldBeTrue) +} +func TestElasticSearchQueryBuilder(t *testing.T) { + Convey("Elasticsearch QueryBuilder query testing", t, func() { + Convey("Build test average metric with moving average", func() { + var testElasticsearchModelRequestJSON = ` + { + "bucketAggs": [ + { + "field": "timestamp", + "id": "2", + "settings": { + "interval": "auto", + "min_doc_count": 0, + "trimEdges": 0 + }, + "type": "date_histogram" + } + ], + "dsType": "elasticsearch", + "metrics": [ + { + "field": "value", + "id": "1", + "inlineScript": "_value * 2", + "meta": {}, + "settings": { + "script": { + "inline": "_value * 2" + } + }, + "type": "avg" + }, + { + "field": "1", + "id": "3", + "meta": {}, + "pipelineAgg": "1", + "settings": { + "minimize": false, + "model": "simple", + "window": 5 + }, + "type": "moving_avg" + } + ], + "query": "(test:query) AND (name:sample)", + "refId": "A", + "timeField": "timestamp" + } + ` + + var expectedElasticsearchQueryJSON = ` + { + "size": 0, + "query": { + "bool": { + "filter": [ + { + "range": { + "timestamp": { + "gte": "", + "lte": "", + "format": "epoch_millis" + } + } + }, + { + "query_string": { + "analyze_wildcard": true, + "query": "(test:query) AND (name:sample)" + } + } + ] + } + }, + "aggs": { + "2": { + "date_histogram": { + "interval": "200ms", + "field": "timestamp", + "min_doc_count": 0, + "extended_bounds": { + "min": "", + "max": "" + }, + "format": "epoch_millis" + }, + "aggs": { + "1": { + "avg": { + "field": "value", + "script": { + "inline": "_value * 2" + } + } + }, + "3": { + "moving_avg": { + "buckets_path": "1", + "window": 5, + "model": "simple", + "minimize": false + } + } + } + } + } + }` + + testElasticSearchResponse(testElasticsearchModelRequestJSON, expectedElasticsearchQueryJSON) + }) + Convey("Test Wildcards and Quotes", func() { + testElasticsearchModelRequestJSON := ` + { + "alias": "New", + "bucketAggs": [ + { + "field": "timestamp", + "id": "2", + "type": "date_histogram" + } + ], + 
"dsType": "elasticsearch", + "metrics": [ + { + "type": "sum", + "field": "value", + "id": "1" + } + ], + "query": "scope:$location.leagueconnect.api AND name:*CreateRegistration AND name:\"*.201-responses.rate\"", + "refId": "A", + "timeField": "timestamp" + }` + + expectedElasticsearchQueryJSON := ` + { + "size": 0, + "query": { + "bool": { + "filter": [ + { + "range": { + "timestamp": { + "gte": "", + "lte": "", + "format": "epoch_millis" + } + } + }, + { + "query_string": { + "analyze_wildcard": true, + "query": "scope:$location.leagueconnect.api AND name:*CreateRegistration AND name:\"*.201-responses.rate\"" + } + } + ] + } + }, + "aggs": { + "2": { + "aggs": { + "1": { + "sum": { + "field": "value" + } + } + }, + "date_histogram": { + "extended_bounds": { + "max": "", + "min": "" + }, + "field": "timestamp", + "format": "epoch_millis", + "min_doc_count": 0 + } + } + } + }` + + testElasticSearchResponse(testElasticsearchModelRequestJSON, expectedElasticsearchQueryJSON) + }) + Convey("Test Term Aggregates", func() { + testElasticsearchModelRequestJSON := ` + { + "bucketAggs": [{ + "field": "name_raw", + "id": "4", + "settings": { + "order": "desc", + "orderBy": "_term", + "size": "10" + }, + "type": "terms" + }, { + "field": "timestamp", + "id": "2", + "settings": { + "interval": "1m", + "min_doc_count": 0, + "trimEdges": 0 + }, + "type": "date_histogram" + }], + "dsType": "elasticsearch", + "filters": [{ + "boolOp": "AND", + "not": false, + "type": "rfc190Scope", + "value": "*.hmp.metricsd" + }, { + "boolOp": "AND", + "not": false, + "type": "name_raw", + "value": "builtin.general.*_instance_count" + }], + "metricObject": {}, + "metrics": [{ + "field": "value", + "id": "1", + "meta": {}, + "options": {}, + "settings": {}, + "type": "sum" + }], + "mode": 0, + "numToGraph": 10, + "prependHostName": false, + "query": "(scope:*.hmp.metricsd) AND (name_raw:builtin.general.*_instance_count)", + "refId": "A", + "regexAlias": false, + "selectedApplication": "", + "selectedHost": "", + "selectedLocation": "", + "timeField": "timestamp", + "useFullHostName": "", + "useQuery": false + }` + + expectedElasticsearchQueryJSON := ` + { + "size": 0, + "query": { + "bool": { + "filter": [ + { + "range": { + "timestamp": { + "gte": "", + "lte": "", + "format": "epoch_millis" + } + } + }, + { + "query_string": { + "analyze_wildcard": true, + "query": "(scope:*.hmp.metricsd) AND (name_raw:builtin.general.*_instance_count)" + } + } + ] + } + }, + "aggs": {"4":{"aggs":{"2":{"aggs":{"1":{"sum":{"field":"value"}}},"date_histogram":{"extended_bounds":{"max":"","min":""},"field":"timestamp","format":"epoch_millis","interval":"1m","min_doc_count":0}}},"terms":{"field":"name_raw","order":{"_term":"desc"},"size":10}}} + }` + + testElasticSearchResponse(testElasticsearchModelRequestJSON, expectedElasticsearchQueryJSON) + }) + }) +} diff --git a/pkg/tsdb/elasticsearch/response_parser.go b/pkg/tsdb/elasticsearch/response_parser.go index a2a8565641f..01b8cb1d235 100644 --- a/pkg/tsdb/elasticsearch/response_parser.go +++ b/pkg/tsdb/elasticsearch/response_parser.go @@ -6,14 +6,14 @@ import ( "github.com/grafana/grafana/pkg/components/null" "github.com/grafana/grafana/pkg/components/simplejson" "github.com/grafana/grafana/pkg/tsdb" - "strconv" "regexp" + "strconv" "strings" ) type ElasticsearchResponseParser struct { Responses []Response - Targets []*QueryBuilder + Targets []*Query } func (rp *ElasticsearchResponseParser) getTimeSeries() *tsdb.QueryResult { @@ -29,7 +29,7 @@ func (rp *ElasticsearchResponseParser) 
getTimeSeries() *tsdb.QueryResult { return queryRes } -func (rp *ElasticsearchResponseParser) processBuckets(aggs map[string]interface{}, target *QueryBuilder, series *[]*tsdb.TimeSeries, props map[string]string, depth int) (error) { +func (rp *ElasticsearchResponseParser) processBuckets(aggs map[string]interface{}, target *Query, series *[]*tsdb.TimeSeries, props map[string]string, depth int) error { var err error maxDepth := len(target.BucketAggs) - 1 for aggId, v := range aggs { @@ -71,7 +71,7 @@ func (rp *ElasticsearchResponseParser) processBuckets(aggs map[string]interface{ } -func (rp *ElasticsearchResponseParser) processMetrics(esAgg *simplejson.Json, target *QueryBuilder, series *[]*tsdb.TimeSeries, props map[string]string) (error) { +func (rp *ElasticsearchResponseParser) processMetrics(esAgg *simplejson.Json, target *Query, series *[]*tsdb.TimeSeries, props map[string]string) error { for _, v := range target.Metrics { metric := simplejson.NewFromAny(v) if metric.Get("hide").MustBool(false) { @@ -143,7 +143,7 @@ func (rp *ElasticsearchResponseParser) processMetrics(esAgg *simplejson.Json, ta return nil } -func (rp *ElasticsearchResponseParser) nameSeries(seriesList *[]*tsdb.TimeSeries, target *QueryBuilder) { +func (rp *ElasticsearchResponseParser) nameSeries(seriesList *[]*tsdb.TimeSeries, target *Query) { set := make(map[string]string) for _, v := range *seriesList { if metricType, exists := v.Tags["metric"]; exists { @@ -159,8 +159,9 @@ func (rp *ElasticsearchResponseParser) nameSeries(seriesList *[]*tsdb.TimeSeries } -func (rp *ElasticsearchResponseParser) getSeriesName(series *tsdb.TimeSeries, target *QueryBuilder, metricTypeCount int) (string) { - metricName := rp.getMetricName(series.Tags["metric"]) +func (rp *ElasticsearchResponseParser) getSeriesName(series *tsdb.TimeSeries, target *Query, metricTypeCount int) string { + metricType := series.Tags["metric"] + metricName := rp.getMetricName(metricType) delete(series.Tags, "metric") field := "" @@ -172,7 +173,7 @@ func (rp *ElasticsearchResponseParser) getSeriesName(series *tsdb.TimeSeries, ta if target.Alias != "" { var re = regexp.MustCompile(`{{([\s\S]+?)}}`) for _, match := range re.FindAllString(target.Alias, -1) { - group := match[2:len(match)-2] + group := match[2 : len(match)-2] if strings.HasPrefix(group, "term ") { if term, ok := series.Tags["term "]; ok { @@ -193,7 +194,20 @@ func (rp *ElasticsearchResponseParser) getSeriesName(series *tsdb.TimeSeries, ta } } // todo, if field and pipelineAgg - if field != "" { + if field != "" && isPipelineAgg(metricType) { + found := false + for _, targetMetricI := range target.Metrics { + targetMetric := simplejson.NewFromAny(targetMetricI) + if targetMetric.Get("id").MustString() == field { + metricName += " " + describeMetric(targetMetric.Get("type").MustString(), field) + found = true + } + } + if !found { + metricName = "Unset" + } + + } else if field != "" { metricName += " " + field } @@ -241,7 +255,7 @@ func castToNullFloat(j *simplejson.Json) null.Float { return null.NewFloat(0, false) } -func findAgg(target *QueryBuilder, aggId string) (*simplejson.Json, error) { +func findAgg(target *Query, aggId string) (*simplejson.Json, error) { for _, v := range target.BucketAggs { aggDef := simplejson.NewFromAny(v) if aggId == aggDef.Get("id").MustString() { From 4042e4b225ad4b989f3f1ecabb52271547ff2af2 Mon Sep 17 00:00:00 2001 From: wph95 Date: Tue, 27 Mar 2018 02:12:43 +0800 Subject: [PATCH 009/488] fix a terms bug and add test --- pkg/tsdb/elasticsearch/models.go | 2 +- 
pkg/tsdb/elasticsearch/query.go | 6 +- pkg/tsdb/elasticsearch/query_test.go | 97 ++++++++++++++++++++++++++++ 3 files changed, 102 insertions(+), 3 deletions(-) diff --git a/pkg/tsdb/elasticsearch/models.go b/pkg/tsdb/elasticsearch/models.go index 822df2dd4d1..6ab6fa9f43e 100644 --- a/pkg/tsdb/elasticsearch/models.go +++ b/pkg/tsdb/elasticsearch/models.go @@ -41,7 +41,7 @@ type DateHistogramAgg struct { } type FiltersAgg struct { - Filter map[string]interface{} `json:"filter"` + Filters map[string]interface{} `json:"filters"` } type TermsAggSetting struct { diff --git a/pkg/tsdb/elasticsearch/query.go b/pkg/tsdb/elasticsearch/query.go index 51f1ebb5d7a..c4e30cfcbf4 100644 --- a/pkg/tsdb/elasticsearch/query.go +++ b/pkg/tsdb/elasticsearch/query.go @@ -193,15 +193,17 @@ func (q *Query) getHistogramAgg(model *simplejson.Json) *HistogramAgg { func (q *Query) getFilters(model *simplejson.Json) *FiltersAgg { agg := &FiltersAgg{} + agg.Filters = map[string]interface{}{} settings := simplejson.NewFromAny(model.Get("settings").Interface()) - for filter := range settings.Get("filters").MustArray() { + + for _, filter := range settings.Get("filters").MustArray() { filterJson := simplejson.NewFromAny(filter) query := filterJson.Get("query").MustString("") label := filterJson.Get("label").MustString("") if label == "" { label = query } - agg.Filter[label] = newQueryStringFilter(true, query) + agg.Filters[label] = newQueryStringFilter(true, query) } return agg } diff --git a/pkg/tsdb/elasticsearch/query_test.go b/pkg/tsdb/elasticsearch/query_test.go index 992469175b6..4f7b4d9147e 100644 --- a/pkg/tsdb/elasticsearch/query_test.go +++ b/pkg/tsdb/elasticsearch/query_test.go @@ -325,6 +325,103 @@ func TestElasticSearchQueryBuilder(t *testing.T) { "aggs": {"4":{"aggs":{"2":{"aggs":{"1":{"sum":{"field":"value"}}},"date_histogram":{"extended_bounds":{"max":"","min":""},"field":"timestamp","format":"epoch_millis","interval":"1m","min_doc_count":0}}},"terms":{"field":"name_raw","order":{"_term":"desc"},"size":10}}} }` + testElasticSearchResponse(testElasticsearchModelRequestJSON, expectedElasticsearchQueryJSON) + }) + Convey("Test Filters Aggregates", func() { + testElasticsearchModelRequestJSON := ` + { + "bucketAggs": [ + { + "id": "3", + "settings": { + "filters": [{ + "label": "hello", + "query": "host:\"67.65.185.232\"" + }] + }, + "type": "filters" + }, + { + "field": "time", + "id": "2", + "settings": { + "interval": "auto", + "min_doc_count": 0, + "trimEdges": 0 + }, + "type": "date_histogram" + } + ], + "metrics": [ + { + "pipelineAgg": "select metric", + "field": "bytesSent", + "id": "1", + "meta": {}, + "settings": {}, + "type": "count" + } + ], + "query": "*", + "refId": "A", + "timeField": "time" + }` + + expectedElasticsearchQueryJSON := `{ + "size": 0, + "query": { + "bool": { + "filter": [ + { + "range": { + "time": { + "gte": "", + "lte": "", + "format": "epoch_millis" + } + } + }, + { + "query_string": { + "analyze_wildcard": true, + "query": "*" + } + } + ] + } + }, + "aggs": { + "3": { + "filters": { + "filters": { + "hello": { + "query_string": { + "query": "host:\"67.65.185.232\"", + "analyze_wildcard": true + } + } + } + }, + "aggs": { + "2": { + "date_histogram": { + "interval": "200ms", + "field": "time", + "min_doc_count": 0, + "extended_bounds": { + "min": "", + "max": "" + }, + "format": "epoch_millis" + }, + "aggs": {} + } + } + } + } + } + ` + testElasticSearchResponse(testElasticsearchModelRequestJSON, expectedElasticsearchQueryJSON) }) }) From 
06f73321560defb2ac074e4d90af1c94f459943d Mon Sep 17 00:00:00 2001 From: wph95 Date: Wed, 28 Mar 2018 01:42:25 +0800 Subject: [PATCH 010/488] cleanup and add more test --- pkg/tsdb/elasticsearch/elasticsearch_test.go | 121 ++++++++++++ pkg/tsdb/elasticsearch/model_parser.go | 65 ++++++- pkg/tsdb/elasticsearch/models.go | 26 ++- pkg/tsdb/elasticsearch/query.go | 113 +++++------ pkg/tsdb/elasticsearch/query_def.go | 1 - pkg/tsdb/elasticsearch/query_test.go | 186 +------------------ pkg/tsdb/elasticsearch/response_parser.go | 50 +++-- 7 files changed, 274 insertions(+), 288 deletions(-) create mode 100644 pkg/tsdb/elasticsearch/elasticsearch_test.go diff --git a/pkg/tsdb/elasticsearch/elasticsearch_test.go b/pkg/tsdb/elasticsearch/elasticsearch_test.go new file mode 100644 index 00000000000..ad905299166 --- /dev/null +++ b/pkg/tsdb/elasticsearch/elasticsearch_test.go @@ -0,0 +1,121 @@ +package elasticsearch + +import ( + "github.com/grafana/grafana/pkg/components/simplejson" + "time" +) + +var avgWithMovingAvg = Query{ + TimeField: "timestamp", + RawQuery: "(test:query) AND (name:sample)", + Interval: time.Millisecond, + BucketAggs: []*BucketAgg{{ + Field: "timestamp", + ID: "2", + Type: "date_histogram", + Settings: simplejson.NewFromAny(map[string]interface{}{ + "interval": "auto", + "min_doc_count": 0, + "trimEdges": 0, + }), + }}, + Metrics: []*Metric{{ + Field: "value", + ID: "1", + Type: "avg", + Settings: simplejson.NewFromAny(map[string]interface{}{ + "script": map[string]string{ + "inline": "_value * 2", + }, + }), + }, { + Field: "1", + ID: "3", + Type: "moving_avg", + PipelineAggregate: "1", + Settings: simplejson.NewFromAny(map[string]interface{}{ + "minimize": false, + "model": "simple", + "window": 5, + }), + }}, +} + +var wildcardsAndQuotes = Query{ + TimeField: "timestamp", + RawQuery: "scope:$location.leagueconnect.api AND name:*CreateRegistration AND name:\"*.201-responses.rate\"", + Interval: time.Millisecond, + BucketAggs: []*BucketAgg{{ + Field: "timestamp", + ID: "2", + Type: "date_histogram", + Settings: simplejson.NewFromAny(map[string]interface{}{}), + }}, + Metrics: []*Metric{{ + Field: "value", + ID: "1", + Type: "sum", + Settings: simplejson.NewFromAny(map[string]interface{}{}), + }}, +} +var termAggs = Query{ + TimeField: "timestamp", + RawQuery: "(scope:*.hmp.metricsd) AND (name_raw:builtin.general.*_instance_count)", + Interval: time.Millisecond, + BucketAggs: []*BucketAgg{{ + Field: "name_raw", + ID: "4", + Type: "terms", + Settings: simplejson.NewFromAny(map[string]interface{}{ + "order": "desc", + "orderBy": "_term", + "size": "10", + }), + }, { + Field: "timestamp", + ID: "2", + Type: "date_histogram", + Settings: simplejson.NewFromAny(map[string]interface{}{ + "interval": "auto", + "min_doc_count": 0, + "trimEdges": 0, + }), + }}, + Metrics: []*Metric{{ + Field: "value", + ID: "1", + Type: "sum", + Settings: simplejson.NewFromAny(map[string]interface{}{}), + }}, +} + +var filtersAggs = Query{ + TimeField: "time", + RawQuery: "*", + Interval: time.Millisecond, + BucketAggs: []*BucketAgg{{ + ID: "3", + Type: "filters", + Settings: simplejson.NewFromAny(map[string]interface{}{ + "filters": []interface{}{ + map[string]interface{}{"label": "hello", "query": "host:\"67.65.185.232\""}, + }, + }), + }, { + Field: "timestamp", + ID: "2", + Type: "date_histogram", + Settings: simplejson.NewFromAny(map[string]interface{}{ + "interval": "auto", + "min_doc_count": 0, + "trimEdges": 0, + }), + }}, + Metrics: []*Metric{{ + Field: "bytesSent", + ID: "1", + Type: "count", + 
PipelineAggregate: "select metric", + Settings: simplejson.NewFromAny(map[string]interface{}{}), + }}, +} diff --git a/pkg/tsdb/elasticsearch/model_parser.go b/pkg/tsdb/elasticsearch/model_parser.go index 0d016dc58a5..5d94aebef1a 100644 --- a/pkg/tsdb/elasticsearch/model_parser.go +++ b/pkg/tsdb/elasticsearch/model_parser.go @@ -20,9 +20,15 @@ func (qp *ElasticSearchQueryParser) Parse(model *simplejson.Json, dsInfo *models if err != nil { return nil, err } - rawQuery := model.Get("query").MustString("") - bucketAggs := model.Get("bucketAggs").MustArray() - metrics := model.Get("metrics").MustArray() + rawQuery := model.Get("query").MustString() + bucketAggs, err := qp.parseBucketAggs(model) + if err != nil { + return nil, err + } + metrics, err := qp.parseMetrics(model) + if err != nil { + return nil, err + } alias := model.Get("alias").MustString("") parsedInterval, err := tsdb.GetIntervalFrom(dsInfo, model, time.Millisecond) if err != nil { @@ -37,6 +43,57 @@ func (qp *ElasticSearchQueryParser) Parse(model *simplejson.Json, dsInfo *models parsedInterval}, nil } +func (qp *ElasticSearchQueryParser) parseBucketAggs(model *simplejson.Json) ([]*BucketAgg, error) { + var err error + var result []*BucketAgg + for _, t := range model.Get("bucketAggs").MustArray() { + aggJson := simplejson.NewFromAny(t) + agg := &BucketAgg{} + + agg.Type, err = aggJson.Get("type").String() + if err != nil { + return nil, err + } + + agg.ID, err = aggJson.Get("id").String() + if err != nil { + return nil, err + } + + agg.Field = aggJson.Get("field").MustString() + agg.Settings = simplejson.NewFromAny(aggJson.Get("settings").MustMap()) + + result = append(result, agg) + } + return result, nil +} + +func (qp *ElasticSearchQueryParser) parseMetrics(model *simplejson.Json) ([]*Metric, error) { + var err error + var result []*Metric + for _, t := range model.Get("metrics").MustArray() { + metricJson := simplejson.NewFromAny(t) + metric := &Metric{} + + metric.Field = metricJson.Get("field").MustString() + metric.Hide = metricJson.Get("hide").MustBool(false) + metric.ID, err = metricJson.Get("id").String() + if err != nil { + return nil, err + } + + metric.PipelineAggregate = metricJson.Get("pipelineAgg").MustString() + metric.Settings = simplejson.NewFromAny(metricJson.Get("settings").MustMap()) + + metric.Type, err = metricJson.Get("type").String() + if err != nil { + return nil, err + } + + result = append(result, metric) + } + return result, nil +} func getRequestHeader(timeRange *tsdb.TimeRange, dsInfo *models.DataSource) *QueryHeader { var header QueryHeader esVersion := dsInfo.JsonData.Get("esVersion").MustInt() @@ -47,7 +104,7 @@ func getRequestHeader(timeRange *tsdb.TimeRange, dsInfo *models.DataSource) *Que } header.SearchType = searchType header.IgnoreUnavailable = true - header.Index = getIndexList(dsInfo.Database, dsInfo.JsonData.Get("interval").MustString(""), timeRange) + header.Index = getIndexList(dsInfo.Database, dsInfo.JsonData.Get("interval").MustString(), timeRange) if esVersion >= 56 { header.MaxConcurrentShardRequests = dsInfo.JsonData.Get("maxConcurrentShardRequests").MustInt() diff --git a/pkg/tsdb/elasticsearch/models.go b/pkg/tsdb/elasticsearch/models.go index 6ab6fa9f43e..9cf295cbd0e 100644 --- a/pkg/tsdb/elasticsearch/models.go +++ b/pkg/tsdb/elasticsearch/models.go @@ -7,6 +7,22 @@ import ( "github.com/grafana/grafana/pkg/components/simplejson" ) +type BucketAgg struct { + Field string `json:"field"` + ID string `json:"id"` + Settings *simplejson.Json `json:"settings"` + Type string 
`json:"type"` +} + +type Metric struct { + Field string `json:"field"` + Hide bool `json:"hide"` + ID string `json:"id"` + PipelineAggregate string `json:"pipelineAgg"` + Settings *simplejson.Json `json:"settings"` + Type string `json:"type"` +} + type QueryHeader struct { SearchType string `json:"search_type"` IgnoreUnavailable bool `json:"ignore_unavailable"` @@ -44,16 +60,16 @@ type FiltersAgg struct { Filters map[string]interface{} `json:"filters"` } -type TermsAggSetting struct { +type TermsAgg struct { Field string `json:"field"` Size int `json:"size"` Order map[string]interface{} `json:"order"` Missing string `json:"missing,omitempty"` } -type TermsAgg struct { - Terms TermsAggSetting `json:"terms"` - Aggs Aggs `json:"aggs"` +type TermsAggWrap struct { + Terms TermsAgg `json:"terms"` + Aggs Aggs `json:"aggs"` } type ExtendedBounds struct { @@ -91,8 +107,6 @@ type BoolQuery struct { Filter []interface{} `json:"filter"` } -type Metric map[string]interface{} - type Responses struct { Responses []Response `json:"responses"` } diff --git a/pkg/tsdb/elasticsearch/query.go b/pkg/tsdb/elasticsearch/query.go index c4e30cfcbf4..a63529df2df 100644 --- a/pkg/tsdb/elasticsearch/query.go +++ b/pkg/tsdb/elasticsearch/query.go @@ -18,11 +18,11 @@ var rangeFilterSetting = RangeFilterSetting{Gte: "$timeFrom", Format: "epoch_millis"} type Query struct { - TimeField string `json:"timeField"` - RawQuery string `json:"query"` - BucketAggs []interface{} `json:"bucketAggs"` - Metrics []interface{} `json:"metrics"` - Alias string `json:"Alias"` + TimeField string `json:"timeField"` + RawQuery string `json:"query"` + BucketAggs []*BucketAgg `json:"bucketAggs"` + Metrics []*Metric `json:"metrics"` + Alias string `json:"Alias"` Interval time.Duration } @@ -73,27 +73,17 @@ func (q *Query) renderReqQuery(req *Request) { func (q *Query) parseAggs(req *Request) error { aggs := make(Aggs) nestedAggs := aggs - for _, aggRaw := range q.BucketAggs { + for _, agg := range q.BucketAggs { esAggs := make(Aggs) - aggJson := simplejson.NewFromAny(aggRaw) - aggType, err := aggJson.Get("type").String() - if err != nil { - return err - } - id, err := aggJson.Get("id").String() - if err != nil { - return err - } - - switch aggType { + switch agg.Type { case "date_histogram": - esAggs["date_histogram"] = q.getDateHistogramAgg(aggJson) + esAggs["date_histogram"] = q.getDateHistogramAgg(agg) case "histogram": - esAggs["histogram"] = q.getHistogramAgg(aggJson) + esAggs["histogram"] = q.getHistogramAgg(agg) case "filters": - esAggs["filters"] = q.getFilters(aggJson) + esAggs["filters"] = q.getFilters(agg) case "terms": - terms := q.getTerms(aggJson) + terms := q.getTerms(agg) esAggs["terms"] = terms.Terms esAggs["aggs"] = terms.Aggs case "geohash_grid": @@ -105,59 +95,47 @@ func (q *Query) parseAggs(req *Request) error { } if aggs, ok := (nestedAggs["aggs"]).(Aggs); ok { - aggs[id] = esAggs + aggs[agg.ID] = esAggs } nestedAggs = esAggs } nestedAggs["aggs"] = make(Aggs) - for _, metricRaw := range q.Metrics { - metric := make(Metric) - metricJson := simplejson.NewFromAny(metricRaw) + for _, metric := range q.Metrics { + subAgg := make(Aggs) - id, err := metricJson.Get("id").String() - if err != nil { - return err - } - metricType, err := metricJson.Get("type").String() - if err != nil { - return err - } - if metricType == "count" { + if metric.Type == "count" { continue } + settings := metric.Settings.MustMap(make(map[string]interface{})) - settings := metricJson.Get("settings").MustMap(map[string]interface{}{}) - - if 
isPipelineAgg(metricType) { - pipelineAgg := metricJson.Get("pipelineAgg").MustString("") - if _, err := strconv.Atoi(pipelineAgg); err == nil { - settings["buckets_path"] = pipelineAgg + if isPipelineAgg(metric.Type) { + if _, err := strconv.Atoi(metric.PipelineAggregate); err == nil { + settings["buckets_path"] = metric.PipelineAggregate } else { continue } } else { - settings["field"] = metricJson.Get("field").MustString() + settings["field"] = metric.Field } - metric[metricType] = settings - nestedAggs["aggs"].(Aggs)[id] = metric + subAgg[metric.Type] = settings + nestedAggs["aggs"].(Aggs)[metric.ID] = subAgg } req.Aggs = aggs["aggs"].(Aggs) return nil } -func (q *Query) getDateHistogramAgg(model *simplejson.Json) *DateHistogramAgg { +func (q *Query) getDateHistogramAgg(target *BucketAgg) *DateHistogramAgg { agg := &DateHistogramAgg{} - settings := simplejson.NewFromAny(model.Get("settings").Interface()) - interval, err := settings.Get("interval").String() + interval, err := target.Settings.Get("interval").String() if err == nil { agg.Interval = interval } agg.Field = q.TimeField - agg.MinDocCount = settings.Get("min_doc_count").MustInt(0) + agg.MinDocCount = target.Settings.Get("min_doc_count").MustInt(0) agg.ExtendedBounds = ExtendedBounds{"$timeFrom", "$timeTo"} agg.Format = "epoch_millis" @@ -165,66 +143,63 @@ func (q *Query) getDateHistogramAgg(model *simplejson.Json) *DateHistogramAgg { agg.Interval = "$__interval" } - missing, err := settings.Get("missing").String() + missing, err := target.Settings.Get("missing").String() if err == nil { agg.Missing = missing } return agg } -func (q *Query) getHistogramAgg(model *simplejson.Json) *HistogramAgg { +func (q *Query) getHistogramAgg(target *BucketAgg) *HistogramAgg { agg := &HistogramAgg{} - settings := simplejson.NewFromAny(model.Get("settings").Interface()) - interval, err := settings.Get("interval").String() + interval, err := target.Settings.Get("interval").String() if err == nil { agg.Interval = interval } - field, err := model.Get("field").String() - if err == nil { - agg.Field = field + + if target.Field != "" { + agg.Field = target.Field } - agg.MinDocCount = settings.Get("min_doc_count").MustInt(0) - missing, err := settings.Get("missing").String() + agg.MinDocCount = target.Settings.Get("min_doc_count").MustInt(0) + missing, err := target.Settings.Get("missing").String() if err == nil { agg.Missing = missing } return agg } -func (q *Query) getFilters(model *simplejson.Json) *FiltersAgg { +func (q *Query) getFilters(target *BucketAgg) *FiltersAgg { agg := &FiltersAgg{} agg.Filters = map[string]interface{}{} - settings := simplejson.NewFromAny(model.Get("settings").Interface()) - - for _, filter := range settings.Get("filters").MustArray() { + for _, filter := range target.Settings.Get("filters").MustArray() { filterJson := simplejson.NewFromAny(filter) query := filterJson.Get("query").MustString("") label := filterJson.Get("label").MustString("") if label == "" { label = query } + agg.Filters[label] = newQueryStringFilter(true, query) } return agg } -func (q *Query) getTerms(model *simplejson.Json) *TermsAgg { - agg := &TermsAgg{Aggs: make(Aggs)} - settings := simplejson.NewFromAny(model.Get("settings").Interface()) - agg.Terms.Field = model.Get("field").MustString() - if settings == nil { +func (q *Query) getTerms(target *BucketAgg) *TermsAggWrap { + agg := &TermsAggWrap{Aggs: make(Aggs)} + agg.Terms.Field = target.Field + if len(target.Settings.MustMap()) == 0 { return agg } - sizeStr := 
settings.Get("size").MustString("") + sizeStr := target.Settings.Get("size").MustString("") size, err := strconv.Atoi(sizeStr) if err != nil { size = 500 } agg.Terms.Size = size - orderBy, err := settings.Get("orderBy").String() + orderBy, err := target.Settings.Get("orderBy").String() if err == nil { agg.Terms.Order = make(map[string]interface{}) - agg.Terms.Order[orderBy] = settings.Get("order").MustString("") + agg.Terms.Order[orderBy] = target.Settings.Get("order").MustString("") if _, err := strconv.Atoi(orderBy); err != nil { for _, metricI := range q.Metrics { metric := simplejson.NewFromAny(metricI) @@ -242,7 +217,7 @@ func (q *Query) getTerms(model *simplejson.Json) *TermsAgg { } } - missing, err := settings.Get("missing").String() + missing, err := target.Settings.Get("missing").String() if err == nil { agg.Terms.Missing = missing } diff --git a/pkg/tsdb/elasticsearch/query_def.go b/pkg/tsdb/elasticsearch/query_def.go index 6f78f02f346..128e752d97a 100644 --- a/pkg/tsdb/elasticsearch/query_def.go +++ b/pkg/tsdb/elasticsearch/query_def.go @@ -40,5 +40,4 @@ func isPipelineAgg(metricType string) bool { func describeMetric(metricType, field string) string { text := metricAggType[metricType] return text + " " + field - } diff --git a/pkg/tsdb/elasticsearch/query_test.go b/pkg/tsdb/elasticsearch/query_test.go index 4f7b4d9147e..aecca9f4734 100644 --- a/pkg/tsdb/elasticsearch/query_test.go +++ b/pkg/tsdb/elasticsearch/query_test.go @@ -13,13 +13,8 @@ import ( "testing" ) -func testElasticSearchResponse(requestJSON string, expectedElasticSearchRequestJSON string) { +func testElasticSearchResponse(query Query, expectedElasticSearchRequestJSON string) { var queryExpectedJSONInterface, queryJSONInterface interface{} - parser := ElasticSearchQueryParser{} - model := &Query{} - - err := json.Unmarshal([]byte(requestJSON), model) - So(err, ShouldBeNil) jsonDate, _ := simplejson.NewJson([]byte(`{"esVersion":2}`)) dsInfo := &models.DataSource{ Database: "grafana-test", @@ -28,10 +23,8 @@ func testElasticSearchResponse(requestJSON string, expectedElasticSearchRequestJ testTimeRange := tsdb.NewTimeRange("5m", "now") - req, _ := simplejson.NewJson([]byte(requestJSON)) - query, err := parser.Parse(req, dsInfo) s, err := query.Build(&tsdb.TsdbQuery{TimeRange: testTimeRange}, dsInfo) - + So(err, ShouldBeNil) queryJSON := strings.Split(s, "\n")[1] err = json.Unmarshal([]byte(queryJSON), &queryJSONInterface) So(err, ShouldBeNil) @@ -62,53 +55,6 @@ func testElasticSearchResponse(requestJSON string, expectedElasticSearchRequestJ func TestElasticSearchQueryBuilder(t *testing.T) { Convey("Elasticsearch QueryBuilder query testing", t, func() { Convey("Build test average metric with moving average", func() { - var testElasticsearchModelRequestJSON = ` - { - "bucketAggs": [ - { - "field": "timestamp", - "id": "2", - "settings": { - "interval": "auto", - "min_doc_count": 0, - "trimEdges": 0 - }, - "type": "date_histogram" - } - ], - "dsType": "elasticsearch", - "metrics": [ - { - "field": "value", - "id": "1", - "inlineScript": "_value * 2", - "meta": {}, - "settings": { - "script": { - "inline": "_value * 2" - } - }, - "type": "avg" - }, - { - "field": "1", - "id": "3", - "meta": {}, - "pipelineAgg": "1", - "settings": { - "minimize": false, - "model": "simple", - "window": 5 - }, - "type": "moving_avg" - } - ], - "query": "(test:query) AND (name:sample)", - "refId": "A", - "timeField": "timestamp" - } - ` - var expectedElasticsearchQueryJSON = ` { "size": 0, @@ -167,32 +113,9 @@ func 
TestElasticSearchQueryBuilder(t *testing.T) { } }` - testElasticSearchResponse(testElasticsearchModelRequestJSON, expectedElasticsearchQueryJSON) + testElasticSearchResponse(avgWithMovingAvg, expectedElasticsearchQueryJSON) }) Convey("Test Wildcards and Quotes", func() { - testElasticsearchModelRequestJSON := ` - { - "alias": "New", - "bucketAggs": [ - { - "field": "timestamp", - "id": "2", - "type": "date_histogram" - } - ], - "dsType": "elasticsearch", - "metrics": [ - { - "type": "sum", - "field": "value", - "id": "1" - } - ], - "query": "scope:$location.leagueconnect.api AND name:*CreateRegistration AND name:\"*.201-responses.rate\"", - "refId": "A", - "timeField": "timestamp" - }` - expectedElasticsearchQueryJSON := ` { "size": 0, @@ -239,65 +162,9 @@ func TestElasticSearchQueryBuilder(t *testing.T) { } }` - testElasticSearchResponse(testElasticsearchModelRequestJSON, expectedElasticsearchQueryJSON) + testElasticSearchResponse(wildcardsAndQuotes, expectedElasticsearchQueryJSON) }) Convey("Test Term Aggregates", func() { - testElasticsearchModelRequestJSON := ` - { - "bucketAggs": [{ - "field": "name_raw", - "id": "4", - "settings": { - "order": "desc", - "orderBy": "_term", - "size": "10" - }, - "type": "terms" - }, { - "field": "timestamp", - "id": "2", - "settings": { - "interval": "1m", - "min_doc_count": 0, - "trimEdges": 0 - }, - "type": "date_histogram" - }], - "dsType": "elasticsearch", - "filters": [{ - "boolOp": "AND", - "not": false, - "type": "rfc190Scope", - "value": "*.hmp.metricsd" - }, { - "boolOp": "AND", - "not": false, - "type": "name_raw", - "value": "builtin.general.*_instance_count" - }], - "metricObject": {}, - "metrics": [{ - "field": "value", - "id": "1", - "meta": {}, - "options": {}, - "settings": {}, - "type": "sum" - }], - "mode": 0, - "numToGraph": 10, - "prependHostName": false, - "query": "(scope:*.hmp.metricsd) AND (name_raw:builtin.general.*_instance_count)", - "refId": "A", - "regexAlias": false, - "selectedApplication": "", - "selectedHost": "", - "selectedLocation": "", - "timeField": "timestamp", - "useFullHostName": "", - "useQuery": false - }` - expectedElasticsearchQueryJSON := ` { "size": 0, @@ -322,51 +189,12 @@ func TestElasticSearchQueryBuilder(t *testing.T) { ] } }, - "aggs": {"4":{"aggs":{"2":{"aggs":{"1":{"sum":{"field":"value"}}},"date_histogram":{"extended_bounds":{"max":"","min":""},"field":"timestamp","format":"epoch_millis","interval":"1m","min_doc_count":0}}},"terms":{"field":"name_raw","order":{"_term":"desc"},"size":10}}} + "aggs": {"4":{"aggs":{"2":{"aggs":{"1":{"sum":{"field":"value"}}},"date_histogram":{"extended_bounds":{"max":"","min":""},"field":"timestamp","format":"epoch_millis","interval":"200ms","min_doc_count":0}}},"terms":{"field":"name_raw","order":{"_term":"desc"},"size":10}}} }` - testElasticSearchResponse(testElasticsearchModelRequestJSON, expectedElasticsearchQueryJSON) + testElasticSearchResponse(termAggs, expectedElasticsearchQueryJSON) }) Convey("Test Filters Aggregates", func() { - testElasticsearchModelRequestJSON := ` - { - "bucketAggs": [ - { - "id": "3", - "settings": { - "filters": [{ - "label": "hello", - "query": "host:\"67.65.185.232\"" - }] - }, - "type": "filters" - }, - { - "field": "time", - "id": "2", - "settings": { - "interval": "auto", - "min_doc_count": 0, - "trimEdges": 0 - }, - "type": "date_histogram" - } - ], - "metrics": [ - { - "pipelineAgg": "select metric", - "field": "bytesSent", - "id": "1", - "meta": {}, - "settings": {}, - "type": "count" - } - ], - "query": "*", - "refId": "A", - 
"timeField": "time" - }` - expectedElasticsearchQueryJSON := `{ "size": 0, "query": { @@ -422,7 +250,7 @@ func TestElasticSearchQueryBuilder(t *testing.T) { } ` - testElasticSearchResponse(testElasticsearchModelRequestJSON, expectedElasticsearchQueryJSON) + testElasticSearchResponse(filtersAggs, expectedElasticsearchQueryJSON) }) }) } diff --git a/pkg/tsdb/elasticsearch/response_parser.go b/pkg/tsdb/elasticsearch/response_parser.go index 01b8cb1d235..24d5ebebfc4 100644 --- a/pkg/tsdb/elasticsearch/response_parser.go +++ b/pkg/tsdb/elasticsearch/response_parser.go @@ -40,27 +40,26 @@ func (rp *ElasticsearchResponseParser) processBuckets(aggs map[string]interface{ } if depth == maxDepth { - if aggDef.Get("type").MustString() == "date_histogram" { + if aggDef.Type == "date_histogram" { err = rp.processMetrics(esAgg, target, series, props) if err != nil { return err } } else { - return fmt.Errorf("not support type:%s", aggDef.Get("type").MustString()) + return fmt.Errorf("not support type:%s", aggDef.Type) } } else { for i, b := range esAgg.Get("buckets").MustArray() { - field := aggDef.Get("field").MustString() bucket := simplejson.NewFromAny(b) newProps := props if key, err := bucket.Get("key").String(); err == nil { - newProps[field] = key + newProps[aggDef.Field] = key } else { props["filter"] = strconv.Itoa(i) } if key, err := bucket.Get("key_as_string").String(); err == nil { - props[field] = key + props[aggDef.Field] = key } rp.processBuckets(bucket.MustMap(), target, series, newProps, depth+1) } @@ -72,17 +71,12 @@ func (rp *ElasticsearchResponseParser) processBuckets(aggs map[string]interface{ } func (rp *ElasticsearchResponseParser) processMetrics(esAgg *simplejson.Json, target *Query, series *[]*tsdb.TimeSeries, props map[string]string) error { - for _, v := range target.Metrics { - metric := simplejson.NewFromAny(v) - if metric.Get("hide").MustBool(false) { + for _, metric := range target.Metrics { + if metric.Hide { continue } - metricId := metric.Get("id").MustString() - metricField := metric.Get("field").MustString() - metricType := metric.Get("type").MustString() - - switch metricType { + switch metric.Type { case "count": newSeries := tsdb.TimeSeries{} for _, v := range esAgg.Get("buckets").MustArray() { @@ -102,16 +96,16 @@ func (rp *ElasticsearchResponseParser) processMetrics(esAgg *simplejson.Json, ta } firstBucket := simplejson.NewFromAny(buckets[0]) - percentiles := firstBucket.GetPath(metricId, "values").MustMap() + percentiles := firstBucket.GetPath(metric.ID, "values").MustMap() for percentileName := range percentiles { newSeries := tsdb.TimeSeries{} newSeries.Tags = props newSeries.Tags["metric"] = "p" + percentileName - newSeries.Tags["field"] = metricField + newSeries.Tags["field"] = metric.Field for _, v := range buckets { bucket := simplejson.NewFromAny(v) - value := castToNullFloat(bucket.GetPath(metricId, "values", percentileName)) + value := castToNullFloat(bucket.GetPath(metric.ID, "values", percentileName)) key := castToNullFloat(bucket.Get("key")) newSeries.Points = append(newSeries.Points, tsdb.TimePoint{value, key}) } @@ -120,20 +114,20 @@ func (rp *ElasticsearchResponseParser) processMetrics(esAgg *simplejson.Json, ta default: newSeries := tsdb.TimeSeries{} newSeries.Tags = props - newSeries.Tags["metric"] = metricType - newSeries.Tags["field"] = metricField + newSeries.Tags["metric"] = metric.Type + newSeries.Tags["field"] = metric.Field for _, v := range esAgg.Get("buckets").MustArray() { bucket := simplejson.NewFromAny(v) key := 
castToNullFloat(bucket.Get("key")) - valueObj, err := bucket.Get(metricId).Map() + valueObj, err := bucket.Get(metric.ID).Map() if err != nil { break } var value null.Float if _, ok := valueObj["normalized_value"]; ok { - value = castToNullFloat(bucket.GetPath(metricId, "normalized_value")) + value = castToNullFloat(bucket.GetPath(metric.ID, "normalized_value")) } else { - value = castToNullFloat(bucket.GetPath(metricId, "value")) + value = castToNullFloat(bucket.GetPath(metric.ID, "value")) } newSeries.Points = append(newSeries.Points, tsdb.TimePoint{value, key}) } @@ -196,10 +190,9 @@ func (rp *ElasticsearchResponseParser) getSeriesName(series *tsdb.TimeSeries, ta // todo, if field and pipelineAgg if field != "" && isPipelineAgg(metricType) { found := false - for _, targetMetricI := range target.Metrics { - targetMetric := simplejson.NewFromAny(targetMetricI) - if targetMetric.Get("id").MustString() == field { - metricName += " " + describeMetric(targetMetric.Get("type").MustString(), field) + for _, metric := range target.Metrics { + if metric.ID == field { + metricName += " " + describeMetric(metric.Type, field) found = true } } @@ -255,11 +248,10 @@ func castToNullFloat(j *simplejson.Json) null.Float { return null.NewFloat(0, false) } -func findAgg(target *Query, aggId string) (*simplejson.Json, error) { +func findAgg(target *Query, aggId string) (*BucketAgg, error) { for _, v := range target.BucketAggs { - aggDef := simplejson.NewFromAny(v) - if aggId == aggDef.Get("id").MustString() { - return aggDef, nil + if aggId == v.ID { + return v, nil } } return nil, errors.New("can't found aggDef, aggID:" + aggId) From 4050fce2205f9fdb6c217eae49686730cabd92c7 Mon Sep 17 00:00:00 2001 From: wph95 Date: Wed, 28 Mar 2018 12:35:05 +0800 Subject: [PATCH 011/488] add response_parser test --- pkg/tsdb/elasticsearch/response_parser.go | 9 +- .../elasticsearch/response_parser_test.go | 109 ++++++++++++++++++ 2 files changed, 116 insertions(+), 2 deletions(-) create mode 100644 pkg/tsdb/elasticsearch/response_parser_test.go diff --git a/pkg/tsdb/elasticsearch/response_parser.go b/pkg/tsdb/elasticsearch/response_parser.go index 24d5ebebfc4..ec7d2f9eb08 100644 --- a/pkg/tsdb/elasticsearch/response_parser.go +++ b/pkg/tsdb/elasticsearch/response_parser.go @@ -30,6 +30,7 @@ func (rp *ElasticsearchResponseParser) getTimeSeries() *tsdb.QueryResult { } func (rp *ElasticsearchResponseParser) processBuckets(aggs map[string]interface{}, target *Query, series *[]*tsdb.TimeSeries, props map[string]string, depth int) error { + var err error maxDepth := len(target.BucketAggs) - 1 for aggId, v := range aggs { @@ -113,7 +114,11 @@ func (rp *ElasticsearchResponseParser) processMetrics(esAgg *simplejson.Json, ta } default: newSeries := tsdb.TimeSeries{} - newSeries.Tags = props + newSeries.Tags = map[string]string{} + for k, v := range props { + newSeries.Tags[k] = v + } + newSeries.Tags["metric"] = metric.Type newSeries.Tags["field"] = metric.Field for _, v := range esAgg.Get("buckets").MustArray() { @@ -121,7 +126,7 @@ func (rp *ElasticsearchResponseParser) processMetrics(esAgg *simplejson.Json, ta key := castToNullFloat(bucket.Get("key")) valueObj, err := bucket.Get(metric.ID).Map() if err != nil { - break + continue } var value null.Float if _, ok := valueObj["normalized_value"]; ok { diff --git a/pkg/tsdb/elasticsearch/response_parser_test.go b/pkg/tsdb/elasticsearch/response_parser_test.go new file mode 100644 index 00000000000..c5b877c1925 --- /dev/null +++ b/pkg/tsdb/elasticsearch/response_parser_test.go @@ 
-0,0 +1,109 @@ +package elasticsearch + +import ( + "encoding/json" + "github.com/grafana/grafana/pkg/tsdb" + . "github.com/smartystreets/goconvey/convey" + "testing" +) + +func testElasticsearchResponse(body string, target Query) *tsdb.QueryResult { + var responses Responses + err := json.Unmarshal([]byte(body), &responses) + So(err, ShouldBeNil) + + responseParser := ElasticsearchResponseParser{responses.Responses, []*Query{&target}} + return responseParser.getTimeSeries() +} + +func TestElasticSearchResponseParser(t *testing.T) { + Convey("Elasticsearch Response query testing", t, func() { + Convey("Build test average metric with moving average", func() { + responses := `{ + "responses": [ + { + "took": 1, + "timed_out": false, + "_shards": { + "total": 5, + "successful": 5, + "skipped": 0, + "failed": 0 + }, + "hits": { + "total": 4500, + "max_score": 0, + "hits": [] + }, + "aggregations": { + "2": { + "buckets": [ + { + "1": { + "value": null + }, + "key_as_string": "1522205880000", + "key": 1522205880000, + "doc_count": 0 + }, + { + "1": { + "value": 10 + }, + "key_as_string": "1522205940000", + "key": 1522205940000, + "doc_count": 300 + }, + { + "1": { + "value": 10 + }, + "3": { + "value": 20 + }, + "key_as_string": "1522206000000", + "key": 1522206000000, + "doc_count": 300 + }, + { + "1": { + "value": 10 + }, + "3": { + "value": 20 + }, + "key_as_string": "1522206060000", + "key": 1522206060000, + "doc_count": 300 + } + ] + } + }, + "status": 200 + } + ] +} +` + res := testElasticsearchResponse(responses, avgWithMovingAvg) + So(len(res.Series), ShouldEqual, 2) + So(res.Series[0].Name, ShouldEqual, "Average value") + So(len(res.Series[0].Points), ShouldEqual, 4) + for i, p := range res.Series[0].Points { + if i == 0 { + So(p[0].Valid, ShouldBeFalse) + } else { + So(p[0].Float64, ShouldEqual, 10) + } + So(p[1].Float64, ShouldEqual, 1522205880000+60000*i) + } + + So(res.Series[1].Name, ShouldEqual, "Moving Average Average 1") + So(len(res.Series[1].Points), ShouldEqual, 2) + + for _, p := range res.Series[1].Points { + So(p[0].Float64, ShouldEqual, 20) + } + + }) + }) +} From 64c16eb912dd469e44744b2070e1fb076b2c3651 Mon Sep 17 00:00:00 2001 From: Marcel Anacker Date: Wed, 4 Apr 2018 15:56:27 +0200 Subject: [PATCH 012/488] Alerting: Fixing mobile notifications in Microsoft Teams --- pkg/services/alerting/notifiers/teams.go | 20 +++++++++----------- 1 file changed, 9 insertions(+), 11 deletions(-) diff --git a/pkg/services/alerting/notifiers/teams.go b/pkg/services/alerting/notifiers/teams.go index 9a9e93dbc47..43d628a4415 100644 --- a/pkg/services/alerting/notifiers/teams.go +++ b/pkg/services/alerting/notifiers/teams.go @@ -41,10 +41,8 @@ func NewTeamsNotifier(model *m.AlertNotification) (alerting.Notifier, error) { type TeamsNotifier struct { NotifierBase - Url string - Recipient string - Mention string - log log.Logger + Url string + log log.Logger } func (this *TeamsNotifier) Notify(evalContext *alerting.EvalContext) error { @@ -75,17 +73,17 @@ func (this *TeamsNotifier) Notify(evalContext *alerting.EvalContext) error { }) } - message := this.Mention + message := "" if evalContext.Rule.State != m.AlertStateOK { //dont add message when going back to alert state ok. 
- message += " " + evalContext.Rule.Message - } else { - message += " " // summary must not be empty + message = evalContext.Rule.Message } body := map[string]interface{}{ - "@type": "MessageCard", - "@context": "http://schema.org/extensions", - "summary": message, + "@type": "MessageCard", + "@context": "http://schema.org/extensions", + // summary MUST not be empty or the webhook request fails + // summary SHOULD contain some meaningful information, since it is used for mobile notifications + "summary": evalContext.GetNotificationTitle(), "title": evalContext.GetNotificationTitle(), "themeColor": evalContext.GetStateModel().Color, "sections": []map[string]interface{}{ From 2dd40eb4e4b865b3769dae8eef6d0fe0a3feac68 Mon Sep 17 00:00:00 2001 From: Stuart McLean Date: Mon, 14 May 2018 20:50:08 +0200 Subject: [PATCH 013/488] improve alerting api docs sample responses --- docs/sources/http_api/alerting.md | 53 +++++++++++++++++++++---------- 1 file changed, 36 insertions(+), 17 deletions(-) diff --git a/docs/sources/http_api/alerting.md b/docs/sources/http_api/alerting.md index 3860ae490b1..305bfccebe8 100644 --- a/docs/sources/http_api/alerting.md +++ b/docs/sources/http_api/alerting.md @@ -50,19 +50,16 @@ Content-Type: application/json "id": 1, "dashboardId": 1, "panelId": 1, + "panelUId": "ABcdEFghij", + "dashboardSlug": "sensors", "name": "fire place sensor", "message": "Someone is trying to break in through the fire place", "state": "alerting", + "newStateDate": "2018-05-14T05:55:20+02:00", "evalDate": "0001-01-01T00:00:00Z", "evalData": [ - { - "metric": "fire", - "tags": null, - "value": 5.349999999999999 - } - "newStateDate": "2016-12-25", + "evalData": null, "executionError": "", - "url": "http://grafana.com/dashboard/db/sensors" + "dashboardUri": "http://grafana.com/dashboard/db/sensors" } ] ``` @@ -86,15 +83,37 @@ Authorization: Bearer eyJrIjoiT0tTcG1pUlY2RnVKZTFVaDFsNFZXdE9ZWmNrMkZYbk HTTP/1.1 200 Content-Type: application/json { - "id": 1, - "dashboardId": 1, - "panelId": 1, - "name": "fire place sensor", - "message": "Someone is trying to break in through the fire place", - "state": "alerting", - "newStateDate": "2016-12-25", - "executionError": "", - "url": "http://grafana.com/dashboard/db/sensors" + "Id": 1, + "Version": 0, + "OrgId": 1, + "DashboardId": 55, + "PanelId": 2, + "Name": "my mem alert", + "Message": "", + "Severity": "", + "State": "alerting", + "Handler": 1, + "Silenced": false, + "ExecutionError": " ", + "Frequency": 60, + "EvalData": { + "evalMatches": [ + { + "metric": "mem_usage", + "tags": { + "name": "server.grafana.com" + }, + "value": 98.765 + } + ] + }, + "NewStateDate": "2018-05-14T17:12:45+02:00", + "StateChanges": 3, + "Created": "2018-05-14T17:01:25+02:00", + "Updated": "2018-05-14T17:11:18+02:00", + "Settings": { + ... + } } ``` From 295169b94f79af4d8316b9cc3a193fce0706a772 Mon Sep 17 00:00:00 2001 From: Stuart McLean Date: Mon, 14 May 2018 20:50:44 +0200 Subject: [PATCH 014/488] add useful note to alerting api docs --- docs/sources/http_api/alerting.md | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/docs/sources/http_api/alerting.md b/docs/sources/http_api/alerting.md index 305bfccebe8..3b9eceddd5e 100644 --- a/docs/sources/http_api/alerting.md +++ b/docs/sources/http_api/alerting.md @@ -117,6 +117,13 @@ Content-Type: application/json } ``` +**Important Note**: +"evalMatches" data is cached in the db when and only when the state of the alert changes +(e.g. transitioning from "ok" to "alerting" state). 
+ +If one server triggers the alert first and a second server enters a triggering state before the first +has left the alerting state, the second server will not be visible in the "evalMatches" data. + ## Pause alert `POST /api/alerts/:id/pause` From c09c00a1f5eacf6d06e57d1c4d4fe001861e182e Mon Sep 17 00:00:00 2001 From: Stuart McLean Date: Tue, 15 May 2018 10:06:26 +0200 Subject: [PATCH 015/488] fixes following first code review --- docs/sources/http_api/alerting.md | 60 +++++++++++++------------------ 1 file changed, 25 insertions(+), 35 deletions(-) diff --git a/docs/sources/http_api/alerting.md b/docs/sources/http_api/alerting.md index 3b9eceddd5e..4d52105cf3c 100644 --- a/docs/sources/http_api/alerting.md +++ b/docs/sources/http_api/alerting.md @@ -49,17 +49,17 @@ Content-Type: application/json { "id": 1, "dashboardId": 1, - "panelId": 1, - "panelUId": "ABcdEFghij", + "dashboardUId": "ABcdEFghij", "dashboardSlug": "sensors", + "panelId": 1, "name": "fire place sensor", - "message": "Someone is trying to break in through the fire place", "state": "alerting", + "message": "Someone is trying to break in through the fire place", "newStateDate": "2018-05-14T05:55:20+02:00", "evalDate": "0001-01-01T00:00:00Z", "evalData": null, "executionError": "", - "dashboardUri": "http://grafana.com/dashboard/db/sensors" + "url": "http://grafana.com/dashboard/db/sensors" } ] ``` @@ -83,37 +83,27 @@ Authorization: Bearer eyJrIjoiT0tTcG1pUlY2RnVKZTFVaDFsNFZXdE9ZWmNrMkZYbk HTTP/1.1 200 Content-Type: application/json { - "Id": 1, - "Version": 0, - "OrgId": 1, - "DashboardId": 55, - "PanelId": 2, - "Name": "my mem alert", - "Message": "", - "Severity": "", - "State": "alerting", - "Handler": 1, - "Silenced": false, - "ExecutionError": " ", - "Frequency": 60, - "EvalData": { - "evalMatches": [ - { - "metric": "mem_usage", - "tags": { - "name": "server.grafana.com" - }, - "value": 98.765 - } - ] - }, - "NewStateDate": "2018-05-14T17:12:45+02:00", - "StateChanges": 3, - "Created": "2018-05-14T17:01:25+02:00", - "Updated": "2018-05-14T17:11:18+02:00", - "Settings": { - ... - } + "id": 1, + "dashboardId": 1, + "dashboardUId": "ABcdEFghij", + "dashboardSlug": "sensors", + "panelId": 1, + "name": "fire place sensor", + "state": "alerting", + "message": "Someone is trying to break in through the fire place", + "newStateDate": "2018-05-14T05:55:20+02:00", + "evalDate": "0001-01-01T00:00:00Z", + "evalData": { "evalMatches": [ + { + "metric": "movement", + "tags": { + "name": "fireplace_chimney" + }, + "value": 98.765 + } + ] }, + "executionError": "", + "url": "http://grafana.com/dashboard/db/sensors" } ``` From 567fec402e2fb2147e96c3d54174c3d2563c424b Mon Sep 17 00:00:00 2001 From: Daniel Lee Date: Wed, 16 May 2018 00:18:28 +0200 Subject: [PATCH 016/488] scroll: temporary fix for double scrollbar issue If #11939 is not merged in the patch release, then this is a temporary fix for 5.1.3. It sets overflow to hidden for larger screens and keeps the overflow set to auto for mobiles and tablets. 
Fixes #11937 --- public/sass/pages/_dashboard.scss | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/public/sass/pages/_dashboard.scss b/public/sass/pages/_dashboard.scss index 471e90ed9cf..fb947f176dd 100644 --- a/public/sass/pages/_dashboard.scss +++ b/public/sass/pages/_dashboard.scss @@ -44,10 +44,18 @@ div.flot-text { padding: $panel-padding; height: calc(100% - 27px); position: relative; + // Fixes scrolling on mobile devices overflow: auto; } +// For larger screens, set back to hidden to avoid double scroll bars +@include media-breakpoint-up(md) { + .panel-content { + overflow: hidden; + } +} + .panel-title-container { min-height: 9px; cursor: move; From fe301142bab237af6b268b6267bfae17320a3019 Mon Sep 17 00:00:00 2001 From: Mitsuhiro Tanda Date: Wed, 25 Apr 2018 18:23:37 +0900 Subject: [PATCH 017/488] allow to add annotation for non editable dashboard --- public/app/plugins/panel/graph/graph.ts | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/public/app/plugins/panel/graph/graph.ts b/public/app/plugins/panel/graph/graph.ts index 2de53b6dce0..41580c1deea 100755 --- a/public/app/plugins/panel/graph/graph.ts +++ b/public/app/plugins/panel/graph/graph.ts @@ -674,7 +674,7 @@ function graphDirective(timeSrv, popoverSrv, contextSrv) { return; } - if ((ranges.ctrlKey || ranges.metaKey) && dashboard.meta.canEdit) { + if ((ranges.ctrlKey || ranges.metaKey) && contextSrv.isEditor) { // Add annotation setTimeout(() => { eventManager.updateTime(ranges.xaxis); @@ -695,7 +695,7 @@ function graphDirective(timeSrv, popoverSrv, contextSrv) { return; } - if ((pos.ctrlKey || pos.metaKey) && dashboard.meta.canEdit) { + if ((pos.ctrlKey || pos.metaKey) && contextSrv.isEditor) { // Skip if range selected (added in "plotselected" event handler) let isRangeSelection = pos.x !== pos.x1; if (!isRangeSelection) { From 87c9c47579bc2c1b3cb1efa8c3d8ef7b424cf502 Mon Sep 17 00:00:00 2001 From: Mitsuhiro Tanda Date: Wed, 16 May 2018 11:58:51 +0900 Subject: [PATCH 018/488] use canMakeEditable --- public/app/plugins/panel/graph/graph.ts | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/public/app/plugins/panel/graph/graph.ts b/public/app/plugins/panel/graph/graph.ts index 41580c1deea..9e4fb42952e 100755 --- a/public/app/plugins/panel/graph/graph.ts +++ b/public/app/plugins/panel/graph/graph.ts @@ -674,7 +674,7 @@ function graphDirective(timeSrv, popoverSrv, contextSrv) { return; } - if ((ranges.ctrlKey || ranges.metaKey) && contextSrv.isEditor) { + if ((ranges.ctrlKey || ranges.metaKey) && (dashboard.meta.canEdit || dashboard.meta.canMakeEditable)) { // Add annotation setTimeout(() => { eventManager.updateTime(ranges.xaxis); @@ -695,7 +695,7 @@ function graphDirective(timeSrv, popoverSrv, contextSrv) { return; } - if ((pos.ctrlKey || pos.metaKey) && contextSrv.isEditor) { + if ((pos.ctrlKey || pos.metaKey) && (dashboard.meta.canEdit || dashboard.meta.canMakeEditable)) { // Skip if range selected (added in "plotselected" event handler) let isRangeSelection = pos.x !== pos.x1; if (!isRangeSelection) { From 50026fad2ec28b850b6e1351f03aa7a792e2eb4d Mon Sep 17 00:00:00 2001 From: Daniel Lee Date: Wed, 16 May 2018 15:07:34 +0200 Subject: [PATCH 019/488] legend: fixes Firefox/baron scroll bug Compensates for Firefox scrollbar calculation error in the baron framework. Offsetwidth and clientwidth are used to find the width of the scrollbar. In the legend these differ by 9px and cause the scroll div to grow by 9px for every refresh. 
This fix compensates with a negative margin-right in that case. Fixes #11830 --- public/app/plugins/panel/graph/legend.ts | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/public/app/plugins/panel/graph/legend.ts b/public/app/plugins/panel/graph/legend.ts index 6b6c89444dc..af61db396ba 100644 --- a/public/app/plugins/panel/graph/legend.ts +++ b/public/app/plugins/panel/graph/legend.ts @@ -287,6 +287,10 @@ module.directive('graphLegend', function(popoverSrv, $timeout) { destroyScrollbar(); legendScrollbar = baron(scrollbarParams); } + + // #11830 - compensates for Firefox scrollbar calculation error in the baron framework + scroller[0].style.marginRight = '-' + (scroller[0].offsetWidth - scroller[0].clientWidth) + 'px'; + legendScrollbar.scroll(); } From 66c4a04decf14e6bd268baa19775bd74a83afab6 Mon Sep 17 00:00:00 2001 From: Daniel Lee Date: Wed, 16 May 2018 17:46:32 +0200 Subject: [PATCH 020/488] changelog: add note for #11830 --- CHANGELOG.md | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 146520d13a1..57fcbb6401a 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -13,6 +13,10 @@ * **Security**: Fix XSS vulnerabilities in dashboard links [#11813](https://github.com/grafana/grafana/pull/11813) * **Singlestat**: Fix "time of last point" shows local time when dashboard timezone set to UTC [#10338](https://github.com/grafana/grafana/issues/10338) +# 5.1.3 (2018-05-16) + +* **Scroll**: Graph panel / legend texts shifts on the left each time we move scrollbar on firefox [#11830](https://github.com/grafana/grafana/issues/11830) + # 5.1.2 (2018-05-09) * **Database**: Fix MySql migration issue [#11862](https://github.com/grafana/grafana/issues/11862) From 75993971c6a2066dd6c5caca155abaa85ce96bd1 Mon Sep 17 00:00:00 2001 From: Daniel Lee Date: Wed, 16 May 2018 21:48:26 +0200 Subject: [PATCH 021/488] docs: installation pages for 5.1.3 --- docs/sources/installation/debian.md | 6 +++--- docs/sources/installation/rpm.md | 10 +++++----- docs/sources/installation/windows.md | 2 +- 3 files changed, 9 insertions(+), 9 deletions(-) diff --git a/docs/sources/installation/debian.md b/docs/sources/installation/debian.md index f0ad89a8e88..3025b2384df 100644 --- a/docs/sources/installation/debian.md +++ b/docs/sources/installation/debian.md @@ -15,7 +15,7 @@ weight = 1 Description | Download ------------ | ------------- -Stable for Debian-based Linux | [grafana_5.1.2_amd64.deb](https://s3-us-west-2.amazonaws.com/grafana-releases/release/grafana_5.1.2_amd64.deb) +Stable for Debian-based Linux | [grafana_5.1.3_amd64.deb](https://s3-us-west-2.amazonaws.com/grafana-releases/release/grafana_5.1.3_amd64.deb) @@ -27,9 +27,9 @@ installation. ```bash -wget https://s3-us-west-2.amazonaws.com/grafana-releases/release/grafana_5.1.2_amd64.deb +wget https://s3-us-west-2.amazonaws.com/grafana-releases/release/grafana_5.1.3_amd64.deb sudo apt-get install -y adduser libfontconfig -sudo dpkg -i grafana_5.1.2_amd64.deb +sudo dpkg -i grafana_5.1.3_amd64.deb ``` @@ -28,7 +28,7 @@ installation. You can install Grafana using Yum directly. 
```bash -$ sudo yum install https://s3-us-west-2.amazonaws.com/grafana-releases/release/grafana-5.1.2-1.x86_64.rpm +$ sudo yum install https://s3-us-west-2.amazonaws.com/grafana-releases/release/grafana-5.1.3-1.x86_64.rpm ``` - - - - - - diff --git a/public/sass/components/_dashboard_grid.scss b/public/sass/components/_dashboard_grid.scss index aec08d72258..0a27df75164 100644 --- a/public/sass/components/_dashboard_grid.scss +++ b/public/sass/components/_dashboard_grid.scss @@ -1,4 +1,4 @@ -@import '~react-grid-layout-grafana/css/styles.css'; +@import '~react-grid-layout/css/styles.css'; @import '~react-resizable/css/styles.css'; .panel-in-fullscreen { @@ -44,11 +44,6 @@ border-right: 2px solid $gray-1; border-bottom: 2px solid $gray-1; } - // temp fix since we use old commit of grid component - // this can be removed when we revert to non fork grid component - .react-grid-item > .react-resizable-handle { - background-image: url('../img/resize-handle-white.svg'); - } } .theme-light { diff --git a/yarn.lock b/yarn.lock index cdd71528baa..f58731040c6 100644 --- a/yarn.lock +++ b/yarn.lock @@ -405,17 +405,17 @@ angular-native-dragdrop@^1.2.2: version "1.2.2" resolved "https://registry.yarnpkg.com/angular-native-dragdrop/-/angular-native-dragdrop-1.2.2.tgz#d646c6b75b131c48073c3f6e36a225b2726d8bae" -angular-route@^1.6.6: - version "1.6.10" - resolved "https://registry.yarnpkg.com/angular-route/-/angular-route-1.6.10.tgz#4247a32eab19495624623e96c1626dfba17ebf21" +angular-route@1.6.6: + version "1.6.6" + resolved "https://registry.yarnpkg.com/angular-route/-/angular-route-1.6.6.tgz#8c11748aa195c717b1b615a7e746442bfc7c61f4" -angular-sanitize@^1.6.6: - version "1.6.10" - resolved "https://registry.yarnpkg.com/angular-sanitize/-/angular-sanitize-1.6.10.tgz#635a362afb2dd040179f17d3a5455962b2c1918f" +angular-sanitize@1.6.6: + version "1.6.6" + resolved "https://registry.yarnpkg.com/angular-sanitize/-/angular-sanitize-1.6.6.tgz#0fd065a19931517fbece66596d325d72b6e06041" -angular@^1.6.6: - version "1.6.10" - resolved "https://registry.yarnpkg.com/angular/-/angular-1.6.10.tgz#eed3080a34d29d0f681ff119b18ce294e3f74826" +angular@1.6.6: + version "1.6.6" + resolved "https://registry.yarnpkg.com/angular/-/angular-1.6.6.tgz#fd5a3cfb437ce382d854ee01120797978527cb64" ansi-align@^2.0.0: version "2.0.0" @@ -8898,22 +8898,22 @@ react-dom@^16.2.0: object-assign "^4.1.1" prop-types "^15.6.0" -"react-draggable@^2.2.6 || ^3.0.3", react-draggable@^3.0.3: +react-draggable@3.x, "react-draggable@^2.2.6 || ^3.0.3": version "3.0.5" resolved "https://registry.yarnpkg.com/react-draggable/-/react-draggable-3.0.5.tgz#c031e0ed4313531f9409d6cd84c8ebcec0ddfe2d" dependencies: classnames "^2.2.5" prop-types "^15.6.0" -react-grid-layout-grafana@0.16.0: - version "0.16.0" - resolved "https://registry.yarnpkg.com/react-grid-layout-grafana/-/react-grid-layout-grafana-0.16.0.tgz#12242153fcd0bb80a26af8e41694bc2fde788b3a" +react-grid-layout@0.16.6: + version "0.16.6" + resolved "https://registry.yarnpkg.com/react-grid-layout/-/react-grid-layout-0.16.6.tgz#9b2407a2b946c2260ebaf66f13b556e1da4efeb2" dependencies: classnames "2.x" lodash.isequal "^4.0.0" prop-types "15.x" - react-draggable "^3.0.3" - react-resizable "^1.7.5" + react-draggable "3.x" + react-resizable "1.x" react-highlight-words@^0.10.0: version "0.10.0" @@ -8973,7 +8973,7 @@ react-reconciler@^0.7.0: object-assign "^4.1.1" prop-types "^15.6.0" -react-resizable@^1.7.5: +react-resizable@1.x: version "1.7.5" resolved 
"https://registry.yarnpkg.com/react-resizable/-/react-resizable-1.7.5.tgz#83eb75bb3684da6989bbbf4f826e1470f0af902e" dependencies: From a1e6c31ec12a43f7cd7605031e9cbce7c2c667d6 Mon Sep 17 00:00:00 2001 From: bergquist Date: Tue, 29 May 2018 14:00:46 +0200 Subject: [PATCH 104/488] devenv: script for setting up default datasources --- .../bulk-testing/bulk-dashboards.yaml | 9 ++ devenv/dashboards/generate-bulk-dashboards.sh | 15 ---- devenv/datasources/default/default.yaml | 82 +++++++++++++++++++ devenv/setup.sh | 61 ++++++++++++++ 4 files changed, 152 insertions(+), 15 deletions(-) create mode 100644 devenv/dashboards/bulk-testing/bulk-dashboards.yaml delete mode 100755 devenv/dashboards/generate-bulk-dashboards.sh create mode 100644 devenv/datasources/default/default.yaml create mode 100755 devenv/setup.sh diff --git a/devenv/dashboards/bulk-testing/bulk-dashboards.yaml b/devenv/dashboards/bulk-testing/bulk-dashboards.yaml new file mode 100644 index 00000000000..7838e4bc342 --- /dev/null +++ b/devenv/dashboards/bulk-testing/bulk-dashboards.yaml @@ -0,0 +1,9 @@ +apiVersion: 1 + +providers: + - name: 'Bulk dashboards' + folder: 'Bulk dashboards' + type: file + options: + path: /home/carl/go/src/github.com/grafana/grafana/devenv/dashboards/bulk-testing + diff --git a/devenv/dashboards/generate-bulk-dashboards.sh b/devenv/dashboards/generate-bulk-dashboards.sh deleted file mode 100755 index 079a5a9c520..00000000000 --- a/devenv/dashboards/generate-bulk-dashboards.sh +++ /dev/null @@ -1,15 +0,0 @@ -#/bin/bash - -if ! type "jsonnet" > /dev/null; then - echo "you need you install jsonnet to run this script" - echo "follow the instructions on https://github.com/google/jsonnet" - exit 1 -fi - -COUNTER=0 -MAX=400 -while [ $COUNTER -lt $MAX ]; do - jsonnet -o "bulk-testing/dashboard${COUNTER}.json" -e "local bulkDash = import 'bulk-testing/bulkdash.jsonnet'; bulkDash + { uid: 'uid-${COUNTER}', title: 'title-${COUNTER}' }" - let COUNTER=COUNTER+1 -done - diff --git a/devenv/datasources/default/default.yaml b/devenv/datasources/default/default.yaml new file mode 100644 index 00000000000..b721c093f3a --- /dev/null +++ b/devenv/datasources/default/default.yaml @@ -0,0 +1,82 @@ +apiVersion: 1 + +datasources: + - name: Graphite + type: graphite + access: proxy + url: http://localhost:8080 + jsonData: + graphiteVersion: "1.1" + + - name: Prometheus + type: prometheus + access: proxy + isDefault: true + url: http://localhost:9090 + + - name: InfluxDB + type: influxdb + access: proxy + database: site + user: grafana + password: grafana + url: http://localhost:8086 + jsonData: + timeInterval: "15s" + + - name: OpenTsdb + type: opentsdb + access: proxy + url: http://localhost:4242 + jsonData: + tsdbResolution: 1 + tsdbVersion: 1 + + - name: Elastic + type: elasticsearch + access: proxy + database: "[metrics-]YYYY.MM.DD" + url: http://localhost:9200 + jsonData: + interval: Daily + timeField: "@timestamp" + + - name: MySQL + type: mysql + url: localhost:3306 + database: grafana + user: grafana + password: password + + - name: MSSQL + type: mssql + url: localhost:1433 + database: grafana + user: grafana + password: "Password!" 
+ + - name: Postgres + type: postgres + url: localhost:5432 + database: grafana + user: grafana + password: password + jsonData: + sslmode: "disable" + + - name: Cloudwatch + type: cloudwatch + editable: true + jsonData: + authType: credentials + defaultRegion: eu-west-2 + + - name: Cloudwatch keys + type: cloudwatch + editable: true + jsonData: + authType: keys + defaultRegion: eu-west-2 + secureJsonData: + accessKey: AKIAJL347VWN6MK63N2A + secretKey: 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 diff --git a/devenv/setup.sh b/devenv/setup.sh new file mode 100755 index 00000000000..d6f8f969e75 --- /dev/null +++ b/devenv/setup.sh @@ -0,0 +1,61 @@ +#!/bin/bash + +bulkDashboard() { + + requiresJsonnet + + COUNTER=0 + MAX=400 + while [ $COUNTER -lt $MAX ]; do + jsonnet -o "dashboards/bulk-testing/dashboard${COUNTER}.json" -e "local bulkDash = import 'dashboards/bulk-testing/bulkdash.jsonnet'; bulkDash + { uid: 'uid-${COUNTER}', title: 'title-${COUNTER}' }" + let COUNTER=COUNTER+1 + done + + ln -s -f -r ./dashboards/bulk-testing/bulk-dashboards.yaml ../conf/provisioning/dashboards/custom.yaml +} + +requiresJsonnet() { + if ! type "jsonnet" > /dev/null; then + echo "you need to install jsonnet to run this script" + echo "follow the instructions on https://github.com/google/jsonnet" + exit 1 + fi +} + +defaultDashboards() { + echo "not implemented yet" +} + +defaultDatasources() { + echo "setting up all default datasources using provisioning" + + ln -s -f -r ./datasources/default/default.yaml ../conf/provisioning/datasources/custom.yaml +} + +usage() { + echo -e "setup.sh\n\tThis script sets up datasources and dashboards for Grafana development\n" + echo "Usage:" + echo " bulk-dashboards - create and provision 400 dashboards" + echo " default-datasources - provision all core datasources" +} + +main() { + local cmd=$1 + + if [[ -z "$cmd" ]]; then + usage + exit 1 + fi + + if [[ $cmd == "bulk-dashboards" ]]; then + bulkDashboard + elif [[ $cmd == "default-datasources" ]]; then + defaultDatasources + elif [[ $cmd == "default-dashboards" ]]; then + bulkDashboard + else + usage + fi +} + +main "$@" \ No newline at end of file From be34417b3aa85c5eddfcff044ecd3df38c56c905 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Torkel=20=C3=96degaard?= Date: Tue, 29 May 2018 14:02:52 +0200 Subject: [PATCH 105/488] fix: refactoring PR #11996 and fixing issue #11551 $$hashkey in json editors --- public/app/features/annotations/editor_ctrl.ts | 4 ++++ public/app/features/annotations/partials/editor.html | 4 ++-- public/app/features/dashboard/save_provisioned_modal.ts | 2 +- public/app/features/dashboard/settings/settings.ts | 3 ++- 4 files changed, 9 insertions(+), 4 deletions(-) diff --git a/public/app/features/annotations/editor_ctrl.ts b/public/app/features/annotations/editor_ctrl.ts index 169e2e4c2bb..34b9635ec85 100644 --- a/public/app/features/annotations/editor_ctrl.ts +++ b/public/app/features/annotations/editor_ctrl.ts @@ -70,6 +70,10 @@ export class AnnotationsEditorCtrl { this.mode = 'list'; } + move(index, dir) { + _.move(this.annotations, index, index + dir); + } + add() { this.annotations.push(this.currentAnnotation); this.reset(); diff --git a/public/app/features/annotations/partials/editor.html b/public/app/features/annotations/partials/editor.html index e1410ad0fea..65ee7e52bd0 100644 --- a/public/app/features/annotations/partials/editor.html +++ b/public/app/features/annotations/partials/editor.html @@ -33,8 +33,8 @@ {{annotation.datasource || 'Default'}} - - + + diff --git 
a/public/app/features/dashboard/save_provisioned_modal.ts b/public/app/features/dashboard/save_provisioned_modal.ts index ba96ce0b0b9..3f2dcd0f57b 100644 --- a/public/app/features/dashboard/save_provisioned_modal.ts +++ b/public/app/features/dashboard/save_provisioned_modal.ts @@ -48,7 +48,7 @@ export class SaveProvisionedDashboardModalCtrl { constructor(dashboardSrv) { this.dash = dashboardSrv.getCurrent().getSaveModelClone(); delete this.dash.id; - this.dashboardJson = JSON.stringify(this.dash, null, 2); + this.dashboardJson = angular.toJson(this.dash, true); } save() { diff --git a/public/app/features/dashboard/settings/settings.ts b/public/app/features/dashboard/settings/settings.ts index 5acbbcf29c5..457cac5af72 100755 --- a/public/app/features/dashboard/settings/settings.ts +++ b/public/app/features/dashboard/settings/settings.ts @@ -2,6 +2,7 @@ import { coreModule, appEvents, contextSrv } from 'app/core/core'; import { DashboardModel } from '../dashboard_model'; import $ from 'jquery'; import _ from 'lodash'; +import angular from 'angular'; import config from 'app/core/config'; export class SettingsCtrl { @@ -118,7 +119,7 @@ export class SettingsCtrl { this.viewId = this.$location.search().editview; if (this.viewId) { - this.json = JSON.stringify(this.dashboard.getSaveModelClone(), null, 2); + this.json = angular.toJson(this.dashboard.getSaveModelClone(), true); } if (this.viewId === 'settings' && this.dashboard.meta.canMakeEditable) { From 4c9b146bda91ad3a37923c3dcd478109553cd3fb Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Torkel=20=C3=96degaard?= Date: Tue, 29 May 2018 14:11:05 +0200 Subject: [PATCH 106/488] PR: minor change to PR #12004 before merge --- public/sass/components/_panel_singlestat.scss | 2 -- 1 file changed, 2 deletions(-) diff --git a/public/sass/components/_panel_singlestat.scss b/public/sass/components/_panel_singlestat.scss index faaa6fc2447..af11de3b835 100644 --- a/public/sass/components/_panel_singlestat.scss +++ b/public/sass/components/_panel_singlestat.scss @@ -7,7 +7,6 @@ .singlestat-panel-value-container { line-height: 1; - display: table-cell; position: absolute; z-index: 1; font-size: 3em; @@ -16,7 +15,6 @@ top: 50%; left: 50%; transform: translate(-50%, -50%); - padding-bottom: 10px; } .singlestat-panel-prefix { From 3ba3fd9a598f73ef719f5497f658ab52dd5e909f Mon Sep 17 00:00:00 2001 From: Christophe Le Guern Date: Tue, 29 May 2018 14:26:33 +0200 Subject: [PATCH 107/488] Add new regions to handleGetRegions function (#12082) As public/app/plugins/datasource/cloudwatch/partials/config.html and this file list different sets of available AWS regions, I've updated the latter so they share the same data. That way, the regions() method in dashboards returns the same list as the frontend does. 
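Since both copies of the region list are maintained by hand, they can drift apart again. Below is a minimal sketch of how such drift could be caught mechanically; the diffRegions helper and the sample slices are hypothetical and not part of this patch or of the Grafana codebase:

```go
package main

import (
	"fmt"
	"sort"
)

// diffRegions reports entries that appear in one region list but not the
// other, so any drift between the two copies is obvious at a glance.
func diffRegions(backend, frontend []string) (onlyBackend, onlyFrontend []string) {
	contains := func(list []string, s string) bool {
		for _, v := range list {
			if v == s {
				return true
			}
		}
		return false
	}
	for _, r := range backend {
		if !contains(frontend, r) {
			onlyBackend = append(onlyBackend, r)
		}
	}
	for _, r := range frontend {
		if !contains(backend, r) {
			onlyFrontend = append(onlyFrontend, r)
		}
	}
	// sort the results so the report is stable across runs
	sort.Strings(onlyBackend)
	sort.Strings(onlyFrontend)
	return onlyBackend, onlyFrontend
}

func main() {
	backend := []string{"cn-northwest-1", "eu-west-3", "us-east-1"}
	frontend := []string{"us-east-1"}
	onlyBackend, onlyFrontend := diffRegions(backend, frontend)
	fmt.Println("only in backend:", onlyBackend)   // [cn-northwest-1 eu-west-3]
	fmt.Println("only in frontend:", onlyFrontend) // []
}
```

Feeding such a check the list from handleGetRegions and the list embedded in config.html would have flagged exactly the cn-northwest-1 and eu-west-3 entries this patch adds.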
--- pkg/tsdb/cloudwatch/metric_find_query.go | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pkg/tsdb/cloudwatch/metric_find_query.go b/pkg/tsdb/cloudwatch/metric_find_query.go index a7d33645b9b..136ee241c2e 100644 --- a/pkg/tsdb/cloudwatch/metric_find_query.go +++ b/pkg/tsdb/cloudwatch/metric_find_query.go @@ -230,8 +230,8 @@ func parseMultiSelectValue(input string) []string { // Please update the region list in public/app/plugins/datasource/cloudwatch/partials/config.html func (e *CloudWatchExecutor) handleGetRegions(ctx context.Context, parameters *simplejson.Json, queryContext *tsdb.TsdbQuery) ([]suggestData, error) { regions := []string{ - "ap-northeast-1", "ap-northeast-2", "ap-southeast-1", "ap-southeast-2", "ap-south-1", "ca-central-1", "cn-north-1", - "eu-central-1", "eu-west-1", "eu-west-2", "sa-east-1", "us-east-1", "us-east-2", "us-gov-west-1", "us-west-1", "us-west-2", + "ap-northeast-1", "ap-northeast-2", "ap-southeast-1", "ap-southeast-2", "ap-south-1", "ca-central-1", "cn-north-1", "cn-northwest-1", + "eu-central-1", "eu-west-1", "eu-west-2", "eu-west-3", "sa-east-1", "us-east-1", "us-east-2", "us-gov-west-1", "us-west-1", "us-west-2", } result := make([]suggestData, 0) From 79575ea124e07fcd106da646787318f8de1f29a7 Mon Sep 17 00:00:00 2001 From: Marcus Efraimsson Date: Tue, 29 May 2018 14:28:04 +0200 Subject: [PATCH 108/488] changelog: add notes about closing #11494 [skip ci] --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index d0d650c1aee..7c16f4f6e5b 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -16,6 +16,7 @@ * **Login**: Add optional option sign out url for generic oauth [#9847](https://github.com/grafana/grafana/issues/9847), thx [@roidelapluie](https://github.com/roidelapluie) * **Login**: Use proxy server from environment variable if available [#9703](https://github.com/grafana/grafana/issues/9703), thx [@iyeonok](https://github.com/iyeonok) * **Invite users**: Friendlier error message when smtp is not configured [#12087](https://github.com/grafana/grafana/issues/12087), thx [@thurt](https://github.com/thurt) +* **Graphite**: Don't send distributed tracing headers when using direct/browser access mode [#11494](https://github.com/grafana/grafana/issues/11494) # 5.1.3 (2018-05-16) From 1411709db1c8ce65fd45906fcbc43c7757256084 Mon Sep 17 00:00:00 2001 From: bergquist Date: Tue, 29 May 2018 14:07:37 +0200 Subject: [PATCH 109/488] provisioning: place testfiles within testdata folder --- .../all-properties/not.yaml.txt => devenv/README.md | 0 devenv/datasources/default/default.yaml | 9 --------- .../provisioning/datasources/config_reader_test.go | 12 ++++++------ .../all-properties/all-properties.yaml | 0 .../all-properties/not.yaml.txt} | 0 .../all-properties/sample.yaml | 0 .../all-properties/second.yaml | 0 .../broken-yaml/broken.yaml | 0 .../broken-yaml/commented.yaml | 0 .../double-default/default-1.yaml | 0 .../double-default/default-2.yaml | 0 .../insert-two-delete-two/one-datasources.yaml | 0 .../insert-two-delete-two/two-datasources.yml | 0 .../two-datasources/two-datasources.yaml | 0 .../version-0/version-0.yaml | 0 .../testdata/zero-datasources/placeholder-for-git | 0 16 files changed, 6 insertions(+), 15 deletions(-) rename pkg/services/provisioning/datasources/test-configs/all-properties/not.yaml.txt => devenv/README.md (100%) rename pkg/services/provisioning/datasources/{test-configs => testdata}/all-properties/all-properties.yaml (100%) rename 
pkg/services/provisioning/datasources/{test-configs/zero-datasources/placeholder-for-git => testdata/all-properties/not.yaml.txt} (100%) rename pkg/services/provisioning/datasources/{test-configs => testdata}/all-properties/sample.yaml (100%) rename pkg/services/provisioning/datasources/{test-configs => testdata}/all-properties/second.yaml (100%) rename pkg/services/provisioning/datasources/{test-configs => testdata}/broken-yaml/broken.yaml (100%) rename pkg/services/provisioning/datasources/{test-configs => testdata}/broken-yaml/commented.yaml (100%) rename pkg/services/provisioning/datasources/{test-configs => testdata}/double-default/default-1.yaml (100%) rename pkg/services/provisioning/datasources/{test-configs => testdata}/double-default/default-2.yaml (100%) rename pkg/services/provisioning/datasources/{test-configs => testdata}/insert-two-delete-two/one-datasources.yaml (100%) rename pkg/services/provisioning/datasources/{test-configs => testdata}/insert-two-delete-two/two-datasources.yml (100%) rename pkg/services/provisioning/datasources/{test-configs => testdata}/two-datasources/two-datasources.yaml (100%) rename pkg/services/provisioning/datasources/{test-configs => testdata}/version-0/version-0.yaml (100%) create mode 100644 pkg/services/provisioning/datasources/testdata/zero-datasources/placeholder-for-git diff --git a/pkg/services/provisioning/datasources/test-configs/all-properties/not.yaml.txt b/devenv/README.md similarity index 100% rename from pkg/services/provisioning/datasources/test-configs/all-properties/not.yaml.txt rename to devenv/README.md diff --git a/devenv/datasources/default/default.yaml b/devenv/datasources/default/default.yaml index b721c093f3a..dc2310f15aa 100644 --- a/devenv/datasources/default/default.yaml +++ b/devenv/datasources/default/default.yaml @@ -71,12 +71,3 @@ datasources: authType: credentials defaultRegion: eu-west-2 - - name: Cloudwatch keys - type: cloudwatch - editable: true - jsonData: - authType: keys - defaultRegion: eu-west-2 - secureJsonData: - accessKey: AKIAJL347VWN6MK63N2A - secretKey: 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 diff --git a/pkg/services/provisioning/datasources/config_reader_test.go b/pkg/services/provisioning/datasources/config_reader_test.go index 89ecc5a0b68..2e407dbe4de 100644 --- a/pkg/services/provisioning/datasources/config_reader_test.go +++ b/pkg/services/provisioning/datasources/config_reader_test.go @@ -13,12 +13,12 @@ import ( var ( logger log.Logger = log.New("fake.log") - twoDatasourcesConfig = "./test-configs/two-datasources" - twoDatasourcesConfigPurgeOthers = "./test-configs/insert-two-delete-two" - doubleDatasourcesConfig = "./test-configs/double-default" - allProperties = "./test-configs/all-properties" - versionZero = "./test-configs/version-0" - brokenYaml = "./test-configs/broken-yaml" + twoDatasourcesConfig = "testdata/two-datasources" + twoDatasourcesConfigPurgeOthers = "testdata/insert-two-delete-two" + doubleDatasourcesConfig = "testdata/double-default" + allProperties = "testdata/all-properties" + versionZero = "testdata/version-0" + brokenYaml = "testdata/broken-yaml" fakeRepo *fakeRepository ) diff --git a/pkg/services/provisioning/datasources/test-configs/all-properties/all-properties.yaml b/pkg/services/provisioning/datasources/testdata/all-properties/all-properties.yaml similarity index 100% rename from pkg/services/provisioning/datasources/test-configs/all-properties/all-properties.yaml rename to pkg/services/provisioning/datasources/testdata/all-properties/all-properties.yaml diff 
--git a/pkg/services/provisioning/datasources/test-configs/zero-datasources/placeholder-for-git b/pkg/services/provisioning/datasources/testdata/all-properties/not.yaml.txt similarity index 100% rename from pkg/services/provisioning/datasources/test-configs/zero-datasources/placeholder-for-git rename to pkg/services/provisioning/datasources/testdata/all-properties/not.yaml.txt diff --git a/pkg/services/provisioning/datasources/test-configs/all-properties/sample.yaml b/pkg/services/provisioning/datasources/testdata/all-properties/sample.yaml similarity index 100% rename from pkg/services/provisioning/datasources/test-configs/all-properties/sample.yaml rename to pkg/services/provisioning/datasources/testdata/all-properties/sample.yaml diff --git a/pkg/services/provisioning/datasources/test-configs/all-properties/second.yaml b/pkg/services/provisioning/datasources/testdata/all-properties/second.yaml similarity index 100% rename from pkg/services/provisioning/datasources/test-configs/all-properties/second.yaml rename to pkg/services/provisioning/datasources/testdata/all-properties/second.yaml diff --git a/pkg/services/provisioning/datasources/test-configs/broken-yaml/broken.yaml b/pkg/services/provisioning/datasources/testdata/broken-yaml/broken.yaml similarity index 100% rename from pkg/services/provisioning/datasources/test-configs/broken-yaml/broken.yaml rename to pkg/services/provisioning/datasources/testdata/broken-yaml/broken.yaml diff --git a/pkg/services/provisioning/datasources/test-configs/broken-yaml/commented.yaml b/pkg/services/provisioning/datasources/testdata/broken-yaml/commented.yaml similarity index 100% rename from pkg/services/provisioning/datasources/test-configs/broken-yaml/commented.yaml rename to pkg/services/provisioning/datasources/testdata/broken-yaml/commented.yaml diff --git a/pkg/services/provisioning/datasources/test-configs/double-default/default-1.yaml b/pkg/services/provisioning/datasources/testdata/double-default/default-1.yaml similarity index 100% rename from pkg/services/provisioning/datasources/test-configs/double-default/default-1.yaml rename to pkg/services/provisioning/datasources/testdata/double-default/default-1.yaml diff --git a/pkg/services/provisioning/datasources/test-configs/double-default/default-2.yaml b/pkg/services/provisioning/datasources/testdata/double-default/default-2.yaml similarity index 100% rename from pkg/services/provisioning/datasources/test-configs/double-default/default-2.yaml rename to pkg/services/provisioning/datasources/testdata/double-default/default-2.yaml diff --git a/pkg/services/provisioning/datasources/test-configs/insert-two-delete-two/one-datasources.yaml b/pkg/services/provisioning/datasources/testdata/insert-two-delete-two/one-datasources.yaml similarity index 100% rename from pkg/services/provisioning/datasources/test-configs/insert-two-delete-two/one-datasources.yaml rename to pkg/services/provisioning/datasources/testdata/insert-two-delete-two/one-datasources.yaml diff --git a/pkg/services/provisioning/datasources/test-configs/insert-two-delete-two/two-datasources.yml b/pkg/services/provisioning/datasources/testdata/insert-two-delete-two/two-datasources.yml similarity index 100% rename from pkg/services/provisioning/datasources/test-configs/insert-two-delete-two/two-datasources.yml rename to pkg/services/provisioning/datasources/testdata/insert-two-delete-two/two-datasources.yml diff --git a/pkg/services/provisioning/datasources/test-configs/two-datasources/two-datasources.yaml 
b/pkg/services/provisioning/datasources/testdata/two-datasources/two-datasources.yaml
similarity index 100%
rename from pkg/services/provisioning/datasources/test-configs/two-datasources/two-datasources.yaml
rename to pkg/services/provisioning/datasources/testdata/two-datasources/two-datasources.yaml
diff --git a/pkg/services/provisioning/datasources/test-configs/version-0/version-0.yaml b/pkg/services/provisioning/datasources/testdata/version-0/version-0.yaml
similarity index 100%
rename from pkg/services/provisioning/datasources/test-configs/version-0/version-0.yaml
rename to pkg/services/provisioning/datasources/testdata/version-0/version-0.yaml
diff --git a/pkg/services/provisioning/datasources/testdata/zero-datasources/placeholder-for-git b/pkg/services/provisioning/datasources/testdata/zero-datasources/placeholder-for-git
new file mode 100644
index 00000000000..e69de29bb2d

From b253284accef14e4ad5fa0d89ee55c8837cb5047 Mon Sep 17 00:00:00 2001
From: bergquist
Date: Tue, 29 May 2018 16:52:02 +0200
Subject: [PATCH 110/488] devenv: improve readme

---
 devenv/README.md | 11 +++++++++++
 1 file changed, 11 insertions(+)

diff --git a/devenv/README.md b/devenv/README.md
index e69de29bb2d..4ec6f672f25 100644
--- a/devenv/README.md
+++ b/devenv/README.md
@@ -0,0 +1,11 @@
+This folder contains useful scripts and configuration for...
+
+* Configuring datasources in Grafana
+* Provisioning example dashboards in Grafana
+* Running preconfigured datasources as docker containers
+
+Want to know more? Run setup!
+
+```bash
+./setup.sh
+```

From f32e3a29609ad311595ec7e6b87b6c740d3ec270 Mon Sep 17 00:00:00 2001
From: bergquist
Date: Tue, 29 May 2018 17:22:52 +0200
Subject: [PATCH 111/488] changelog: note about closing #11858

---
 CHANGELOG.md | 1 +
 1 file changed, 1 insertion(+)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 7c16f4f6e5b..9eb6125492d 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -17,6 +17,7 @@
 * **Login**: Use proxy server from environment variable if available [#9703](https://github.com/grafana/grafana/issues/9703), thx [@iyeonok](https://github.com/iyeonok)
 * **Invite users**: Friendlier error message when smtp is not configured [#12087](https://github.com/grafana/grafana/issues/12087), thx [@thurt](https://github.com/thurt)
 * **Graphite**: Don't send distributed tracing headers when using direct/browser access mode [#11494](https://github.com/grafana/grafana/issues/11494)
+* **Sidenav**: Show create dashboard link for viewers if at least editor in one folder [#11858](https://github.com/grafana/grafana/issues/11858)

 # 5.1.3 (2018-05-16)

From c7acbcdaf5e28092a2be9d44c348d2a767bc7e3b Mon Sep 17 00:00:00 2001
From: bergquist
Date: Wed, 30 May 2018 08:46:44 +0200
Subject: [PATCH 112/488] provisioning: enable relative paths

this commit enables relative paths for provisioning dashboards.
this enables easier dev setups --- .../bulk-testing/bulk-dashboards.yaml | 2 +- .../provisioning/dashboards/file_reader.go | 8 +- .../dashboards/file_reader_test.go | 75 ++++++++++++------- 3 files changed, 55 insertions(+), 30 deletions(-) diff --git a/devenv/dashboards/bulk-testing/bulk-dashboards.yaml b/devenv/dashboards/bulk-testing/bulk-dashboards.yaml index 7838e4bc342..e0ba8a88e68 100644 --- a/devenv/dashboards/bulk-testing/bulk-dashboards.yaml +++ b/devenv/dashboards/bulk-testing/bulk-dashboards.yaml @@ -5,5 +5,5 @@ providers: folder: 'Bulk dashboards' type: file options: - path: /home/carl/go/src/github.com/grafana/grafana/devenv/dashboards/bulk-testing + path: devenv/dashboards/bulk-testing diff --git a/pkg/services/provisioning/dashboards/file_reader.go b/pkg/services/provisioning/dashboards/file_reader.go index e5186e12f06..93846f5c474 100644 --- a/pkg/services/provisioning/dashboards/file_reader.go +++ b/pkg/services/provisioning/dashboards/file_reader.go @@ -47,9 +47,15 @@ func NewDashboardFileReader(cfg *DashboardsAsConfig, log log.Logger) (*fileReade log.Error("Cannot read directory", "error", err) } + absPath, err := filepath.Abs(path) + if err != nil { + log.Error("Could not create absolute path ", "path", path) + absPath = path //if .Abs return an error we fallback to path + } + return &fileReader{ Cfg: cfg, - Path: path, + Path: absPath, log: log, dashboardService: dashboards.NewProvisioningService(), }, nil diff --git a/pkg/services/provisioning/dashboards/file_reader_test.go b/pkg/services/provisioning/dashboards/file_reader_test.go index 084fae1310a..a04fbb23f82 100644 --- a/pkg/services/provisioning/dashboards/file_reader_test.go +++ b/pkg/services/provisioning/dashboards/file_reader_test.go @@ -15,14 +15,57 @@ import ( ) var ( - defaultDashboards = "./testdata/test-dashboards/folder-one" - brokenDashboards = "./testdata/test-dashboards/broken-dashboards" - oneDashboard = "./testdata/test-dashboards/one-dashboard" - containingId = "./testdata/test-dashboards/containing-id" + defaultDashboards = "testdata/test-dashboards/folder-one" + brokenDashboards = "testdata/test-dashboards/broken-dashboards" + oneDashboard = "testdata/test-dashboards/one-dashboard" + containingId = "testdata/test-dashboards/containing-id" fakeService *fakeDashboardProvisioningService ) +func TestCreatingNewDashboardFileReader(t *testing.T) { + Convey("creating new dashboard file reader", t, func() { + cfg := &DashboardsAsConfig{ + Name: "Default", + Type: "file", + OrgId: 1, + Folder: "", + Options: map[string]interface{}{}, + } + + Convey("using path parameter", func() { + cfg.Options["path"] = defaultDashboards + reader, err := NewDashboardFileReader(cfg, log.New("test-logger")) + So(err, ShouldBeNil) + So(reader.Path, ShouldNotEqual, "") + }) + + Convey("using folder as options", func() { + cfg.Options["folder"] = defaultDashboards + reader, err := NewDashboardFileReader(cfg, log.New("test-logger")) + So(err, ShouldBeNil) + So(reader.Path, ShouldNotEqual, "") + }) + + Convey("using full path", func() { + cfg.Options["folder"] = "/var/lib/grafana/dashboards" + reader, err := NewDashboardFileReader(cfg, log.New("test-logger")) + So(err, ShouldBeNil) + + So(reader.Path, ShouldEqual, "/var/lib/grafana/dashboards") + So(filepath.IsAbs(reader.Path), ShouldBeTrue) + }) + + Convey("using relative path", func() { + cfg.Options["folder"] = defaultDashboards + reader, err := NewDashboardFileReader(cfg, log.New("test-logger")) + So(err, ShouldBeNil) + + So(filepath.IsAbs(reader.Path), ShouldBeTrue) + }) 
+ }) +} + func TestDashboardFileReader(t *testing.T) { Convey("Dashboard file reader", t, func() { bus.ClearBusHandlers() @@ -170,30 +213,6 @@ func TestDashboardFileReader(t *testing.T) { }) }) - Convey("Can use bpth path and folder as dashboard path", func() { - cfg := &DashboardsAsConfig{ - Name: "Default", - Type: "file", - OrgId: 1, - Folder: "", - Options: map[string]interface{}{}, - } - - Convey("using path parameter", func() { - cfg.Options["path"] = defaultDashboards - reader, err := NewDashboardFileReader(cfg, log.New("test-logger")) - So(err, ShouldBeNil) - So(reader.Path, ShouldEqual, defaultDashboards) - }) - - Convey("using folder as options", func() { - cfg.Options["folder"] = defaultDashboards - reader, err := NewDashboardFileReader(cfg, log.New("test-logger")) - So(err, ShouldBeNil) - So(reader.Path, ShouldEqual, defaultDashboards) - }) - }) - Reset(func() { dashboards.NewProvisioningService = origNewDashboardProvisioningService }) From 48fc5edda19a7c426d70a494786a46378722a6b2 Mon Sep 17 00:00:00 2001 From: Kim Christensen Date: Wed, 30 May 2018 09:22:16 +0200 Subject: [PATCH 113/488] Support InfluxDB count distinct aggregation (#11658) influxdb: support count distinct aggregation --- pkg/tsdb/influxdb/query_part_test.go | 8 + .../plugins/datasource/influxdb/query_part.ts | 23 +++ .../influxdb/specs/query_part.jest.ts | 144 ++++++++++++++++++ 3 files changed, 175 insertions(+) diff --git a/pkg/tsdb/influxdb/query_part_test.go b/pkg/tsdb/influxdb/query_part_test.go index d23865174c8..cd0863cee9b 100644 --- a/pkg/tsdb/influxdb/query_part_test.go +++ b/pkg/tsdb/influxdb/query_part_test.go @@ -76,5 +76,13 @@ func TestInfluxdbQueryPart(t *testing.T) { res := part.Render(query, queryContext, "mean(value)") So(res, ShouldEqual, `mean(value) AS "test"`) }) + + Convey("render count distinct", func() { + part, err := NewQueryPart("count", []string{}) + So(err, ShouldBeNil) + + res := part.Render(query, queryContext, "distinct(value)") + So(res, ShouldEqual, `count(distinct(value))`) + }) }) } diff --git a/public/app/plugins/datasource/influxdb/query_part.ts b/public/app/plugins/datasource/influxdb/query_part.ts index ce5588abe53..2a2f9f2a4ef 100644 --- a/public/app/plugins/datasource/influxdb/query_part.ts +++ b/public/app/plugins/datasource/influxdb/query_part.ts @@ -44,6 +44,28 @@ function replaceAggregationAddStrategy(selectParts, partModel) { for (var i = 0; i < selectParts.length; i++) { var part = selectParts[i]; if (part.def.category === categories.Aggregations) { + if (part.def.type === partModel.def.type) { + return; + } + // count distinct is allowed + if (part.def.type === 'count' && partModel.def.type === 'distinct') { + break; + } + // remove next aggregation if distinct was replaced + if (part.def.type === 'distinct') { + var morePartsAvailable = selectParts.length >= i + 2; + if (partModel.def.type !== 'count' && morePartsAvailable) { + var nextPart = selectParts[i + 1]; + if (nextPart.def.category === categories.Aggregations) { + selectParts.splice(i + 1, 1); + } + } else if (partModel.def.type === 'count') { + if (!morePartsAvailable || selectParts[i + 1].def.type !== 'count') { + selectParts.splice(i + 1, 0, partModel); + } + return; + } + } selectParts[i] = partModel; return; } @@ -434,4 +456,5 @@ export default { getCategories: function() { return categories; }, + replaceAggregationAdd: replaceAggregationAddStrategy, }; diff --git a/public/app/plugins/datasource/influxdb/specs/query_part.jest.ts b/public/app/plugins/datasource/influxdb/specs/query_part.jest.ts 
index cabe8bc9b6f..e9e6d216c1e 100644 --- a/public/app/plugins/datasource/influxdb/specs/query_part.jest.ts +++ b/public/app/plugins/datasource/influxdb/specs/query_part.jest.ts @@ -40,5 +40,149 @@ describe('InfluxQueryPart', () => { expect(part.text).toBe('alias(test)'); expect(part.render('mean(value)')).toBe('mean(value) AS "test"'); }); + + it('should nest distinct when count is selected', () => { + var selectParts = [ + queryPart.create({ + type: 'field', + category: queryPart.getCategories().Fields, + }), + queryPart.create({ + type: 'count', + category: queryPart.getCategories().Aggregations, + }), + ]; + var partModel = queryPart.create({ + type: 'distinct', + category: queryPart.getCategories().Aggregations, + }); + + queryPart.replaceAggregationAdd(selectParts, partModel); + + expect(selectParts[1].text).toBe('distinct()'); + expect(selectParts[2].text).toBe('count()'); + }); + + it('should convert to count distinct when distinct is selected and count added', () => { + var selectParts = [ + queryPart.create({ + type: 'field', + category: queryPart.getCategories().Fields, + }), + queryPart.create({ + type: 'distinct', + category: queryPart.getCategories().Aggregations, + }), + ]; + var partModel = queryPart.create({ + type: 'count', + category: queryPart.getCategories().Aggregations, + }); + + queryPart.replaceAggregationAdd(selectParts, partModel); + + expect(selectParts[1].text).toBe('distinct()'); + expect(selectParts[2].text).toBe('count()'); + }); + + it('should replace count distinct if an aggregation is selected', () => { + var selectParts = [ + queryPart.create({ + type: 'field', + category: queryPart.getCategories().Fields, + }), + queryPart.create({ + type: 'distinct', + category: queryPart.getCategories().Aggregations, + }), + queryPart.create({ + type: 'count', + category: queryPart.getCategories().Aggregations, + }), + ]; + var partModel = queryPart.create({ + type: 'mean', + category: queryPart.getCategories().Selectors, + }); + + queryPart.replaceAggregationAdd(selectParts, partModel); + + expect(selectParts[1].text).toBe('mean()'); + expect(selectParts).toHaveLength(2); + }); + + it('should not allowed nested counts when count distinct is selected', () => { + var selectParts = [ + queryPart.create({ + type: 'field', + category: queryPart.getCategories().Fields, + }), + queryPart.create({ + type: 'distinct', + category: queryPart.getCategories().Aggregations, + }), + queryPart.create({ + type: 'count', + category: queryPart.getCategories().Aggregations, + }), + ]; + var partModel = queryPart.create({ + type: 'count', + category: queryPart.getCategories().Aggregations, + }); + + queryPart.replaceAggregationAdd(selectParts, partModel); + + expect(selectParts[1].text).toBe('distinct()'); + expect(selectParts[2].text).toBe('count()'); + expect(selectParts).toHaveLength(3); + }); + + it('should not remove count distinct when distinct is added', () => { + var selectParts = [ + queryPart.create({ + type: 'field', + category: queryPart.getCategories().Fields, + }), + queryPart.create({ + type: 'distinct', + category: queryPart.getCategories().Aggregations, + }), + queryPart.create({ + type: 'count', + category: queryPart.getCategories().Aggregations, + }), + ]; + var partModel = queryPart.create({ + type: 'distinct', + category: queryPart.getCategories().Aggregations, + }); + + queryPart.replaceAggregationAdd(selectParts, partModel); + + expect(selectParts[1].text).toBe('distinct()'); + expect(selectParts[2].text).toBe('count()'); + expect(selectParts).toHaveLength(3); + 
}); + + it('should remove distinct when sum aggregation is selected', () => { + var selectParts = [ + queryPart.create({ + type: 'field', + category: queryPart.getCategories().Fields, + }), + queryPart.create({ + type: 'distinct', + category: queryPart.getCategories().Aggregations, + }), + ]; + var partModel = queryPart.create({ + type: 'sum', + category: queryPart.getCategories().Aggregations, + }); + queryPart.replaceAggregationAdd(selectParts, partModel); + + expect(selectParts[1].text).toBe('sum()'); + }); }); }); From f2942d94a5b3c8d48616e2ee77f53e20f50420ff Mon Sep 17 00:00:00 2001 From: Marcus Efraimsson Date: Wed, 30 May 2018 09:26:15 +0200 Subject: [PATCH 114/488] changelog: add notes about closing #11645 [skip ci] --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 9eb6125492d..3597b1b6a1c 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -9,6 +9,7 @@ * **Dashboard**: Fix date selector styling for dark/light theme in time picker control [#11616](https://github.com/grafana/grafana/issues/11616) * **Discord**: Alert notification channel type for Discord, [#7964](https://github.com/grafana/grafana/issues/7964) thx [@jereksel](https://github.com/jereksel), * **InfluxDB**: Support SELECT queries in templating query, [#5013](https://github.com/grafana/grafana/issues/5013) +* **InfluxDB**: Support count distinct aggregation [#11645](https://github.com/grafana/grafana/issues/11645), thx [@kichristensen](https://github.com/kichristensen) * **Dashboard**: JSON Model under dashboard settings can now be updated & changes saved, [#1429](https://github.com/grafana/grafana/issues/1429), thx [@jereksel](https://github.com/jereksel) * **Security**: Fix XSS vulnerabilities in dashboard links [#11813](https://github.com/grafana/grafana/pull/11813) * **Singlestat**: Fix "time of last point" shows local time when dashboard timezone set to UTC [#10338](https://github.com/grafana/grafana/issues/10338) From ac1dda3b3a522d3174fd2035c4e562312994e92d Mon Sep 17 00:00:00 2001 From: David Kaltschmidt Date: Wed, 30 May 2018 12:07:51 +0200 Subject: [PATCH 115/488] Fix CSS to hide grid controls in fullscreen/low-activity views * there was a comma missing to hide the handles, fixed now * added new styles to hide header interaction in full screen panels --- public/sass/components/_dashboard_grid.scss | 14 ++++++++++++++ public/sass/components/_view_states.scss | 3 ++- 2 files changed, 16 insertions(+), 1 deletion(-) diff --git a/public/sass/components/_dashboard_grid.scss b/public/sass/components/_dashboard_grid.scss index 0a27df75164..f1908ca8786 100644 --- a/public/sass/components/_dashboard_grid.scss +++ b/public/sass/components/_dashboard_grid.scss @@ -18,6 +18,20 @@ height: 100% !important; transform: translate(0px, 0px) !important; } + + // Disable grid interaction indicators in fullscreen panels + + .panel-header:hover { + background-color: inherit; + } + + .panel-title-container { + cursor: pointer; + } + + .react-resizable-handle { + display: none; + } } @include media-breakpoint-down(sm) { diff --git a/public/sass/components/_view_states.scss b/public/sass/components/_view_states.scss index b1fa47d0c0a..c14590b4ec9 100644 --- a/public/sass/components/_view_states.scss +++ b/public/sass/components/_view_states.scss @@ -10,7 +10,8 @@ .playlist-active, .user-activity-low { - .react-resizable-handle .add-row-panel-hint, + .react-resizable-handle, + .add-row-panel-hint, .dash-row-menu-container, .navbar-button--refresh, .navbar-buttons--zoom, From 
f69654fcd5dd20b264e38af988a4a69673de76bb Mon Sep 17 00:00:00 2001 From: David Kaltschmidt Date: Wed, 30 May 2018 13:13:29 +0200 Subject: [PATCH 116/488] Restrict Explore UI to Editor and Admin roles Access is restricted via not showing in the following places: * hide from sidemenu * hide from panel header menu * disable keybinding `x` Also adds a `roles` property to reactContainer routes that will be checked if `roles` is set, and on failure redirects to `/`. --- pkg/api/index.go | 2 +- public/app/core/services/keybindingSrv.ts | 33 ++++++++++--------- .../app/features/panel/metrics_panel_ctrl.ts | 4 ++- public/app/routes/ReactContainer.tsx | 20 +++++++++-- public/app/routes/routes.ts | 1 + 5 files changed, 41 insertions(+), 19 deletions(-) diff --git a/pkg/api/index.go b/pkg/api/index.go index f082f03b5f6..acf0c30c907 100644 --- a/pkg/api/index.go +++ b/pkg/api/index.go @@ -128,7 +128,7 @@ func setIndexViewData(c *m.ReqContext) (*dtos.IndexViewData, error) { Children: dashboardChildNavs, }) - if setting.ExploreEnabled { + if setting.ExploreEnabled && (c.OrgRole == m.ROLE_ADMIN || c.OrgRole == m.ROLE_EDITOR) { data.NavTree = append(data.NavTree, &dtos.NavLink{ Text: "Explore", Id: "explore", diff --git a/public/app/core/services/keybindingSrv.ts b/public/app/core/services/keybindingSrv.ts index 25d00ab37f1..b1021c90adc 100644 --- a/public/app/core/services/keybindingSrv.ts +++ b/public/app/core/services/keybindingSrv.ts @@ -14,7 +14,7 @@ export class KeybindingSrv { timepickerOpen = false; /** @ngInject */ - constructor(private $rootScope, private $location, private datasourceSrv, private timeSrv) { + constructor(private $rootScope, private $location, private datasourceSrv, private timeSrv, private contextSrv) { // clear out all shortcuts on route change $rootScope.$on('$routeChangeSuccess', () => { Mousetrap.reset(); @@ -177,21 +177,24 @@ export class KeybindingSrv { } }); - this.bind('x', async () => { - if (dashboard.meta.focusPanelId) { - const panel = dashboard.getPanelById(dashboard.meta.focusPanelId); - const datasource = await this.datasourceSrv.get(panel.datasource); - if (datasource && datasource.supportsExplore) { - const range = this.timeSrv.timeRangeForUrl(); - const state = { - ...datasource.getExploreState(panel), - range, - }; - const exploreState = encodePathComponent(JSON.stringify(state)); - this.$location.url(`/explore/${exploreState}`); + // jump to explore if permissions allow + if (this.contextSrv.isEditor) { + this.bind('x', async () => { + if (dashboard.meta.focusPanelId) { + const panel = dashboard.getPanelById(dashboard.meta.focusPanelId); + const datasource = await this.datasourceSrv.get(panel.datasource); + if (datasource && datasource.supportsExplore) { + const range = this.timeSrv.timeRangeForUrl(); + const state = { + ...datasource.getExploreState(panel), + range, + }; + const exploreState = encodePathComponent(JSON.stringify(state)); + this.$location.url(`/explore/${exploreState}`); + } } - } - }); + }); + } // delete panel this.bind('p r', () => { diff --git a/public/app/features/panel/metrics_panel_ctrl.ts b/public/app/features/panel/metrics_panel_ctrl.ts index 3c48119ba3a..cf1b2cd49bc 100644 --- a/public/app/features/panel/metrics_panel_ctrl.ts +++ b/public/app/features/panel/metrics_panel_ctrl.ts @@ -16,6 +16,7 @@ class MetricsPanelCtrl extends PanelCtrl { datasourceName: any; $q: any; $timeout: any; + contextSrv: any; datasourceSrv: any; timeSrv: any; templateSrv: any; @@ -37,6 +38,7 @@ class MetricsPanelCtrl extends PanelCtrl { // make metrics tab the 
default this.editorTabIndex = 1; this.$q = $injector.get('$q'); + this.contextSrv = $injector.get('contextSrv'); this.datasourceSrv = $injector.get('datasourceSrv'); this.timeSrv = $injector.get('timeSrv'); this.templateSrv = $injector.get('templateSrv'); @@ -312,7 +314,7 @@ class MetricsPanelCtrl extends PanelCtrl { getAdditionalMenuItems() { const items = []; - if (this.datasource && this.datasource.supportsExplore) { + if (this.contextSrv.isEditor && this.datasource && this.datasource.supportsExplore) { items.push({ text: 'Explore', click: 'ctrl.explore();', diff --git a/public/app/routes/ReactContainer.tsx b/public/app/routes/ReactContainer.tsx index db6938cc878..b161a5e7a87 100644 --- a/public/app/routes/ReactContainer.tsx +++ b/public/app/routes/ReactContainer.tsx @@ -6,6 +6,7 @@ import coreModule from 'app/core/core_module'; import { store } from 'app/stores/store'; import { BackendSrv } from 'app/core/services/backend_srv'; import { DatasourceSrv } from 'app/features/plugins/datasource_srv'; +import { ContextSrv } from 'app/core/services/context_srv'; function WrapInProvider(store, Component, props) { return ( @@ -16,16 +17,31 @@ function WrapInProvider(store, Component, props) { } /** @ngInject */ -export function reactContainer($route, $location, backendSrv: BackendSrv, datasourceSrv: DatasourceSrv) { +export function reactContainer( + $route, + $location, + backendSrv: BackendSrv, + datasourceSrv: DatasourceSrv, + contextSrv: ContextSrv +) { return { restrict: 'E', template: '', link(scope, elem) { - let component = $route.current.locals.component; + // Check permissions for this component + const { roles } = $route.current.locals; + if (roles && roles.length) { + if (!roles.some(r => contextSrv.hasRole(r))) { + $location.url('/'); + } + } + + let { component } = $route.current.locals; // Dynamic imports return whole module, need to extract default export if (component.default) { component = component.default; } + const props = { backendSrv: backendSrv, datasourceSrv: datasourceSrv, diff --git a/public/app/routes/routes.ts b/public/app/routes/routes.ts index b10084d1941..568b3438b38 100644 --- a/public/app/routes/routes.ts +++ b/public/app/routes/routes.ts @@ -113,6 +113,7 @@ export function setupAngularRoutes($routeProvider, $locationProvider) { .when('/explore/:initial?', { template: '', resolve: { + roles: () => ['Editor', 'Admin'], component: () => import(/* webpackChunkName: "explore" */ 'app/containers/Explore/Wrapper'), }, }) From 7224ca6c622547124fcab828919872fde93efca6 Mon Sep 17 00:00:00 2001 From: David Kaltschmidt Date: Wed, 30 May 2018 13:24:09 +0200 Subject: [PATCH 117/488] Fix panel menu test --- public/app/features/panel/specs/metrics_panel_ctrl.jest.ts | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/public/app/features/panel/specs/metrics_panel_ctrl.jest.ts b/public/app/features/panel/specs/metrics_panel_ctrl.jest.ts index f2e5199b57d..79564e2a123 100644 --- a/public/app/features/panel/specs/metrics_panel_ctrl.jest.ts +++ b/public/app/features/panel/specs/metrics_panel_ctrl.jest.ts @@ -24,8 +24,9 @@ describe('MetricsPanelCtrl', () => { }); }); - describe('and has datasource set that supports explore', () => { + describe('and has datasource set that supports explore and user has powers', () => { beforeEach(() => { + ctrl.contextSrv = { isEditor: true }; ctrl.datasource = { supportsExplore: true }; additionalItems = ctrl.getAdditionalMenuItems(); }); From 21ecaae6ff2f91b5b58e008f81a32d30bd06d74d Mon Sep 17 00:00:00 2001 From: Leonard Gram Date: 
Wed, 30 May 2018 14:30:01 +0200 Subject: [PATCH 118/488] changelog: Second epochs are now correctly converted to ms. --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 3597b1b6a1c..3a86eeba75e 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -19,6 +19,7 @@ * **Invite users**: Friendlier error message when smtp is not configured [#12087](https://github.com/grafana/grafana/issues/12087), thx [@thurt](https://github.com/thurt) * **Graphite**: Don't send distributed tracing headers when using direct/browser access mode [#11494](https://github.com/grafana/grafana/issues/11494) * **Sidenav**: Show create dashboard link for viewers if at least editor in one folder [#11858](https://github.com/grafana/grafana/issues/11858) +* **SQL**: Second epochs are now correctly converted to ms. [#12085](https://github.com/grafana/grafana/pull/12085) # 5.1.3 (2018-05-16) From 827fb7e8de3bf075a2af13f8f6940abbee5eb584 Mon Sep 17 00:00:00 2001 From: David Kaltschmidt Date: Wed, 30 May 2018 15:24:47 +0200 Subject: [PATCH 119/488] Fix karma tests that rely on MetricsPanelCtrl --- public/app/features/panel/metrics_panel_ctrl.ts | 4 ++-- public/test/specs/helpers.ts | 2 ++ 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/public/app/features/panel/metrics_panel_ctrl.ts b/public/app/features/panel/metrics_panel_ctrl.ts index cf1b2cd49bc..cbda8c874db 100644 --- a/public/app/features/panel/metrics_panel_ctrl.ts +++ b/public/app/features/panel/metrics_panel_ctrl.ts @@ -1,9 +1,9 @@ -import config from 'app/core/config'; import $ from 'jquery'; import _ from 'lodash'; + +import config from 'app/core/config'; import kbn from 'app/core/utils/kbn'; import { PanelCtrl } from 'app/features/panel/panel_ctrl'; - import * as rangeUtil from 'app/core/utils/rangeutil'; import * as dateMath from 'app/core/utils/datemath'; import { encodePathComponent } from 'app/core/utils/location_util'; diff --git a/public/test/specs/helpers.ts b/public/test/specs/helpers.ts index 276d9867ec4..dd8bd39846e 100644 --- a/public/test/specs/helpers.ts +++ b/public/test/specs/helpers.ts @@ -11,6 +11,7 @@ export function ControllerTestContext() { this.$element = {}; this.$sanitize = {}; this.annotationsSrv = {}; + this.contextSrv = {}; this.timeSrv = new TimeSrvStub(); this.templateSrv = new TemplateSrvStub(); this.datasourceSrv = { @@ -27,6 +28,7 @@ export function ControllerTestContext() { this.providePhase = function(mocks) { return angularMocks.module(function($provide) { + $provide.value('contextSrv', self.contextSrv); $provide.value('datasourceSrv', self.datasourceSrv); $provide.value('annotationsSrv', self.annotationsSrv); $provide.value('timeSrv', self.timeSrv); From 50d1519a916a5526d02e7cb3621b97b5db8505e2 Mon Sep 17 00:00:00 2001 From: Leonard Gram Date: Wed, 30 May 2018 13:55:30 +0200 Subject: [PATCH 120/488] build: mysql integration testing on ci. 
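The CI job below wires a MySQL container next to the Go image and then runs the suite with GRAFANA_TEST_DB=mysql. A minimal sketch of the guard pattern that keeps such database-backed tests opt-in; the test name and skip message are illustrative, not Grafana's actual test code:

```go
package mysql

import (
	"os"
	"testing"
)

// TestMySQLIntegration sketches how an integration test stays opt-in: it
// runs only when GRAFANA_TEST_DB selects MySQL (as the CI step below sets)
// and is skipped during a plain `go test` run.
func TestMySQLIntegration(t *testing.T) {
	if os.Getenv("GRAFANA_TEST_DB") != "mysql" {
		t.Skip("set GRAFANA_TEST_DB=mysql to run the MySQL integration tests")
	}
	// Connect to the MySQL instance CI provisions on 127.0.0.1:3306 and
	// exercise the datasource here.
}
```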
--- .circleci/config.yml | 26 +++++++++++++++++++++++++ docker/blocks/mysql/docker-compose.yaml | 2 +- docker/blocks/mysql_tests/Dockerfile | 4 ++-- pkg/tsdb/mysql/mysql_test.go | 6 +++--- 4 files changed, 32 insertions(+), 6 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index c92a68bf99d..d9cc03b9527 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -12,6 +12,26 @@ aliases: version: 2 jobs: + mysql-integration-test: + docker: + - image: circleci/golang:1.10 + - image: circleci/mysql:5.6-ram + environment: + MYSQL_ROOT_PASSWORD: rootpass + MYSQL_DATABASE: grafana_tests + MYSQL_USER: grafana + MYSQL_PASSWORD: password + working_directory: /go/src/github.com/grafana/grafana + steps: + - checkout + - run: sudo apt update + - run: sudo apt install -y mysql-client + - run: dockerize -wait tcp://127.0.0.1:3306 -timeout 120s + - run: cat docker/blocks/mysql_tests/setup.sql | mysql -h 127.0.0.1 -P 3306 -u root -prootpass + - run: + name: mysql integration tests + command: 'GRAFANA_TEST_DB=mysql go test ./pkg/...' + codespell: docker: - image: circleci/python @@ -188,6 +208,8 @@ workflows: filters: *filter-not-release - test-backend: filters: *filter-not-release + - mysql-integration-test: + filters: *filter-not-release - deploy-master: requires: - build-all @@ -195,6 +217,7 @@ workflows: - test-frontend - codespell - gometalinter + - mysql-integration-test filters: branches: only: master @@ -210,6 +233,8 @@ workflows: filters: *filter-only-release - test-backend: filters: *filter-only-release + - mysql-integration-test: + filters: *filter-only-release - deploy-release: requires: - build-all @@ -217,4 +242,5 @@ workflows: - test-frontend - codespell - gometalinter + - mysql-integration-test filters: *filter-only-release diff --git a/docker/blocks/mysql/docker-compose.yaml b/docker/blocks/mysql/docker-compose.yaml index 53ff9da62a7..381b04a53c8 100644 --- a/docker/blocks/mysql/docker-compose.yaml +++ b/docker/blocks/mysql/docker-compose.yaml @@ -1,5 +1,5 @@ mysql: - image: mysql:latest + image: mysql:5.6 environment: MYSQL_ROOT_PASSWORD: rootpass MYSQL_DATABASE: grafana diff --git a/docker/blocks/mysql_tests/Dockerfile b/docker/blocks/mysql_tests/Dockerfile index fa91fa3c023..89e16bc2ed6 100644 --- a/docker/blocks/mysql_tests/Dockerfile +++ b/docker/blocks/mysql_tests/Dockerfile @@ -1,3 +1,3 @@ -FROM mysql:latest +FROM mysql:5.6 ADD setup.sql /docker-entrypoint-initdb.d -CMD ["mysqld"] \ No newline at end of file +CMD ["mysqld"] diff --git a/pkg/tsdb/mysql/mysql_test.go b/pkg/tsdb/mysql/mysql_test.go index 29c5b72b408..5650de237c5 100644 --- a/pkg/tsdb/mysql/mysql_test.go +++ b/pkg/tsdb/mysql/mysql_test.go @@ -601,7 +601,7 @@ func TestMySQL(t *testing.T) { Queries: []*tsdb.Query{ { Model: simplejson.NewFromAny(map[string]interface{}{ - "rawSql": `SELECT $__time(time), CONCAT(measurement, ' - value one') as metric, valueOne FROM metric_values ORDER BY 1`, + "rawSql": `SELECT $__time(time), CONCAT(measurement, ' - value one') as metric, valueOne FROM metric_values ORDER BY 1,2`, "format": "time_series", }), RefId: "A", @@ -615,8 +615,8 @@ func TestMySQL(t *testing.T) { So(queryResult.Error, ShouldBeNil) So(len(queryResult.Series), ShouldEqual, 2) - So(queryResult.Series[0].Name, ShouldEqual, "Metric B - value one") - So(queryResult.Series[1].Name, ShouldEqual, "Metric A - value one") + So(queryResult.Series[0].Name, ShouldEqual, "Metric A - value one") + So(queryResult.Series[1].Name, ShouldEqual, "Metric B - value one") }) Convey("When doing a metric query 
grouping by time should return correct series", func() { From e33b17fac666e03135fdeb1c5b9a0227e85e1ff2 Mon Sep 17 00:00:00 2001 From: Leonard Gram Date: Wed, 30 May 2018 09:40:45 +0200 Subject: [PATCH 121/488] build: integration testing postegres on ci. --- .circleci/config.yml | 25 ++++++++++++++++++++++ docker/blocks/postgres/docker-compose.yaml | 4 ++-- docker/blocks/postgres_tests/Dockerfile | 4 ++-- docker/blocks/postgres_tests/setup.sql | 2 +- 4 files changed, 30 insertions(+), 5 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index d9cc03b9527..46404e4e650 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -32,6 +32,25 @@ jobs: name: mysql integration tests command: 'GRAFANA_TEST_DB=mysql go test ./pkg/...' + postgres-integration-test: + docker: + - image: circleci/golang:1.10 + - image: circleci/postgres:9.3-ram + environment: + POSTGRES_USER: grafanatest + POSTGRES_PASSWORD: grafanatest + POSTGRES_DB: grafanatest + working_directory: /go/src/github.com/grafana/grafana + steps: + - checkout + - run: sudo apt update + - run: sudo apt install -y postgresql-client + - run: dockerize -wait tcp://127.0.0.1:5432 -timeout 120s + - run: 'PGPASSWORD=grafanatest psql -p 5432 -h 127.0.0.1 -U grafanatest -d grafanatest -f docker/blocks/postgres_tests/setup.sql' + - run: + name: postgres integration tests + command: 'GRAFANA_TEST_DB=postgres go test ./pkg/...' + codespell: docker: - image: circleci/python @@ -210,6 +229,8 @@ workflows: filters: *filter-not-release - mysql-integration-test: filters: *filter-not-release + - postgres-integration-test: + filters: *filter-not-release - deploy-master: requires: - build-all @@ -218,6 +239,7 @@ workflows: - codespell - gometalinter - mysql-integration-test + - postgres-integration-test filters: branches: only: master @@ -235,6 +257,8 @@ workflows: filters: *filter-only-release - mysql-integration-test: filters: *filter-only-release + - postgres-integration-test: + filters: *filter-only-release - deploy-release: requires: - build-all @@ -243,4 +267,5 @@ workflows: - codespell - gometalinter - mysql-integration-test + - postgres-integration-test filters: *filter-only-release diff --git a/docker/blocks/postgres/docker-compose.yaml b/docker/blocks/postgres/docker-compose.yaml index 566df7b8877..27736042f7b 100644 --- a/docker/blocks/postgres/docker-compose.yaml +++ b/docker/blocks/postgres/docker-compose.yaml @@ -1,5 +1,5 @@ postgrestest: - image: postgres:latest + image: postgres:9.3 environment: POSTGRES_USER: grafana POSTGRES_PASSWORD: password @@ -13,4 +13,4 @@ network_mode: bridge environment: FD_DATASOURCE: postgres - FD_PORT: 5432 \ No newline at end of file + FD_PORT: 5432 diff --git a/docker/blocks/postgres_tests/Dockerfile b/docker/blocks/postgres_tests/Dockerfile index afe4d199651..df188e1094d 100644 --- a/docker/blocks/postgres_tests/Dockerfile +++ b/docker/blocks/postgres_tests/Dockerfile @@ -1,3 +1,3 @@ -FROM postgres:latest +FROM postgres:9.3 ADD setup.sql /docker-entrypoint-initdb.d -CMD ["postgres"] \ No newline at end of file +CMD ["postgres"] diff --git a/docker/blocks/postgres_tests/setup.sql b/docker/blocks/postgres_tests/setup.sql index b182b7c292d..3b8a48f938d 100644 --- a/docker/blocks/postgres_tests/setup.sql +++ b/docker/blocks/postgres_tests/setup.sql @@ -1,3 +1,3 @@ CREATE DATABASE grafanadstest; REVOKE CONNECT ON DATABASE grafanadstest FROM PUBLIC; -GRANT CONNECT ON DATABASE grafanadstest TO grafanatest; \ No newline at end of file +GRANT CONNECT ON DATABASE grafanadstest TO grafanatest; From 
b379b2833760a24a5f4221f178255dfcbb6f1254 Mon Sep 17 00:00:00 2001 From: Leonard Gram Date: Wed, 30 May 2018 15:16:31 +0200 Subject: [PATCH 122/488] build: only runs db related tests on db. --- .circleci/config.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 46404e4e650..e898ad9e214 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -30,7 +30,7 @@ jobs: - run: cat docker/blocks/mysql_tests/setup.sql | mysql -h 127.0.0.1 -P 3306 -u root -prootpass - run: name: mysql integration tests - command: 'GRAFANA_TEST_DB=mysql go test ./pkg/...' + command: 'GRAFANA_TEST_DB=mysql go test ./pkg/services/sqlstore/... ./pkg/tsdb/mysql/... ' postgres-integration-test: docker: @@ -49,7 +49,7 @@ jobs: - run: 'PGPASSWORD=grafanatest psql -p 5432 -h 127.0.0.1 -U grafanatest -d grafanatest -f docker/blocks/postgres_tests/setup.sql' - run: name: postgres integration tests - command: 'GRAFANA_TEST_DB=postgres go test ./pkg/...' + command: 'GRAFANA_TEST_DB=postgres go test ./pkg/services/sqlstore/... ./pkg/tsdb/postgres/...' codespell: docker: From b894b5e669f94424b83b364238d2e7b254954989 Mon Sep 17 00:00:00 2001 From: Patrick O'Carroll Date: Wed, 30 May 2018 18:09:57 +0200 Subject: [PATCH 123/488] Fix singlestat threshold tooltip (#12109) fix singlestat threshold tooltip --- public/app/plugins/panel/singlestat/editor.html | 2 +- public/app/plugins/panel/singlestat/module.ts | 2 ++ 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/public/app/plugins/panel/singlestat/editor.html b/public/app/plugins/panel/singlestat/editor.html index f444cd0170c..15f4e6a9efa 100644 --- a/public/app/plugins/panel/singlestat/editor.html +++ b/public/app/plugins/panel/singlestat/editor.html @@ -61,7 +61,7 @@
diff --git a/public/app/plugins/panel/singlestat/module.ts b/public/app/plugins/panel/singlestat/module.ts index b73a3bb32bd..20c4dcfeb70 100644 --- a/public/app/plugins/panel/singlestat/module.ts +++ b/public/app/plugins/panel/singlestat/module.ts @@ -714,11 +714,13 @@ function getColorForValue(data, value) { if (!_.isFinite(value)) { return null; } + for (var i = data.thresholds.length; i > 0; i--) { if (value >= data.thresholds[i - 1]) { return data.colorMap[i]; } } + return _.first(data.colorMap); } From a4b1dd036d04cd372a7475be425b9c53467f15c7 Mon Sep 17 00:00:00 2001 From: Marcus Efraimsson Date: Wed, 30 May 2018 18:11:47 +0200 Subject: [PATCH 124/488] changelog: add notes about closing #11971 [skip ci] --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 3a86eeba75e..3d77986b290 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -20,6 +20,7 @@ * **Graphite**: Don't send distributed tracing headers when using direct/browser access mode [#11494](https://github.com/grafana/grafana/issues/11494) * **Sidenav**: Show create dashboard link for viewers if at least editor in one folder [#11858](https://github.com/grafana/grafana/issues/11858) * **SQL**: Second epochs are now correctly converted to ms. [#12085](https://github.com/grafana/grafana/pull/12085) +* **Singlestat**: Fix singlestat threshold tooltip [#11971](https://github.com/grafana/grafana/issues/11971) # 5.1.3 (2018-05-16) From 82ba27b5f22c60153f620430937392151c3d312f Mon Sep 17 00:00:00 2001 From: Marcus Efraimsson Date: Wed, 30 May 2018 21:31:31 +0200 Subject: [PATCH 125/488] changelog: add notes about closing #11771 [skip ci] --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 3d77986b290..5b756ea0102 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -21,6 +21,7 @@ * **Sidenav**: Show create dashboard link for viewers if at least editor in one folder [#11858](https://github.com/grafana/grafana/issues/11858) * **SQL**: Second epochs are now correctly converted to ms. 
[#12085](https://github.com/grafana/grafana/pull/12085)
 * **Singlestat**: Fix singlestat threshold tooltip [#11971](https://github.com/grafana/grafana/issues/11971)
+* **Dashboard**: Hide grid controls in fullscreen/low-activity views [#11771](https://github.com/grafana/grafana/issues/11771)

 # 5.1.3 (2018-05-16)

From d5aeae3a90e2cd7b1318b2d62a7e4516aabff9a0 Mon Sep 17 00:00:00 2001
From: bergquist
Date: Thu, 31 May 2018 08:27:29 +0200
Subject: [PATCH 126/488] test: fixes broken test on windows

---
 pkg/services/provisioning/dashboards/file_reader_test.go | 5 ++++-
 1 file changed, 4 insertions(+), 1 deletion(-)

diff --git a/pkg/services/provisioning/dashboards/file_reader_test.go b/pkg/services/provisioning/dashboards/file_reader_test.go
index a04fbb23f82..87e9ec6d226 100644
--- a/pkg/services/provisioning/dashboards/file_reader_test.go
+++ b/pkg/services/provisioning/dashboards/file_reader_test.go
@@ -3,6 +3,7 @@ package dashboards
 import (
 	"os"
 	"path/filepath"
+	"runtime"
 	"testing"
 	"time"

@@ -52,7 +53,9 @@ func TestCreatingNewDashboardFileReader(t *testing.T) {
 			reader, err := NewDashboardFileReader(cfg, log.New("test-logger"))
 			So(err, ShouldBeNil)

-			So(reader.Path, ShouldEqual, "/var/lib/grafana/dashboards")
+			if runtime.GOOS != "windows" {
+				So(reader.Path, ShouldEqual, "/var/lib/grafana/dashboards")
+			}
 			So(filepath.IsAbs(reader.Path), ShouldBeTrue)
 		})

From 47d388437740d930f3273f99338a2721ec8a9225 Mon Sep 17 00:00:00 2001
From: bergquist
Date: Mon, 21 May 2018 09:03:32 +0200
Subject: [PATCH 127/488] provisioning: follow symlinked folders

fixes #11958

---
 .../provisioning/dashboards/file_reader.go    |  5 +++
 .../dashboards/file_reader_linux_test.go      | 39 +++++++++++++++++++
 .../testdata/test-dashboards/symlink          |  1 +
 3 files changed, 45 insertions(+)
 create mode 100644 pkg/services/provisioning/dashboards/file_reader_linux_test.go
 create mode 120000 pkg/services/provisioning/dashboards/testdata/test-dashboards/symlink

diff --git a/pkg/services/provisioning/dashboards/file_reader.go b/pkg/services/provisioning/dashboards/file_reader.go
index 93846f5c474..628c63de3a8 100644
--- a/pkg/services/provisioning/dashboards/file_reader.go
+++ b/pkg/services/provisioning/dashboards/file_reader.go
@@ -47,6 +47,11 @@ func NewDashboardFileReader(cfg *DashboardsAsConfig, log log.Logger) (*fileReade
 		log.Error("Cannot read directory", "error", err)
 	}

+	path, err := filepath.EvalSymlinks(path)
+	if err != nil {
+		log.Error("Failed to read content of symlinked path: %s", path)
+	}
+
 	absPath, err := filepath.Abs(path)
 	if err != nil {
 		log.Error("Could not create absolute path ", "path", path)
 		absPath = path //if .Abs return an error we fallback to path
 	}

diff --git a/pkg/services/provisioning/dashboards/file_reader_linux_test.go b/pkg/services/provisioning/dashboards/file_reader_linux_test.go
new file mode 100644
index 00000000000..9d4cdae8609
--- /dev/null
+++ b/pkg/services/provisioning/dashboards/file_reader_linux_test.go
@@ -0,0 +1,39 @@
+// +build linux
+
+package dashboards
+
+import (
+	"path/filepath"
+	"testing"
+
+	"github.com/grafana/grafana/pkg/log"
+)
+
+var (
+	symlinkedFolder = "testdata/test-dashboards/symlink"
+)
+
+func TestProvisionedSymlinkedFolder(t *testing.T) {
+	cfg := &DashboardsAsConfig{
+		Name:    "Default",
+		Type:    "file",
+		OrgId:   1,
+		Folder:  "",
+		Options: map[string]interface{}{"path": symlinkedFolder},
+	}
+
+	reader, err := NewDashboardFileReader(cfg, log.New("test-logger"))
+	if err != nil {
+		t.Error("expected err to be nil")
+	}
+
+	want, err := filepath.Abs(containingId)
+
+	if err != nil {
+		t.Errorf("expected err to be nil")
+	}
+
+	if reader.Path != want {
+		t.Errorf("got %s want %s", reader.Path, want)
+	}
+}
diff --git a/pkg/services/provisioning/dashboards/testdata/test-dashboards/symlink b/pkg/services/provisioning/dashboards/testdata/test-dashboards/symlink
new file mode 120000
index 00000000000..42e166e6959
--- /dev/null
+++ b/pkg/services/provisioning/dashboards/testdata/test-dashboards/symlink
@@ -0,0 +1 @@
+containing-id/
\ No newline at end of file

From 2bd4c14e5f4d0a525dd7f7b692484f8fbb8fc9bc Mon Sep 17 00:00:00 2001
From: bergquist
Date: Thu, 31 May 2018 09:53:15 +0200
Subject: [PATCH 128/488] make path absolute before following symlink

---
 .../provisioning/dashboards/file_reader.go | 17 +++++++++--------
 1 file changed, 9 insertions(+), 8 deletions(-)

diff --git a/pkg/services/provisioning/dashboards/file_reader.go b/pkg/services/provisioning/dashboards/file_reader.go
index 628c63de3a8..a1ba4dbf8e2 100644
--- a/pkg/services/provisioning/dashboards/file_reader.go
+++ b/pkg/services/provisioning/dashboards/file_reader.go
@@ -47,20 +47,21 @@ func NewDashboardFileReader(cfg *DashboardsAsConfig, log log.Logger) (*fileReade
 		log.Error("Cannot read directory", "error", err)
 	}

-	path, err := filepath.EvalSymlinks(path)
+	copy := path
+	path, err := filepath.Abs(path)
+	if err != nil {
+		log.Error("Could not create absolute path ", "path", path)
+		path = copy //if .Abs return an error we fallback to path
+	}
+
+	path, err = filepath.EvalSymlinks(path)
 	if err != nil {
 		log.Error("Failed to read content of symlinked path: %s", path)
 	}

-	absPath, err := filepath.Abs(path)
-	if err != nil {
-		log.Error("Could not create absolute path ", "path", path)
-		absPath = path //if .Abs return an error we fallback to path
-	}
-
 	return &fileReader{
 		Cfg:  cfg,
-		Path: absPath,
+		Path: path,
 		log:  log,
 		dashboardService: dashboards.NewProvisioningService(),
 	}, nil

From 0c45ee63a9bf360ded82e3a229fd0a142187c797 Mon Sep 17 00:00:00 2001
From: David Kaltschmidt
Date: Thu, 31 May 2018 11:26:24 +0200
Subject: [PATCH 129/488] Guard /explore by editor role on the backend

---
 pkg/api/api.go | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/pkg/api/api.go b/pkg/api/api.go
index 01189f7a81e..c205e7d3e2f 100644
--- a/pkg/api/api.go
+++ b/pkg/api/api.go
@@ -77,6 +77,9 @@ func (hs *HTTPServer) registerRoutes() {
 	r.Get("/dashboards/", reqSignedIn, Index)
 	r.Get("/dashboards/*", reqSignedIn, Index)

+	r.Get("/explore/", reqEditorRole, Index)
+	r.Get("/explore/*", reqEditorRole, Index)
+
 	r.Get("/playlists/", reqSignedIn, Index)
 	r.Get("/playlists/*", reqSignedIn, Index)
 	r.Get("/alerting/", reqSignedIn, Index)

From 44f5b92fbcd77330f28e61bdcb84d0b9499b6b47 Mon Sep 17 00:00:00 2001
From: bergquist
Date: Thu, 31 May 2018 11:38:29 +0200
Subject: [PATCH 130/488] provisioning: only provision if json file is newer than db

---
 pkg/services/provisioning/dashboards/file_reader.go | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/pkg/services/provisioning/dashboards/file_reader.go b/pkg/services/provisioning/dashboards/file_reader.go
index 93846f5c474..cd4598794bc 100644
--- a/pkg/services/provisioning/dashboards/file_reader.go
+++ b/pkg/services/provisioning/dashboards/file_reader.go
@@ -159,7 +159,7 @@ func (fr *fileReader) saveDashboard(path string, folderId int64, fileInfo os.Fil
 	}

 	provisionedData, alreadyProvisioned := provisionedDashboardRefs[path]
-	upToDate := alreadyProvisioned && provisionedData.Updated == resolvedFileInfo.ModTime().Unix()
+	upToDate := alreadyProvisioned && provisionedData.Updated >= resolvedFileInfo.ModTime().Unix()

 	dash, err := fr.readDashboardFromFile(path, resolvedFileInfo.ModTime(), folderId)
 	if err != nil {

From 938deae4b467c2fcf4f35304dba7968e514f49a8 Mon Sep 17 00:00:00 2001
From: Marcus Efraimsson
Date: Thu, 31 May 2018 15:24:01 +0200
Subject: [PATCH 131/488] changelog: add notes about closing #11515 [skip ci]

---
 CHANGELOG.md | 1 +
 1 file changed, 1 insertion(+)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 5b756ea0102..280d4429778 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -22,6 +22,7 @@
 * **SQL**: Second epochs are now correctly converted to ms. [#12085](https://github.com/grafana/grafana/pull/12085)
 * **Singlestat**: Fix singlestat threshold tooltip [#11971](https://github.com/grafana/grafana/issues/11971)
 * **Dashboard**: Hide grid controls in fullscreen/low-activity views [#11771](https://github.com/grafana/grafana/issues/11771)
+* **Dashboard**: Validate uid when importing dashboards [#11515](https://github.com/grafana/grafana/issues/11515)

 # 5.1.3 (2018-05-16)

From 37f9bdfc8ce15f061d30c613e9c849a8907a6a54 Mon Sep 17 00:00:00 2001
From: Patrick O'Carroll
Date: Thu, 31 May 2018 15:40:57 +0200
Subject: [PATCH 132/488] save modal ux improvements (#11822)

changes to save modal when saving an updated dashboard: changed time range
and variables are no longer saved by default, you'll need to actively choose
if you want to save the updated time range and/or variables.

---
 .../app/features/dashboard/dashboard_model.ts | 26 +++++-
 public/app/features/dashboard/save_modal.ts   | 65 ++++++++++++--
 .../dashboard/specs/dashboard_model.jest.ts   | 59 ++++++++++++
 .../dashboard/specs/save_modal.jest.ts        | 90 +++++++++++++++++++
 4 files changed, 233 insertions(+), 7 deletions(-)
 create mode 100644 public/app/features/dashboard/specs/save_modal.jest.ts

diff --git a/public/app/features/dashboard/dashboard_model.ts b/public/app/features/dashboard/dashboard_model.ts
index 8a300a80341..a37e753bd89 100644
--- a/public/app/features/dashboard/dashboard_model.ts
+++ b/public/app/features/dashboard/dashboard_model.ts
@@ -22,8 +22,10 @@ export class DashboardModel {
   editable: any;
   graphTooltip: any;
   time: any;
+  originalTime: any;
   timepicker: any;
   templating: any;
+  originalTemplating: any;
   annotations: any;
   refresh: any;
   snapshot: any;
@@ -68,8 +70,12 @@
     this.editable = data.editable !== false;
     this.graphTooltip = data.graphTooltip || 0;
     this.time = data.time || { from: 'now-6h', to: 'now' };
+    this.originalTime = _.cloneDeep(this.time);
     this.timepicker = data.timepicker || {};
     this.templating = this.ensureListExist(data.templating);
+    this.originalTemplating = _.map(this.templating.list, variable => {
+      return { name: variable.name, current: _.clone(variable.current) };
+    });
     this.annotations = this.ensureListExist(data.annotations);
     this.refresh = data.refresh;
     this.snapshot = data.snapshot;
@@ -130,7 +136,12 @@
   }

   // cleans meta data and other non persistent state
-  getSaveModelClone() {
+  getSaveModelClone(options?) {
+    let defaults = _.defaults(options || {}, {
+      saveVariables: false,
+      saveTimerange: false,
+    });
+
     // make clone
     var copy: any = {};
     for (var property in this) {
@@ -142,10 +153,22 @@
     }

     // get variable save models
     copy.templating = {
       list: _.map(this.templating.list, variable => (variable.getSaveModel ? variable.getSaveModel() : variable)),
     };

+    if (!defaults.saveVariables && copy.templating.list.length === this.originalTemplating.length) {
+      for (let i = 0; i < copy.templating.list.length; i++) {
+        if (copy.templating.list[i].name === this.originalTemplating[i].name) {
+          copy.templating.list[i].current = this.originalTemplating[i].current;
+        }
+      }
+    }
+
+    if (!defaults.saveTimerange) {
+      copy.time = this.originalTime;
+    }
+
     // get panel save models
     copy.panels = _.chain(this.panels)
       .filter(panel => panel.type !== 'add-panel')

diff --git a/public/app/features/dashboard/save_modal.ts b/public/app/features/dashboard/save_modal.ts
index 33165758555..1c364fbc55f 100644
--- a/public/app/features/dashboard/save_modal.ts
+++ b/public/app/features/dashboard/save_modal.ts
@@ -1,4 +1,5 @@
 import coreModule from 'app/core/core_module';
+import _ from 'lodash';

 const template = `
[HTML content lost in extraction: the new save dashboard modal template from save_modal.ts and the alert list panel's editor.html changes. Only stray field labels survive (Filter, Alert name, Dashboard title, Dashboard tags, State filter), which correspond to the filter options the alert list editor gains in the module.ts diff below.]
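Those editor fields map onto query-string parameters the panel sends to the /api/alerts endpoint, as the module.ts diff below shows (query, dashboardQuery, dashboardTag, folderId). As a rough sketch of that contract from the server side, assuming a plain net/http handler rather than Grafana's actual routing in pkg/api, and assuming the array-valued dashboardTags is serialized as a repeated parameter:

```go
package main

import (
	"fmt"
	"log"
	"net/http"
	"strconv"
)

// alertsHandler illustrates parsing the filter parameters the alert list
// panel sends to /api/alerts. Only the parameter names are taken from the
// diff below; the handler itself is a stdlib stand-in, not Grafana code.
func alertsHandler(w http.ResponseWriter, r *http.Request) {
	q := r.URL.Query()
	nameFilter := q.Get("query")               // panel.nameFilter
	dashboardFilter := q.Get("dashboardQuery") // panel.dashboardFilter
	tags := q["dashboardTag"]                  // panel.dashboardTags, one value per tag (assumed serialization)
	folderID, _ := strconv.ParseInt(q.Get("folderId"), 10, 64)
	fmt.Fprintf(w, "name=%q dashboard=%q tags=%v folderId=%d\n",
		nameFilter, dashboardFilter, tags, folderID)
}

func main() {
	http.HandleFunc("/api/alerts", alertsHandler)
	log.Fatal(http.ListenAndServe(":8080", nil))
}
```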
diff --git a/public/app/plugins/panel/alertlist/module.ts b/public/app/plugins/panel/alertlist/module.ts index 35fbaead3b1..55869ce626d 100644 --- a/public/app/plugins/panel/alertlist/module.ts +++ b/public/app/plugins/panel/alertlist/module.ts @@ -21,6 +21,7 @@ class AlertListPanel extends PanelCtrl { currentAlerts: any = []; alertHistory: any = []; noAlertsMessage: string; + // Set and populate defaults panelDefaults = { show: 'current', @@ -28,6 +29,9 @@ class AlertListPanel extends PanelCtrl { stateFilter: [], onlyAlertsOnDashboard: false, sortOrder: 1, + dashboardFilter: '', + nameFilter: '', + folderId: null, }; /** @ngInject */ @@ -89,6 +93,11 @@ class AlertListPanel extends PanelCtrl { }); } + onFolderChange(folder: any) { + this.panel.folderId = folder.id; + this.refresh(); + } + getStateChanges() { var params: any = { limit: this.panel.limit, @@ -110,6 +119,7 @@ class AlertListPanel extends PanelCtrl { al.info = alertDef.getAlertAnnotationInfo(al); return al; }); + this.noAlertsMessage = this.alertHistory.length === 0 ? 'No alerts in current time range' : ''; return this.alertHistory; @@ -121,10 +131,26 @@ class AlertListPanel extends PanelCtrl { state: this.panel.stateFilter, }; + if (this.panel.nameFilter) { + params.query = this.panel.nameFilter; + } + + if (this.panel.folderId >= 0) { + params.folderId = this.panel.folderId; + } + + if (this.panel.dashboardFilter) { + params.dashboardQuery = this.panel.dashboardFilter; + } + if (this.panel.onlyAlertsOnDashboard) { params.dashboardId = this.dashboard.id; } + if (this.panel.dashboardTags) { + params.dashboardTag = this.panel.dashboardTags; + } + return this.backendSrv.get(`/api/alerts`, params).then(res => { this.currentAlerts = this.sortResult( _.map(res, al => { @@ -135,6 +161,9 @@ class AlertListPanel extends PanelCtrl { return al; }) ); + if (this.currentAlerts.length > this.panel.limit) { + this.currentAlerts = this.currentAlerts.slice(0, this.panel.limit); + } this.noAlertsMessage = this.currentAlerts.length === 0 ? 
'No alerts' : ''; return this.currentAlerts; From b67872bc35c63eb6debf2ac121673442d0a3f948 Mon Sep 17 00:00:00 2001 From: Marcus Efraimsson Date: Fri, 1 Jun 2018 14:49:14 +0200 Subject: [PATCH 142/488] changelog: add notes about closing #11500, #8168, #6541 [skip ci] --- CHANGELOG.md | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 6d7e46d6cf4..7ef36a8796f 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,9 @@ # 5.2.0 (unreleased) +### New Features + +* **Alert list panel**: Updated to support filtering alerts by name, dashboard title, folder, tags [#11500](https://github.com/grafana/grafana/issues/11500), [#8168](https://github.com/grafana/grafana/issues/8168), [#6541](https://github.com/grafana/grafana/issues/6541) + ### Minor * **Dashboard**: Modified time range and variables are now not saved by default [#10748](https://github.com/grafana/grafana/issues/10748), [#8805](https://github.com/grafana/grafana/issues/8805) From f5cf92636451ef2bb80f86606e6e8b03cb28c962 Mon Sep 17 00:00:00 2001 From: Marcus Efraimsson Date: Fri, 1 Jun 2018 15:23:26 +0200 Subject: [PATCH 143/488] changelog: add notes about closing #5893 [skip ci] --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 7ef36a8796f..76e538a8e32 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,7 @@ ### New Features +* **Elasticsearch**: Alerting support [#5893](https://github.com/grafana/grafana/issues/5893), thx [@WPH95](https://github.com/WPH95) * **Alert list panel**: Updated to support filtering alerts by name, dashboard title, folder, tags [#11500](https://github.com/grafana/grafana/issues/11500), [#8168](https://github.com/grafana/grafana/issues/8168), [#6541](https://github.com/grafana/grafana/issues/6541) ### Minor From 75ee1e920890e2b7568407b0034cbddc01ebdce3 Mon Sep 17 00:00:00 2001 From: bergquist Date: Mon, 4 Jun 2018 08:13:20 +0200 Subject: [PATCH 144/488] renames intervalSeconds to updateIntervalSeconds --- docs/sources/administration/provisioning.md | 1 + .../provisioning/dashboards/config_reader.go | 4 +- .../dashboards/config_reader_test.go | 12 +-- .../provisioning/dashboards/file_reader.go | 2 +- .../dashboards-from-disk/dev-dashboards.yaml | 2 +- .../test-configs/version-0/version-0.yaml | 2 +- pkg/services/provisioning/dashboards/types.go | 80 +++++++++---------- 7 files changed, 53 insertions(+), 50 deletions(-) diff --git a/docs/sources/administration/provisioning.md b/docs/sources/administration/provisioning.md index 79b47aee9f6..888a0777796 100644 --- a/docs/sources/administration/provisioning.md +++ b/docs/sources/administration/provisioning.md @@ -197,6 +197,7 @@ providers: folder: '' type: file disableDeletion: false + updateIntervalSeconds: 3 #how often Grafana will scan for changed dashboards options: path: /var/lib/grafana/dashboards ``` diff --git a/pkg/services/provisioning/dashboards/config_reader.go b/pkg/services/provisioning/dashboards/config_reader.go index f8b6070c704..7508550838f 100644 --- a/pkg/services/provisioning/dashboards/config_reader.go +++ b/pkg/services/provisioning/dashboards/config_reader.go @@ -82,8 +82,8 @@ func (cr *configReader) readConfig() ([]*DashboardsAsConfig, error) { dashboards[i].OrgId = 1 } - if dashboards[i].IntervalSeconds == 0 { - dashboards[i].IntervalSeconds = 3 + if dashboards[i].UpdateIntervalSeconds == 0 { + dashboards[i].UpdateIntervalSeconds = 3 } } diff --git a/pkg/services/provisioning/dashboards/config_reader_test.go 
b/pkg/services/provisioning/dashboards/config_reader_test.go index b49cd258005..df0d2ae038e 100644 --- a/pkg/services/provisioning/dashboards/config_reader_test.go +++ b/pkg/services/provisioning/dashboards/config_reader_test.go @@ -22,7 +22,7 @@ func TestDashboardsAsConfig(t *testing.T) { cfg, err := cfgProvider.readConfig() So(err, ShouldBeNil) - validateDashboardAsConfig(cfg) + validateDashboardAsConfig(t, cfg) }) Convey("Can read config file in version 0 format", func() { @@ -30,7 +30,7 @@ func TestDashboardsAsConfig(t *testing.T) { cfg, err := cfgProvider.readConfig() So(err, ShouldBeNil) - validateDashboardAsConfig(cfg) + validateDashboardAsConfig(t, cfg) }) Convey("Should skip invalid path", func() { @@ -56,7 +56,9 @@ func TestDashboardsAsConfig(t *testing.T) { }) }) } -func validateDashboardAsConfig(cfg []*DashboardsAsConfig) { +func validateDashboardAsConfig(t *testing.T, cfg []*DashboardsAsConfig) { + t.Helper() + So(len(cfg), ShouldEqual, 2) ds := cfg[0] @@ -68,7 +70,7 @@ func validateDashboardAsConfig(cfg []*DashboardsAsConfig) { So(len(ds.Options), ShouldEqual, 1) So(ds.Options["path"], ShouldEqual, "/var/lib/grafana/dashboards") So(ds.DisableDeletion, ShouldBeTrue) - So(ds.IntervalSeconds, ShouldEqual, 10) + So(ds.UpdateIntervalSeconds, ShouldEqual, 10) ds2 := cfg[1] So(ds2.Name, ShouldEqual, "default") @@ -79,5 +81,5 @@ func validateDashboardAsConfig(cfg []*DashboardsAsConfig) { So(len(ds2.Options), ShouldEqual, 1) So(ds2.Options["path"], ShouldEqual, "/var/lib/grafana/dashboards") So(ds2.DisableDeletion, ShouldBeFalse) - So(ds2.IntervalSeconds, ShouldEqual, 3) + So(ds2.UpdateIntervalSeconds, ShouldEqual, 3) } diff --git a/pkg/services/provisioning/dashboards/file_reader.go b/pkg/services/provisioning/dashboards/file_reader.go index a25b0208ad3..89416d2596c 100644 --- a/pkg/services/provisioning/dashboards/file_reader.go +++ b/pkg/services/provisioning/dashboards/file_reader.go @@ -66,7 +66,7 @@ func (fr *fileReader) ReadAndListen(ctx context.Context) error { fr.log.Error("failed to search for dashboards", "error", err) } - ticker := time.NewTicker(time.Duration(int64(time.Second) * fr.Cfg.IntervalSeconds)) + ticker := time.NewTicker(time.Duration(int64(time.Second) * fr.Cfg.UpdateIntervalSeconds)) running := false diff --git a/pkg/services/provisioning/dashboards/testdata/test-configs/dashboards-from-disk/dev-dashboards.yaml b/pkg/services/provisioning/dashboards/testdata/test-configs/dashboards-from-disk/dev-dashboards.yaml index 5ea2a0a4f75..e26c329f87c 100644 --- a/pkg/services/provisioning/dashboards/testdata/test-configs/dashboards-from-disk/dev-dashboards.yaml +++ b/pkg/services/provisioning/dashboards/testdata/test-configs/dashboards-from-disk/dev-dashboards.yaml @@ -6,7 +6,7 @@ providers: folder: 'developers' editable: true disableDeletion: true - intervalSeconds: 10 + updateIntervalSeconds: 10 type: file options: path: /var/lib/grafana/dashboards diff --git a/pkg/services/provisioning/dashboards/testdata/test-configs/version-0/version-0.yaml b/pkg/services/provisioning/dashboards/testdata/test-configs/version-0/version-0.yaml index bdbb06079fd..69a317fb396 100644 --- a/pkg/services/provisioning/dashboards/testdata/test-configs/version-0/version-0.yaml +++ b/pkg/services/provisioning/dashboards/testdata/test-configs/version-0/version-0.yaml @@ -3,7 +3,7 @@ folder: 'developers' editable: true disableDeletion: true - intervalSeconds: 10 + updateIntervalSeconds: 10 type: file options: path: /var/lib/grafana/dashboards diff --git 
a/pkg/services/provisioning/dashboards/types.go b/pkg/services/provisioning/dashboards/types.go index 424e5e35f4a..a658b816c7d 100644 --- a/pkg/services/provisioning/dashboards/types.go +++ b/pkg/services/provisioning/dashboards/types.go @@ -10,25 +10,25 @@ import ( ) type DashboardsAsConfig struct { - Name string - Type string - OrgId int64 - Folder string - Editable bool - Options map[string]interface{} - DisableDeletion bool - IntervalSeconds int64 + Name string + Type string + OrgId int64 + Folder string + Editable bool + Options map[string]interface{} + DisableDeletion bool + UpdateIntervalSeconds int64 } type DashboardsAsConfigV0 struct { - Name string `json:"name" yaml:"name"` - Type string `json:"type" yaml:"type"` - OrgId int64 `json:"org_id" yaml:"org_id"` - Folder string `json:"folder" yaml:"folder"` - Editable bool `json:"editable" yaml:"editable"` - Options map[string]interface{} `json:"options" yaml:"options"` - DisableDeletion bool `json:"disableDeletion" yaml:"disableDeletion"` - IntervalSeconds int64 `json:"intervalSeconds" yaml:"intervalSeconds"` + Name string `json:"name" yaml:"name"` + Type string `json:"type" yaml:"type"` + OrgId int64 `json:"org_id" yaml:"org_id"` + Folder string `json:"folder" yaml:"folder"` + Editable bool `json:"editable" yaml:"editable"` + Options map[string]interface{} `json:"options" yaml:"options"` + DisableDeletion bool `json:"disableDeletion" yaml:"disableDeletion"` + UpdateIntervalSeconds int64 `json:"updateIntervalSeconds" yaml:"updateIntervalSeconds"` } type ConfigVersion struct { @@ -40,14 +40,14 @@ type DashboardAsConfigV1 struct { } type DashboardProviderConfigs struct { - Name string `json:"name" yaml:"name"` - Type string `json:"type" yaml:"type"` - OrgId int64 `json:"orgId" yaml:"orgId"` - Folder string `json:"folder" yaml:"folder"` - Editable bool `json:"editable" yaml:"editable"` - Options map[string]interface{} `json:"options" yaml:"options"` - DisableDeletion bool `json:"disableDeletion" yaml:"disableDeletion"` - IntervalSeconds int64 `json:"intervalSeconds" yaml:"intervalSeconds"` + Name string `json:"name" yaml:"name"` + Type string `json:"type" yaml:"type"` + OrgId int64 `json:"orgId" yaml:"orgId"` + Folder string `json:"folder" yaml:"folder"` + Editable bool `json:"editable" yaml:"editable"` + Options map[string]interface{} `json:"options" yaml:"options"` + DisableDeletion bool `json:"disableDeletion" yaml:"disableDeletion"` + UpdateIntervalSeconds int64 `json:"updateIntervalSeconds" yaml:"updateIntervalSeconds"` } func createDashboardJson(data *simplejson.Json, lastModified time.Time, cfg *DashboardsAsConfig, folderId int64) (*dashboards.SaveDashboardDTO, error) { @@ -71,14 +71,14 @@ func mapV0ToDashboardAsConfig(v0 []*DashboardsAsConfigV0) []*DashboardsAsConfig for _, v := range v0 { r = append(r, &DashboardsAsConfig{ - Name: v.Name, - Type: v.Type, - OrgId: v.OrgId, - Folder: v.Folder, - Editable: v.Editable, - Options: v.Options, - DisableDeletion: v.DisableDeletion, - IntervalSeconds: v.IntervalSeconds, + Name: v.Name, + Type: v.Type, + OrgId: v.OrgId, + Folder: v.Folder, + Editable: v.Editable, + Options: v.Options, + DisableDeletion: v.DisableDeletion, + UpdateIntervalSeconds: v.UpdateIntervalSeconds, }) } @@ -90,14 +90,14 @@ func (dc *DashboardAsConfigV1) mapToDashboardAsConfig() []*DashboardsAsConfig { for _, v := range dc.Providers { r = append(r, &DashboardsAsConfig{ - Name: v.Name, - Type: v.Type, - OrgId: v.OrgId, - Folder: v.Folder, - Editable: v.Editable, - Options: v.Options, - DisableDeletion: 
v.DisableDeletion,
-			IntervalSeconds: v.IntervalSeconds,
+			Name:                  v.Name,
+			Type:                  v.Type,
+			OrgId:                 v.OrgId,
+			Folder:                v.Folder,
+			Editable:              v.Editable,
+			Options:               v.Options,
+			DisableDeletion:       v.DisableDeletion,
+			UpdateIntervalSeconds: v.UpdateIntervalSeconds,
 		})
 	}

From 3f5078339c0193a416775e719fd5c8a0293229ab Mon Sep 17 00:00:00 2001
From: bergquist
Date: Mon, 4 Jun 2018 08:27:03 +0200
Subject: [PATCH 145/488] tests: uses different paths depending on os

---
 .../provisioning/dashboards/file_reader_test.go | 11 +++++++----
 1 file changed, 7 insertions(+), 4 deletions(-)

diff --git a/pkg/services/provisioning/dashboards/file_reader_test.go b/pkg/services/provisioning/dashboards/file_reader_test.go
index 87e9ec6d226..bdc1e95aafe 100644
--- a/pkg/services/provisioning/dashboards/file_reader_test.go
+++ b/pkg/services/provisioning/dashboards/file_reader_test.go
@@ -49,13 +49,16 @@ func TestCreatingNewDashboardFileReader(t *testing.T) {
 		})

 		Convey("using full path", func() {
-			cfg.Options["folder"] = "/var/lib/grafana/dashboards"
+			fullPath := "/var/lib/grafana/dashboards"
+			if runtime.GOOS == "windows" {
+				fullPath = `c:\var\lib\grafana`
+			}
+
+			cfg.Options["folder"] = fullPath

 			reader, err := NewDashboardFileReader(cfg, log.New("test-logger"))
 			So(err, ShouldBeNil)

-			if runtime.GOOS != "windows" {
-				So(reader.Path, ShouldEqual, "/var/lib/grafana/dashboards")
-			}
+			So(reader.Path, ShouldEqual, fullPath)
 			So(filepath.IsAbs(reader.Path), ShouldBeTrue)
 		})

From f606654c50239fbc4616bcdd50c0441dd810ed1f Mon Sep 17 00:00:00 2001
From: bergquist
Date: Mon, 4 Jun 2018 09:04:33 +0200
Subject: [PATCH 146/488] provisioning: adds fallback if evalsymlink/abs fails

---
 pkg/services/provisioning/dashboards/file_reader.go | 6 +++++-
 1 file changed, 5 insertions(+), 1 deletion(-)

diff --git a/pkg/services/provisioning/dashboards/file_reader.go b/pkg/services/provisioning/dashboards/file_reader.go
index a1ba4dbf8e2..8af23980531 100644
--- a/pkg/services/provisioning/dashboards/file_reader.go
+++ b/pkg/services/provisioning/dashboards/file_reader.go
@@ -51,7 +51,6 @@ func NewDashboardFileReader(cfg *DashboardsAsConfig, log log.Logger) (*fileReade
 	path, err := filepath.Abs(path)
 	if err != nil {
 		log.Error("Could not create absolute path ", "path", path)
-		path = copy //if .Abs return an error we fallback to path
 	}

 	path, err = filepath.EvalSymlinks(path)
 	if err != nil {
 		log.Error("Failed to read content of symlinked path: %s", path)
 	}

+	if path == "" {
+		path = copy
+		log.Info("falling back to original path due to EvalSymlink/Abs failure")
+	}
+
 	return &fileReader{
 		Cfg:  cfg,
 		Path: path,

From feb5e20779379863687e624e7ddf52e1c503061d Mon Sep 17 00:00:00 2001
From: Torkel Ödegaard
Date: Mon, 4 Jun 2018 11:17:50 +0200
Subject: [PATCH 147/488] datasource: added option no-direct-access to
 ds-http-settings directive, closes #12138

---
 public/app/features/plugins/ds_edit_ctrl.ts                | 4 ++++
 public/app/features/plugins/partials/ds_http_settings.html | 2 +-
 2 files changed, 5 insertions(+), 1 deletion(-)

diff --git a/public/app/features/plugins/ds_edit_ctrl.ts b/public/app/features/plugins/ds_edit_ctrl.ts
index b98f0f48910..f86cc694255 100644
--- a/public/app/features/plugins/ds_edit_ctrl.ts
+++ b/public/app/features/plugins/ds_edit_ctrl.ts
@@ -204,10 +204,14 @@ coreModule.directive('datasourceHttpSettings', function() {
     scope: {
       current: '=',
       suggestUrl: '@',
+      noDirectAccess: '@',
     },
     templateUrl:
'public/app/features/plugins/partials/ds_http_settings.html', link: { pre: function($scope, elem, attrs) { + // do not show access option if direct access is disabled + $scope.showAccessOption = $scope.noDirectAccess !== 'true'; + $scope.getSuggestUrls = function() { return [$scope.suggestUrl]; }; diff --git a/public/app/features/plugins/partials/ds_http_settings.html b/public/app/features/plugins/partials/ds_http_settings.html index b9f5683129c..b35aab0c099 100644 --- a/public/app/features/plugins/partials/ds_http_settings.html +++ b/public/app/features/plugins/partials/ds_http_settings.html @@ -22,7 +22,7 @@
[stripped HTML hunk: the container around the Access (proxy/direct) selector gains an ng-if="showAccessOption" guard so it can be hidden]
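For reference, a hypothetical datasource config partial opting out of direct access with the new attribute; the attribute name is derived from the `noDirectAccess: '@'` scope binding in the diff above:

```html
<!-- Hypothetical usage: no-direct-access="true" makes the directive set
     showAccessOption to false, which hides the proxy/direct Access selector. -->
<datasource-http-settings current="ctrl.current" no-direct-access="true">
</datasource-http-settings>
```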
From 13c6f37ea581db9ecb04c859618847425d7cba46 Mon Sep 17 00:00:00 2001 From: Alexander Zobnin Date: Fri, 1 Jun 2018 13:39:44 +0300 Subject: [PATCH 148/488] alerting: show alerts for user with Viewer role changelog: add notes about closing #11167 remove changelog note reformat alert_test.go --- pkg/api/alerting.go | 2 +- pkg/services/sqlstore/alert.go | 2 +- pkg/services/sqlstore/alert_test.go | 6 +++--- 3 files changed, 5 insertions(+), 5 deletions(-) diff --git a/pkg/api/alerting.go b/pkg/api/alerting.go index 961fc11b2dc..60013fe2b10 100644 --- a/pkg/api/alerting.go +++ b/pkg/api/alerting.go @@ -79,7 +79,7 @@ func GetAlerts(c *m.ReqContext) Response { DashboardIds: dashboardIDs, Type: string(search.DashHitDB), FolderIds: folderIDs, - Permission: m.PERMISSION_EDIT, + Permission: m.PERMISSION_VIEW, } err := bus.Dispatch(&searchQuery) diff --git a/pkg/services/sqlstore/alert.go b/pkg/services/sqlstore/alert.go index 58ec7e2857a..531a70b2101 100644 --- a/pkg/services/sqlstore/alert.go +++ b/pkg/services/sqlstore/alert.go @@ -116,7 +116,7 @@ func HandleAlertsQuery(query *m.GetAlertsQuery) error { } if query.User.OrgRole != m.ROLE_ADMIN { - builder.writeDashboardPermissionFilter(query.User, m.PERMISSION_EDIT) + builder.writeDashboardPermissionFilter(query.User, m.PERMISSION_VIEW) } builder.Write(" ORDER BY name ASC") diff --git a/pkg/services/sqlstore/alert_test.go b/pkg/services/sqlstore/alert_test.go index be48c7b2f52..79fa99864e7 100644 --- a/pkg/services/sqlstore/alert_test.go +++ b/pkg/services/sqlstore/alert_test.go @@ -2,7 +2,6 @@ package sqlstore import ( "testing" - "time" "github.com/grafana/grafana/pkg/components/simplejson" @@ -110,11 +109,12 @@ func TestAlertingDataAccess(t *testing.T) { }) Convey("Viewer cannot read alerts", func() { - alertQuery := m.GetAlertsQuery{DashboardIDs: []int64{testDash.Id}, PanelId: 1, OrgId: 1, User: &m.SignedInUser{OrgRole: m.ROLE_VIEWER}} + viewerUser := &m.SignedInUser{OrgRole: m.ROLE_VIEWER, OrgId: 1} + alertQuery := m.GetAlertsQuery{DashboardIDs: []int64{testDash.Id}, PanelId: 1, OrgId: 1, User: viewerUser} err2 := HandleAlertsQuery(&alertQuery) So(err2, ShouldBeNil) - So(alertQuery.Result, ShouldHaveLength, 0) + So(alertQuery.Result, ShouldHaveLength, 1) }) Convey("Alerts with same dashboard id and panel id should update", func() { From e562ae753b75210a56d98e3689179bebb318d0f7 Mon Sep 17 00:00:00 2001 From: Leonard Gram Date: Mon, 4 Jun 2018 11:49:12 +0200 Subject: [PATCH 149/488] docs: docker secrets support. 
(#12141)

Closes #12132
---
 CHANGELOG.md                        |  1 +
 docs/sources/installation/docker.md | 12 ++++++++++++
 2 files changed, 13 insertions(+)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 76e538a8e32..ecbc99608c4 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -30,6 +30,7 @@
 * **Singlestat**: Fix singlestat threshold tooltip [#11971](https://github.com/grafana/grafana/issues/11971)
 * **Dashboard**: Hide grid controls in fullscreen/low-activity views [#11771](https://github.com/grafana/grafana/issues/11771)
 * **Dashboard**: Validate uid when importing dashboards [#11515](https://github.com/grafana/grafana/issues/11515)
+* **Docker**: Support for env variables ending with _FILE [grafana-docker #166](https://github.com/grafana/grafana-docker/pull/166), thx [@efrecon](https://github.com/efrecon)

 # 5.1.3 (2018-05-16)

diff --git a/docs/sources/installation/docker.md b/docs/sources/installation/docker.md
index e78796845c4..e7dee84b5f4 100644
--- a/docs/sources/installation/docker.md
+++ b/docs/sources/installation/docker.md
@@ -130,6 +130,18 @@
 ID=$(id -u) # saves your user id in the ID variable
 docker run -d --user $ID --volume "$PWD/data:/var/lib/grafana" -p 3000:3000 grafana/grafana:5.1.0
 ```

+## Reading secrets from files (support for Docker Secrets)
+
+It's possible to supply Grafana with configuration through files. This works well with [Docker Secrets](https://docs.docker.com/engine/swarm/secrets/), as the secrets by default get mapped into `/run/secrets/` of the container.
+
+You can do this with any of the configuration options in conf/grafana.ini by setting `GF_<SectionName>_<KeyName>_FILE` to the path of the file holding the secret.
+
+Let's say you want to set the admin password this way.
+
+- Admin password secret: `/run/secrets/admin_password`
+- Environment variable: `GF_SECURITY_ADMIN_PASSWORD_FILE=/run/secrets/admin_password`
+
+
 ## Migration from a previous version of the docker container to 5.1 or later

 The docker container for Grafana has seen a major rewrite for 5.1.
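To make the `_FILE` convention concrete, here is a hypothetical swarm-style deployment; the secret name `admin_password` follows the doc's own example, and the commands are standard Docker CLI rather than anything added by this patch:

```bash
# Create the secret once (hypothetical value), then run Grafana against it.
echo "s3cr3t" | docker secret create admin_password -

# The env var points Grafana at the file Docker mounts under /run/secrets/.
docker service create \
  --name grafana \
  --secret admin_password \
  -e GF_SECURITY_ADMIN_PASSWORD_FILE=/run/secrets/admin_password \
  -p 3000:3000 \
  grafana/grafana
```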
From 7453df2662c569643e0d358c8e06ae99af89041e Mon Sep 17 00:00:00 2001 From: Marcus Efraimsson Date: Mon, 4 Jun 2018 11:57:13 +0200 Subject: [PATCH 150/488] changelog: add notes about closing #11167 [skip ci] --- CHANGELOG.md | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index ecbc99608c4..9eda912e86a 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -30,7 +30,8 @@ * **Singlestat**: Fix singlestat threshold tooltip [#11971](https://github.com/grafana/grafana/issues/11971) * **Dashboard**: Hide grid controls in fullscreen/low-activity views [#11771](https://github.com/grafana/grafana/issues/11771) * **Dashboard**: Validate uid when importing dashboards [#11515](https://github.com/grafana/grafana/issues/11515) -* **Docker**: Support for env variables ending with _FILE [grafana-docker #166](https://github.com/grafana/grafana-docker/pull/166), thx [@efrecon](https://github.com/efrecon) +* **Docker**: Support for env variables ending with _FILE [grafana-docker #166](https://github.com/grafana/grafana-docker/pull/166), thx [@efrecon](https://github.com/efrecon) +* **Alert list panel**: Show alerts for user with viewer role [#11167](https://github.com/grafana/grafana/issues/11167) # 5.1.3 (2018-05-16) From 08ee1da6b128b8a3191768448118aec2ed564ef2 Mon Sep 17 00:00:00 2001 From: David Kaltschmidt Date: Wed, 30 May 2018 11:29:44 +0200 Subject: [PATCH 151/488] InfluxDB IFQL datasource --- package.json | 1 + pkg/api/frontendsettings.go | 11 + pkg/models/datasource.go | 1 + public/app/core/table_model.ts | 4 + .../app/features/plugins/built_in_plugins.ts | 2 + .../datasource/influxdb-ifql/README.md | 26 ++ .../datasource/influxdb-ifql/datasource.ts | 255 +++++++++++++ .../influxdb-ifql/img/influxdb_logo.svg | 26 ++ .../datasource/influxdb-ifql/module.ts | 17 + .../partials/annotations.editor.html | 24 ++ .../influxdb-ifql/partials/config.html | 24 ++ .../influxdb-ifql/partials/query.editor.html | 24 ++ .../datasource/influxdb-ifql/plugin.json | 24 ++ .../datasource/influxdb-ifql/query_ctrl.ts | 17 + .../influxdb-ifql/response_parser.ts | 88 +++++ .../specs/response_parser.jest.ts | 63 ++++ .../specs/sample_response_csv.ts | 349 ++++++++++++++++++ yarn.lock | 4 + 18 files changed, 960 insertions(+) create mode 100644 public/app/plugins/datasource/influxdb-ifql/README.md create mode 100644 public/app/plugins/datasource/influxdb-ifql/datasource.ts create mode 100644 public/app/plugins/datasource/influxdb-ifql/img/influxdb_logo.svg create mode 100644 public/app/plugins/datasource/influxdb-ifql/module.ts create mode 100644 public/app/plugins/datasource/influxdb-ifql/partials/annotations.editor.html create mode 100644 public/app/plugins/datasource/influxdb-ifql/partials/config.html create mode 100644 public/app/plugins/datasource/influxdb-ifql/partials/query.editor.html create mode 100644 public/app/plugins/datasource/influxdb-ifql/plugin.json create mode 100644 public/app/plugins/datasource/influxdb-ifql/query_ctrl.ts create mode 100644 public/app/plugins/datasource/influxdb-ifql/response_parser.ts create mode 100644 public/app/plugins/datasource/influxdb-ifql/specs/response_parser.jest.ts create mode 100644 public/app/plugins/datasource/influxdb-ifql/specs/sample_response_csv.ts diff --git a/package.json b/package.json index df3da5812c1..5fd72357f6f 100644 --- a/package.json +++ b/package.json @@ -157,6 +157,7 @@ "moment": "^2.18.1", "mousetrap": "^1.6.0", "mousetrap-global-bind": "^1.1.0", + "papaparse": "^4.4.0", "prismjs": "^1.6.0", "prop-types": 
"^15.6.0", "react": "^16.2.0", diff --git a/pkg/api/frontendsettings.go b/pkg/api/frontendsettings.go index 5cd52122c3f..84524bad526 100644 --- a/pkg/api/frontendsettings.go +++ b/pkg/api/frontendsettings.go @@ -85,6 +85,13 @@ func getFrontendSettingsMap(c *m.ReqContext) (map[string]interface{}, error) { dsMap["database"] = ds.Database dsMap["url"] = url } + + if ds.Type == m.DS_INFLUXDB_IFQL { + dsMap["username"] = ds.User + dsMap["password"] = ds.Password + dsMap["database"] = ds.Database + dsMap["url"] = url + } } if ds.Type == m.DS_ES { @@ -95,6 +102,10 @@ func getFrontendSettingsMap(c *m.ReqContext) (map[string]interface{}, error) { dsMap["database"] = ds.Database } + if ds.Type == m.DS_INFLUXDB_IFQL { + dsMap["database"] = ds.Database + } + if ds.Type == m.DS_PROMETHEUS { // add unproxied server URL for link to Prometheus web UI dsMap["directUrl"] = ds.Url diff --git a/pkg/models/datasource.go b/pkg/models/datasource.go index b7e3e3eaa17..530f31242a9 100644 --- a/pkg/models/datasource.go +++ b/pkg/models/datasource.go @@ -12,6 +12,7 @@ const ( DS_GRAPHITE = "graphite" DS_INFLUXDB = "influxdb" DS_INFLUXDB_08 = "influxdb_08" + DS_INFLUXDB_IFQL = "influxdb-ifql" DS_ES = "elasticsearch" DS_OPENTSDB = "opentsdb" DS_CLOUDWATCH = "cloudwatch" diff --git a/public/app/core/table_model.ts b/public/app/core/table_model.ts index 57800b3e48d..5716aac2be6 100644 --- a/public/app/core/table_model.ts +++ b/public/app/core/table_model.ts @@ -44,4 +44,8 @@ export default class TableModel { this.columnMap[col.text] = col; } } + + addRow(row) { + this.rows.push(row); + } } diff --git a/public/app/features/plugins/built_in_plugins.ts b/public/app/features/plugins/built_in_plugins.ts index 6998321dd75..49be31e5474 100644 --- a/public/app/features/plugins/built_in_plugins.ts +++ b/public/app/features/plugins/built_in_plugins.ts @@ -4,6 +4,7 @@ import * as elasticsearchPlugin from 'app/plugins/datasource/elasticsearch/modul import * as opentsdbPlugin from 'app/plugins/datasource/opentsdb/module'; import * as grafanaPlugin from 'app/plugins/datasource/grafana/module'; import * as influxdbPlugin from 'app/plugins/datasource/influxdb/module'; +import * as influxdbIfqlPlugin from 'app/plugins/datasource/influxdb-ifql/module'; import * as mixedPlugin from 'app/plugins/datasource/mixed/module'; import * as mysqlPlugin from 'app/plugins/datasource/mysql/module'; import * as postgresPlugin from 'app/plugins/datasource/postgres/module'; @@ -30,6 +31,7 @@ const builtInPlugins = { 'app/plugins/datasource/opentsdb/module': opentsdbPlugin, 'app/plugins/datasource/grafana/module': grafanaPlugin, 'app/plugins/datasource/influxdb/module': influxdbPlugin, + 'app/plugins/datasource/influxdb-ifql/module': influxdbIfqlPlugin, 'app/plugins/datasource/mixed/module': mixedPlugin, 'app/plugins/datasource/mysql/module': mysqlPlugin, 'app/plugins/datasource/postgres/module': postgresPlugin, diff --git a/public/app/plugins/datasource/influxdb-ifql/README.md b/public/app/plugins/datasource/influxdb-ifql/README.md new file mode 100644 index 00000000000..91f82b2a89d --- /dev/null +++ b/public/app/plugins/datasource/influxdb-ifql/README.md @@ -0,0 +1,26 @@ +# InfluxDB (IFQL) Datasource [BETA] - Native Plugin + +Grafana ships with **built in** support for InfluxDB (>= 1.4.1). + +Use this datasource if you want to use IFQL to query your InfluxDB. +Feel free to run this datasource side-by-side with the non-IFQL datasource. +If you point both datasources to the same InfluxDB instance, you can switch query mode by switching the datasources. 
+ +Read more about IFQL here: + +[https://github.com/influxdata/ifql](https://github.com/influxdata/ifql) + +Read more about InfluxDB here: + +[http://docs.grafana.org/datasources/influxdb/](http://docs.grafana.org/datasources/influxdb/) + +## Roadmap + +- Sync Grafana time ranges with `range()` +- Template variable expansion +- Syntax highlighting +- Tab completion (functions, values) +- Result helpers (result counts, table previews) +- Annotations support +- Alerting integration +- Explore UI integration diff --git a/public/app/plugins/datasource/influxdb-ifql/datasource.ts b/public/app/plugins/datasource/influxdb-ifql/datasource.ts new file mode 100644 index 00000000000..bd3cb7e2d5b --- /dev/null +++ b/public/app/plugins/datasource/influxdb-ifql/datasource.ts @@ -0,0 +1,255 @@ +import _ from 'lodash'; + +import * as dateMath from 'app/core/utils/datemath'; + +import { getTableModelFromResult, getTimeSeriesFromResult, parseResults } from './response_parser'; + +function serializeParams(params) { + if (!params) { + return ''; + } + + return _.reduce( + params, + (memo, value, key) => { + if (value === null || value === undefined) { + return memo; + } + memo.push(encodeURIComponent(key) + '=' + encodeURIComponent(value)); + return memo; + }, + [] + ).join('&'); +} + +const MAX_SERIES = 20; +export default class InfluxDatasource { + type: string; + urls: any; + username: string; + password: string; + name: string; + orgName: string; + database: any; + basicAuth: any; + withCredentials: any; + interval: any; + supportAnnotations: boolean; + supportMetrics: boolean; + + /** @ngInject */ + constructor(instanceSettings, private backendSrv, private templateSrv) { + this.type = 'influxdb-ifql'; + this.urls = instanceSettings.url.split(',').map(url => url.trim()); + + this.username = instanceSettings.username; + this.password = instanceSettings.password; + this.name = instanceSettings.name; + this.orgName = instanceSettings.orgName || 'defaultorgname'; + this.database = instanceSettings.database; + this.basicAuth = instanceSettings.basicAuth; + this.withCredentials = instanceSettings.withCredentials; + this.interval = (instanceSettings.jsonData || {}).timeInterval; + this.supportAnnotations = true; + this.supportMetrics = true; + } + + query(options) { + const targets = _.cloneDeep(options.targets); + const queryTargets = targets.filter(t => t.query); + if (queryTargets.length === 0) { + return Promise.resolve({ data: [] }); + } + + // replace grafana variables + const timeFilter = this.getTimeFilter(options); + options.scopedVars.timeFilter = { value: timeFilter }; + + const queries = queryTargets.map(target => { + const { query, resultFormat } = target; + + // TODO replace templated variables + // allQueries = this.templateSrv.replace(allQueries, scopedVars); + + if (resultFormat === 'table') { + return ( + this._seriesQuery(query, options) + .then(response => parseResults(response.data)) + // Keep only first result from each request + .then(results => results[0]) + .then(getTableModelFromResult) + ); + } else { + return this._seriesQuery(query, options) + .then(response => parseResults(response.data)) + .then(results => results.map(getTimeSeriesFromResult)); + } + }); + + return Promise.all(queries).then((series: any) => { + let seriesList = _.flattenDeep(series).slice(0, MAX_SERIES); + return { data: seriesList }; + }); + } + + annotationQuery(options) { + if (!options.annotation.query) { + return Promise.reject({ + message: 'Query missing in annotation definition', + }); + } + + var 
timeFilter = this.getTimeFilter({ rangeRaw: options.rangeRaw });
+    var query = options.annotation.query.replace('$timeFilter', timeFilter);
+    query = this.templateSrv.replace(query, null, 'regex');
+
+    return {};
+  }
+
+  targetContainsTemplate(target) {
+    for (let group of target.groupBy) {
+      for (let param of group.params) {
+        if (this.templateSrv.variableExists(param)) {
+          return true;
+        }
+      }
+    }
+
+    for (let i in target.tags) {
+      if (this.templateSrv.variableExists(target.tags[i].value)) {
+        return true;
+      }
+    }
+
+    return false;
+  }
+
+  metricFindQuery(query: string, options?: any) {
+    var interpolated = this.templateSrv.replace(query, null, 'regex');
+
+    return this._seriesQuery(interpolated, options).then(_.curry(parseResults)(query));
+  }
+
+  _seriesQuery(query: string, options?: any) {
+    if (!query) {
+      return Promise.resolve({ data: '' });
+    }
+    return this._influxRequest('POST', '/v1/query', { q: query }, options);
+  }
+
+  testDatasource() {
+    const query = `from(db:"${this.database}") |> last()`;
+
+    return this._influxRequest('POST', '/v1/query', { q: query })
+      .then(res => {
+        if (res && res.trim()) {
+          return { status: 'success', message: 'Data source connected and database found.' };
+        }
+        return {
+          status: 'error',
+          message:
+            'Data source connected, but has no data. Verify the "Database" field and make sure the database has data.',
+        };
+      })
+      .catch(err => {
+        return { status: 'error', message: err.message };
+      });
+  }
+
+  _influxRequest(method: string, url: string, data: any, options?: any) {
+    // TODO reinstate round-robin
+    // const currentUrl = this.urls.shift();
+    // this.urls.push(currentUrl);
+    const currentUrl = this.urls[0];
+
+    let params: any = {
+      orgName: this.orgName,
+    };
+
+    if (this.username) {
+      params.u = this.username;
+      params.p = this.password;
+    }
+
+    if (options && options.database) {
+      params.db = options.database;
+    } else if (this.database) {
+      params.db = this.database;
+    }
+
+    // data sent as GET param
+    _.extend(params, data);
+    data = null;
+
+    let req: any = {
+      method: method,
+      url: currentUrl + url,
+      params: params,
+      data: data,
+      precision: 'ms',
+      inspect: { type: this.type },
+      paramSerializer: serializeParams,
+    };
+
+    req.headers = req.headers || {};
+    if (this.basicAuth || this.withCredentials) {
+      req.withCredentials = true;
+    }
+    if (this.basicAuth) {
+      req.headers.Authorization = this.basicAuth;
+    }
+
+    return this.backendSrv.datasourceRequest(req).then(
+      result => {
+        return result;
+      },
+      function(err) {
+        if (err.status !== 0 || err.status >= 300) {
+          if (err.data && err.data.error) {
+            throw {
+              message: 'InfluxDB Error: ' + err.data.error,
+              data: err.data,
+              config: err.config,
+            };
+          } else {
+            throw {
+              message: 'Network Error: ' + err.statusText + '(' + err.status + ')',
+              data: err.data,
+              config: err.config,
+            };
+          }
+        }
+      }
+    );
+  }
+
+  getTimeFilter(options) {
+    var from = this.getInfluxTime(options.rangeRaw.from, false);
+    var until = this.getInfluxTime(options.rangeRaw.to, true);
+    var fromIsAbsolute = from.slice(-2) === 'ms';
+
+    if (until === 'now()' && !fromIsAbsolute) {
+      return 'time >= ' + from;
+    }
+
+    return 'time >= ' + from + ' and time <= ' + until;
+  }
+
+  getInfluxTime(date, roundUp) {
+    if (_.isString(date)) {
+      if (date === 'now') {
+        return 'now()';
+      }
+
+      var parts = /^now-(\d+)([d|h|m|s])$/.exec(date);
+      if (parts) {
+        var amount = parseInt(parts[1]);
+        var unit = parts[2];
+        return 'now() - ' + amount + unit;
+      }
+      date = dateMath.parse(date, roundUp);
+    }
+
+    return
date.valueOf() + 'ms';
+  }
+}
diff --git a/public/app/plugins/datasource/influxdb-ifql/img/influxdb_logo.svg b/public/app/plugins/datasource/influxdb-ifql/img/influxdb_logo.svg
new file mode 100644
index 00000000000..3c0e379e0d7
--- /dev/null
+++ b/public/app/plugins/datasource/influxdb-ifql/img/influxdb_logo.svg
@@ -0,0 +1,26 @@
[stripped XML: the 26-line InfluxDB logo SVG markup was lost in extraction]
diff --git a/public/app/plugins/datasource/influxdb-ifql/module.ts b/public/app/plugins/datasource/influxdb-ifql/module.ts
new file mode 100644
index 00000000000..5997a7d061b
--- /dev/null
+++ b/public/app/plugins/datasource/influxdb-ifql/module.ts
@@ -0,0 +1,17 @@
+import InfluxDatasource from './datasource';
+import { InfluxQueryCtrl } from './query_ctrl';
+
+class InfluxConfigCtrl {
+  static templateUrl = 'partials/config.html';
+}
+
+class InfluxAnnotationsQueryCtrl {
+  static templateUrl = 'partials/annotations.editor.html';
+}
+
+export {
+  InfluxDatasource as Datasource,
+  InfluxQueryCtrl as QueryCtrl,
+  InfluxConfigCtrl as ConfigCtrl,
+  InfluxAnnotationsQueryCtrl as AnnotationsQueryCtrl,
+};
diff --git a/public/app/plugins/datasource/influxdb-ifql/partials/annotations.editor.html b/public/app/plugins/datasource/influxdb-ifql/partials/annotations.editor.html
new file mode 100644
index 00000000000..48991426c1e
--- /dev/null
+++ b/public/app/plugins/datasource/influxdb-ifql/partials/annotations.editor.html
@@ -0,0 +1,24 @@
[stripped HTML: annotations.editor.html markup was lost in extraction; the surviving fragments show the annotation query textarea and a "Field mappings" section ("If your influxdb query returns more than one field you need to specify the column names below. An annotation event is composed of a title, tags, and an additional text field.") with inputs for Text, Tags, and Title (deprecated)]
diff --git a/public/app/plugins/datasource/influxdb-ifql/partials/config.html b/public/app/plugins/datasource/influxdb-ifql/partials/config.html new file mode 100644 index 00000000000..be6f0438efd --- /dev/null +++ b/public/app/plugins/datasource/influxdb-ifql/partials/config.html @@ -0,0 +1,24 @@ + + + +

[stripped HTML: config.html markup was lost in extraction; besides the shared HTTP settings block, it renders an "InfluxDB Details" section with Default Database, User, and Password fields]
\ No newline at end of file diff --git a/public/app/plugins/datasource/influxdb-ifql/partials/query.editor.html b/public/app/plugins/datasource/influxdb-ifql/partials/query.editor.html new file mode 100644 index 00000000000..31f5923cdb2 --- /dev/null +++ b/public/app/plugins/datasource/influxdb-ifql/partials/query.editor.html @@ -0,0 +1,24 @@ + + +
[stripped HTML: query.editor.html markup was lost in extraction; per query_ctrl.ts below, it renders the IFQL query textarea and the Time series / Table result-format selector]
\ No newline at end of file diff --git a/public/app/plugins/datasource/influxdb-ifql/plugin.json b/public/app/plugins/datasource/influxdb-ifql/plugin.json new file mode 100644 index 00000000000..b4eb764d556 --- /dev/null +++ b/public/app/plugins/datasource/influxdb-ifql/plugin.json @@ -0,0 +1,24 @@ +{ + "type": "datasource", + "name": "InfluxDB (IFQL) [BETA]", + "id": "influxdb-ifql", + "defaultMatchFormat": "regex values", + "metrics": true, + "annotations": false, + "alerting": false, + "queryOptions": { + "minInterval": true + }, + "info": { + "description": "InfluxDB Data Source for IFQL Queries for Grafana", + "author": { + "name": "Grafana Project", + "url": "https://grafana.com" + }, + "logos": { + "small": "img/influxdb_logo.svg", + "large": "img/influxdb_logo.svg" + }, + "version": "5.1.0" + } +} \ No newline at end of file diff --git a/public/app/plugins/datasource/influxdb-ifql/query_ctrl.ts b/public/app/plugins/datasource/influxdb-ifql/query_ctrl.ts new file mode 100644 index 00000000000..950a3feb58e --- /dev/null +++ b/public/app/plugins/datasource/influxdb-ifql/query_ctrl.ts @@ -0,0 +1,17 @@ +import { QueryCtrl } from 'app/plugins/sdk'; + +export class InfluxQueryCtrl extends QueryCtrl { + static templateUrl = 'partials/query.editor.html'; + + resultFormats: any[]; + + /** @ngInject **/ + constructor($scope, $injector) { + super($scope, $injector); + this.resultFormats = [{ text: 'Time series', value: 'time_series' }, { text: 'Table', value: 'table' }]; + } + + getCollapsedText() { + return this.target.query; + } +} diff --git a/public/app/plugins/datasource/influxdb-ifql/response_parser.ts b/public/app/plugins/datasource/influxdb-ifql/response_parser.ts new file mode 100644 index 00000000000..e2ef753392c --- /dev/null +++ b/public/app/plugins/datasource/influxdb-ifql/response_parser.ts @@ -0,0 +1,88 @@ +import Papa from 'papaparse'; +import groupBy from 'lodash/groupBy'; + +import TableModel from 'app/core/table_model'; + +const filterColumnKeys = key => key && key[0] !== '_' && key !== 'result' && key !== 'table'; + +const IGNORE_FIELDS_FOR_NAME = ['result', '', 'table']; +export const getNameFromRecord = record => { + // Measurement and field + const metric = [record._measurement, record._field]; + + // Add tags + const tags = Object.keys(record) + .filter(key => key[0] !== '_') + .filter(key => IGNORE_FIELDS_FOR_NAME.indexOf(key) === -1) + .map(key => `${key}=${record[key]}`); + + return [...metric, ...tags].join(' '); +}; + +const parseCSV = (input: string) => + Papa.parse(input, { + header: true, + comments: '#', + }).data; + +export const parseValue = (input: string) => { + const value = parseFloat(input); + return isNaN(value) ? 
null : value;
+};
+
+export const parseTime = (input: string) => Date.parse(input);
+
+export function parseResults(response: string): any[] {
+  return response.trim().split(/\n\s*\s/);
+}
+
+export function getTableModelFromResult(result: string) {
+  const data = parseCSV(result);
+
+  const table = new TableModel();
+  if (data.length > 0) {
+    // First columns are fixed
+    const firstColumns = [
+      { text: 'Time', id: '_time' },
+      { text: 'Measurement', id: '_measurement' },
+      { text: 'Field', id: '_field' },
+    ];
+
+    // Dynamically add columns for tags
+    const firstRecord = data[0];
+    const tags = Object.keys(firstRecord)
+      .filter(filterColumnKeys)
+      .map(key => ({ id: key, text: key }));
+
+    const valueColumn = { id: '_value', text: 'Value' };
+    const columns = [...firstColumns, ...tags, valueColumn];
+    columns.forEach(c => table.addColumn(c));
+
+    // Add rows
+    data.forEach(record => {
+      const row = columns.map(c => record[c.id]);
+      table.addRow(row);
+    });
+  }
+
+  return table;
+}
+
+export function getTimeSeriesFromResult(result: string) {
+  const data = parseCSV(result);
+  if (data.length === 0) {
+    return [];
+  }
+
+  // Group results by table ID (assume one table per timeseries for now)
+  const tables = groupBy(data, 'table');
+  const seriesList = Object.keys(tables)
+    .map(id => tables[id])
+    .map(series => {
+      const datapoints = series.map(record => [parseValue(record._value), parseTime(record._time)]);
+      const alias = getNameFromRecord(series[0]);
+      return { datapoints, target: alias };
+    });
+
+  return seriesList;
+}
diff --git a/public/app/plugins/datasource/influxdb-ifql/specs/response_parser.jest.ts b/public/app/plugins/datasource/influxdb-ifql/specs/response_parser.jest.ts
new file mode 100644
index 00000000000..bac154c0760
--- /dev/null
+++ b/public/app/plugins/datasource/influxdb-ifql/specs/response_parser.jest.ts
@@ -0,0 +1,63 @@
+import {
+  getNameFromRecord,
+  getTableModelFromResult,
+  getTimeSeriesFromResult,
+  parseResults,
+  parseValue,
+} from '../response_parser';
+import response from './sample_response_csv';
+
+describe('influxdb ifql response parser', () => {
+  describe('parseResults()', () => {
+    it('expects two results', () => {
+      const results = parseResults(response);
+      expect(results.length).toBe(2);
+    });
+  });
+
+  describe('getTableModelFromResult()', () => {
+    it('expects a table model', () => {
+      const results = parseResults(response);
+      const table = getTableModelFromResult(results[0]);
+      expect(table.columns.length).toBe(6);
+      expect(table.rows.length).toBe(300);
+    });
+  });
+
+  describe('getTimeSeriesFromResult()', () => {
+    it('expects time series', () => {
+      const results = parseResults(response);
+      const series = getTimeSeriesFromResult(results[0]);
+      expect(series.length).toBe(50);
+      expect(series[0].datapoints.length).toBe(6);
+    });
+  });
+
+  describe('getNameFromRecord()', () => {
+    it('expects name based on measurements and tags', () => {
+      const record = {
+        '': '',
+        result: '',
+        table: '0',
+        _start: '2018-06-02T06:35:25.651942602Z',
+        _stop: '2018-06-02T07:35:25.651942602Z',
+        _time: '2018-06-02T06:35:31Z',
+        _value: '0',
+        _field: 'usage_guest',
+        _measurement: 'cpu',
+        cpu: 'cpu-total',
+        host: 'kenobi-3.local',
+      };
+      expect(getNameFromRecord(record)).toBe('cpu usage_guest cpu=cpu-total host=kenobi-3.local');
+    });
+  });
+
+  describe('parseValue()', () => {
+    it('parses a number', () => {
+      expect(parseValue('42.3')).toBe(42.3);
+    });
+    it('parses a non-number to null', () => {
+      expect(parseValue('foo')).toBe(null);
+    });
+  });
+});
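Tying the parser and its tests together: a response body is split into result blocks on blank lines, each block is CSV-parsed and grouped by the `table` column, and series names come from measurement, field, and tags. A self-contained sketch of the naming and datapoint shape, applied to a row shaped like the sample CSV below (plain TypeScript; it mirrors, rather than imports, the module above):

```ts
// Sketch of the naming logic used by getNameFromRecord() above.
type Row = { [key: string]: string };

const IGNORED = ['result', '', 'table'];

function nameFromRow(row: Row): string {
  // Tags are every non-underscore, non-bookkeeping column.
  const tags = Object.keys(row)
    .filter(key => key[0] !== '_' && IGNORED.indexOf(key) === -1)
    .map(key => `${key}=${row[key]}`);
  return [row._measurement, row._field, ...tags].join(' ');
}

const row: Row = {
  table: '0',
  _time: '2018-06-01T12:54:23Z',
  _value: '81.87046761690422',
  _field: 'usage_idle',
  _measurement: 'cpu',
  cpu: 'cpu-total',
  host: 'kenobi-3.local',
};

// One datapoint in Grafana's [value, epoch-ms] order, as getTimeSeriesFromResult builds them.
const datapoint = [parseFloat(row._value), Date.parse(row._time)];

console.log(nameFromRow(row)); // "cpu usage_idle cpu=cpu-total host=kenobi-3.local"
console.log(datapoint); // [81.87046761690422, 1527857663000]
```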
diff --git a/public/app/plugins/datasource/influxdb-ifql/specs/sample_response_csv.ts b/public/app/plugins/datasource/influxdb-ifql/specs/sample_response_csv.ts new file mode 100644 index 00000000000..2c7c0194684 --- /dev/null +++ b/public/app/plugins/datasource/influxdb-ifql/specs/sample_response_csv.ts @@ -0,0 +1,349 @@ +const result = `#datatype,string,long,dateTime:RFC3339,dateTime:RFC3339,dateTime:RFC3339,double,string,string,string,string +#partition,false,false,true,true,false,false,true,true,true,true +#default,_result,,,,,,,,, +,result,table,_start,_stop,_time,_value,_field,_measurement,cpu,host +,,0,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:23Z,0,usage_guest,cpu,cpu-total,kenobi-3.local +,,0,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:33Z,0,usage_guest,cpu,cpu-total,kenobi-3.local +,,0,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:43Z,0,usage_guest,cpu,cpu-total,kenobi-3.local +,,0,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:53Z,0,usage_guest,cpu,cpu-total,kenobi-3.local +,,0,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:55:03Z,0,usage_guest,cpu,cpu-total,kenobi-3.local +,,0,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:55:13Z,0,usage_guest,cpu,cpu-total,kenobi-3.local +,,1,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:23Z,0,usage_guest_nice,cpu,cpu-total,kenobi-3.local +,,1,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:33Z,0,usage_guest_nice,cpu,cpu-total,kenobi-3.local +,,1,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:43Z,0,usage_guest_nice,cpu,cpu-total,kenobi-3.local +,,1,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:53Z,0,usage_guest_nice,cpu,cpu-total,kenobi-3.local +,,1,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:55:03Z,0,usage_guest_nice,cpu,cpu-total,kenobi-3.local +,,1,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:55:13Z,0,usage_guest_nice,cpu,cpu-total,kenobi-3.local +,,2,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:23Z,81.87046761690422,usage_idle,cpu,cpu-total,kenobi-3.local +,,2,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:33Z,82.03398300849575,usage_idle,cpu,cpu-total,kenobi-3.local +,,2,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:43Z,76.26186906546727,usage_idle,cpu,cpu-total,kenobi-3.local +,,2,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:53Z,79.65465465465465,usage_idle,cpu,cpu-total,kenobi-3.local +,,2,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:55:03Z,70.72195853110168,usage_idle,cpu,cpu-total,kenobi-3.local +,,2,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:55:13Z,69.86746686671668,usage_idle,cpu,cpu-total,kenobi-3.local +,,3,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:23Z,0,usage_iowait,cpu,cpu-total,kenobi-3.local +,,3,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:33Z,0,usage_iowait,cpu,cpu-total,kenobi-3.local +,,3,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:43Z,0,usage_iowait,cpu,cpu-total,kenobi-3.local 
+,,3,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:53Z,0,usage_iowait,cpu,cpu-total,kenobi-3.local +,,3,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:55:03Z,0,usage_iowait,cpu,cpu-total,kenobi-3.local +,,3,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:55:13Z,0,usage_iowait,cpu,cpu-total,kenobi-3.local +,,4,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:23Z,0,usage_irq,cpu,cpu-total,kenobi-3.local +,,4,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:33Z,0,usage_irq,cpu,cpu-total,kenobi-3.local +,,4,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:43Z,0,usage_irq,cpu,cpu-total,kenobi-3.local +,,4,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:53Z,0,usage_irq,cpu,cpu-total,kenobi-3.local +,,4,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:55:03Z,0,usage_irq,cpu,cpu-total,kenobi-3.local +,,4,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:55:13Z,0,usage_irq,cpu,cpu-total,kenobi-3.local +,,5,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:23Z,0,usage_nice,cpu,cpu-total,kenobi-3.local +,,5,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:33Z,0,usage_nice,cpu,cpu-total,kenobi-3.local +,,5,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:43Z,0,usage_nice,cpu,cpu-total,kenobi-3.local +,,5,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:53Z,0,usage_nice,cpu,cpu-total,kenobi-3.local +,,5,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:55:03Z,0,usage_nice,cpu,cpu-total,kenobi-3.local +,,5,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:55:13Z,0,usage_nice,cpu,cpu-total,kenobi-3.local +,,6,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:23Z,0,usage_softirq,cpu,cpu-total,kenobi-3.local +,,6,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:33Z,0,usage_softirq,cpu,cpu-total,kenobi-3.local +,,6,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:43Z,0,usage_softirq,cpu,cpu-total,kenobi-3.local +,,6,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:53Z,0,usage_softirq,cpu,cpu-total,kenobi-3.local +,,6,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:55:03Z,0,usage_softirq,cpu,cpu-total,kenobi-3.local +,,6,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:55:13Z,0,usage_softirq,cpu,cpu-total,kenobi-3.local +,,7,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:23Z,0,usage_steal,cpu,cpu-total,kenobi-3.local +,,7,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:33Z,0,usage_steal,cpu,cpu-total,kenobi-3.local +,,7,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:43Z,0,usage_steal,cpu,cpu-total,kenobi-3.local +,,7,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:53Z,0,usage_steal,cpu,cpu-total,kenobi-3.local +,,7,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:55:03Z,0,usage_steal,cpu,cpu-total,kenobi-3.local +,,7,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:55:13Z,0,usage_steal,cpu,cpu-total,kenobi-3.local 
+,,8,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:23Z,6.25156289072268,usage_system,cpu,cpu-total,kenobi-3.local +,,8,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:33Z,8.045977011494253,usage_system,cpu,cpu-total,kenobi-3.local +,,8,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:43Z,8.79560219890055,usage_system,cpu,cpu-total,kenobi-3.local +,,8,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:53Z,8.408408408408409,usage_system,cpu,cpu-total,kenobi-3.local +,,8,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:55:03Z,11.64126904821384,usage_system,cpu,cpu-total,kenobi-3.local +,,8,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:55:13Z,13.078269567391848,usage_system,cpu,cpu-total,kenobi-3.local +,,9,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:23Z,11.877969492373094,usage_user,cpu,cpu-total,kenobi-3.local +,,9,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:33Z,9.920039980009996,usage_user,cpu,cpu-total,kenobi-3.local +,,9,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:43Z,14.942528735632184,usage_user,cpu,cpu-total,kenobi-3.local +,,9,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:53Z,11.936936936936936,usage_user,cpu,cpu-total,kenobi-3.local +,,9,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:55:03Z,17.636772420684487,usage_user,cpu,cpu-total,kenobi-3.local +,,9,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:55:13Z,17.05426356589147,usage_user,cpu,cpu-total,kenobi-3.local +,,10,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:23Z,0,usage_guest,cpu,cpu0,kenobi-3.local +,,10,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:33Z,0,usage_guest,cpu,cpu0,kenobi-3.local +,,10,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:43Z,0,usage_guest,cpu,cpu0,kenobi-3.local +,,10,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:53Z,0,usage_guest,cpu,cpu0,kenobi-3.local +,,10,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:55:03Z,0,usage_guest,cpu,cpu0,kenobi-3.local +,,10,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:55:13Z,0,usage_guest,cpu,cpu0,kenobi-3.local +,,11,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:23Z,0,usage_guest_nice,cpu,cpu0,kenobi-3.local +,,11,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:33Z,0,usage_guest_nice,cpu,cpu0,kenobi-3.local +,,11,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:43Z,0,usage_guest_nice,cpu,cpu0,kenobi-3.local +,,11,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:53Z,0,usage_guest_nice,cpu,cpu0,kenobi-3.local +,,11,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:55:03Z,0,usage_guest_nice,cpu,cpu0,kenobi-3.local +,,11,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:55:13Z,0,usage_guest_nice,cpu,cpu0,kenobi-3.local +,,12,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:23Z,73.1,usage_idle,cpu,cpu0,kenobi-3.local 
+,,12,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:33Z,69.03096903096903,usage_idle,cpu,cpu0,kenobi-3.local +,,12,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:43Z,63.63636363636363,usage_idle,cpu,cpu0,kenobi-3.local +,,12,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:53Z,67.86786786786787,usage_idle,cpu,cpu0,kenobi-3.local +,,12,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:55:03Z,57.4,usage_idle,cpu,cpu0,kenobi-3.local +,,12,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:55:13Z,57.8,usage_idle,cpu,cpu0,kenobi-3.local +,,13,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:23Z,0,usage_iowait,cpu,cpu0,kenobi-3.local +,,13,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:33Z,0,usage_iowait,cpu,cpu0,kenobi-3.local +,,13,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:43Z,0,usage_iowait,cpu,cpu0,kenobi-3.local +,,13,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:53Z,0,usage_iowait,cpu,cpu0,kenobi-3.local +,,13,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:55:03Z,0,usage_iowait,cpu,cpu0,kenobi-3.local +,,13,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:55:13Z,0,usage_iowait,cpu,cpu0,kenobi-3.local +,,14,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:23Z,0,usage_irq,cpu,cpu0,kenobi-3.local +,,14,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:33Z,0,usage_irq,cpu,cpu0,kenobi-3.local +,,14,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:43Z,0,usage_irq,cpu,cpu0,kenobi-3.local +,,14,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:53Z,0,usage_irq,cpu,cpu0,kenobi-3.local +,,14,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:55:03Z,0,usage_irq,cpu,cpu0,kenobi-3.local +,,14,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:55:13Z,0,usage_irq,cpu,cpu0,kenobi-3.local +,,15,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:23Z,0,usage_nice,cpu,cpu0,kenobi-3.local +,,15,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:33Z,0,usage_nice,cpu,cpu0,kenobi-3.local +,,15,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:43Z,0,usage_nice,cpu,cpu0,kenobi-3.local +,,15,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:53Z,0,usage_nice,cpu,cpu0,kenobi-3.local +,,15,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:55:03Z,0,usage_nice,cpu,cpu0,kenobi-3.local +,,15,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:55:13Z,0,usage_nice,cpu,cpu0,kenobi-3.local +,,16,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:23Z,0,usage_softirq,cpu,cpu0,kenobi-3.local +,,16,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:33Z,0,usage_softirq,cpu,cpu0,kenobi-3.local +,,16,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:43Z,0,usage_softirq,cpu,cpu0,kenobi-3.local +,,16,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:53Z,0,usage_softirq,cpu,cpu0,kenobi-3.local 
+,,16,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:55:03Z,0,usage_softirq,cpu,cpu0,kenobi-3.local +,,16,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:55:13Z,0,usage_softirq,cpu,cpu0,kenobi-3.local +,,17,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:23Z,0,usage_steal,cpu,cpu0,kenobi-3.local +,,17,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:33Z,0,usage_steal,cpu,cpu0,kenobi-3.local +,,17,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:43Z,0,usage_steal,cpu,cpu0,kenobi-3.local +,,17,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:53Z,0,usage_steal,cpu,cpu0,kenobi-3.local +,,17,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:55:03Z,0,usage_steal,cpu,cpu0,kenobi-3.local +,,17,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:55:13Z,0,usage_steal,cpu,cpu0,kenobi-3.local +,,18,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:23Z,9.6,usage_system,cpu,cpu0,kenobi-3.local +,,18,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:33Z,14.985014985014985,usage_system,cpu,cpu0,kenobi-3.local +,,18,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:43Z,14.185814185814186,usage_system,cpu,cpu0,kenobi-3.local +,,18,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:53Z,13.813813813813814,usage_system,cpu,cpu0,kenobi-3.local +,,18,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:55:03Z,17.9,usage_system,cpu,cpu0,kenobi-3.local +,,18,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:55:13Z,20,usage_system,cpu,cpu0,kenobi-3.local +,,19,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:23Z,17.3,usage_user,cpu,cpu0,kenobi-3.local +,,19,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:33Z,15.984015984015985,usage_user,cpu,cpu0,kenobi-3.local +,,19,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:43Z,22.17782217782218,usage_user,cpu,cpu0,kenobi-3.local +,,19,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:53Z,18.31831831831832,usage_user,cpu,cpu0,kenobi-3.local +,,19,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:55:03Z,24.7,usage_user,cpu,cpu0,kenobi-3.local +,,19,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:55:13Z,22.2,usage_user,cpu,cpu0,kenobi-3.local +,,20,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:23Z,0,usage_guest,cpu,cpu1,kenobi-3.local +,,20,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:33Z,0,usage_guest,cpu,cpu1,kenobi-3.local +,,20,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:43Z,0,usage_guest,cpu,cpu1,kenobi-3.local +,,20,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:53Z,0,usage_guest,cpu,cpu1,kenobi-3.local +,,20,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:55:03Z,0,usage_guest,cpu,cpu1,kenobi-3.local +,,20,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:55:13Z,0,usage_guest,cpu,cpu1,kenobi-3.local +,,21,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:23Z,0,usage_guest_nice,cpu,cpu1,kenobi-3.local 
+,,21,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:33Z,0,usage_guest_nice,cpu,cpu1,kenobi-3.local +,,21,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:43Z,0,usage_guest_nice,cpu,cpu1,kenobi-3.local +,,21,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:53Z,0,usage_guest_nice,cpu,cpu1,kenobi-3.local +,,21,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:55:03Z,0,usage_guest_nice,cpu,cpu1,kenobi-3.local +,,21,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:55:13Z,0,usage_guest_nice,cpu,cpu1,kenobi-3.local +,,22,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:23Z,89.8,usage_idle,cpu,cpu1,kenobi-3.local +,,22,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:33Z,91.8,usage_idle,cpu,cpu1,kenobi-3.local +,,22,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:43Z,87.11288711288711,usage_idle,cpu,cpu1,kenobi-3.local +,,22,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:53Z,89.48948948948949,usage_idle,cpu,cpu1,kenobi-3.local +,,22,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:55:03Z,83,usage_idle,cpu,cpu1,kenobi-3.local +,,22,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:55:13Z,80.1,usage_idle,cpu,cpu1,kenobi-3.local +,,23,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:23Z,0,usage_iowait,cpu,cpu1,kenobi-3.local +,,23,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:33Z,0,usage_iowait,cpu,cpu1,kenobi-3.local +,,23,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:43Z,0,usage_iowait,cpu,cpu1,kenobi-3.local +,,23,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:53Z,0,usage_iowait,cpu,cpu1,kenobi-3.local +,,23,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:55:03Z,0,usage_iowait,cpu,cpu1,kenobi-3.local +,,23,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:55:13Z,0,usage_iowait,cpu,cpu1,kenobi-3.local +,,24,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:23Z,0,usage_irq,cpu,cpu1,kenobi-3.local +,,24,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:33Z,0,usage_irq,cpu,cpu1,kenobi-3.local +,,24,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:43Z,0,usage_irq,cpu,cpu1,kenobi-3.local +,,24,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:53Z,0,usage_irq,cpu,cpu1,kenobi-3.local +,,24,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:55:03Z,0,usage_irq,cpu,cpu1,kenobi-3.local +,,24,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:55:13Z,0,usage_irq,cpu,cpu1,kenobi-3.local +,,25,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:23Z,0,usage_nice,cpu,cpu1,kenobi-3.local +,,25,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:33Z,0,usage_nice,cpu,cpu1,kenobi-3.local +,,25,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:43Z,0,usage_nice,cpu,cpu1,kenobi-3.local +,,25,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:53Z,0,usage_nice,cpu,cpu1,kenobi-3.local 
+,,25,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:55:03Z,0,usage_nice,cpu,cpu1,kenobi-3.local +,,25,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:55:13Z,0,usage_nice,cpu,cpu1,kenobi-3.local +,,26,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:23Z,0,usage_softirq,cpu,cpu1,kenobi-3.local +,,26,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:33Z,0,usage_softirq,cpu,cpu1,kenobi-3.local +,,26,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:43Z,0,usage_softirq,cpu,cpu1,kenobi-3.local +,,26,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:53Z,0,usage_softirq,cpu,cpu1,kenobi-3.local +,,26,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:55:03Z,0,usage_softirq,cpu,cpu1,kenobi-3.local +,,26,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:55:13Z,0,usage_softirq,cpu,cpu1,kenobi-3.local +,,27,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:23Z,0,usage_steal,cpu,cpu1,kenobi-3.local +,,27,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:33Z,0,usage_steal,cpu,cpu1,kenobi-3.local +,,27,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:43Z,0,usage_steal,cpu,cpu1,kenobi-3.local +,,27,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:53Z,0,usage_steal,cpu,cpu1,kenobi-3.local +,,27,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:55:03Z,0,usage_steal,cpu,cpu1,kenobi-3.local +,,27,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:55:13Z,0,usage_steal,cpu,cpu1,kenobi-3.local +,,28,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:23Z,3.5,usage_system,cpu,cpu1,kenobi-3.local +,,28,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:33Z,4,usage_system,cpu,cpu1,kenobi-3.local +,,28,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:43Z,4.895104895104895,usage_system,cpu,cpu1,kenobi-3.local +,,28,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:53Z,4.504504504504505,usage_system,cpu,cpu1,kenobi-3.local +,,28,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:55:03Z,6.3,usage_system,cpu,cpu1,kenobi-3.local +,,28,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:55:13Z,7.9,usage_system,cpu,cpu1,kenobi-3.local +,,29,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:23Z,6.7,usage_user,cpu,cpu1,kenobi-3.local +,,29,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:33Z,4.2,usage_user,cpu,cpu1,kenobi-3.local +,,29,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:43Z,7.992007992007992,usage_user,cpu,cpu1,kenobi-3.local +,,29,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:53Z,6.006006006006006,usage_user,cpu,cpu1,kenobi-3.local +,,29,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:55:03Z,10.7,usage_user,cpu,cpu1,kenobi-3.local +,,29,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:55:13Z,12,usage_user,cpu,cpu1,kenobi-3.local +,,30,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:23Z,0,usage_guest,cpu,cpu2,kenobi-3.local 
+,,30,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:33Z,0,usage_guest,cpu,cpu2,kenobi-3.local +,,30,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:43Z,0,usage_guest,cpu,cpu2,kenobi-3.local +,,30,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:53Z,0,usage_guest,cpu,cpu2,kenobi-3.local +,,30,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:55:03Z,0,usage_guest,cpu,cpu2,kenobi-3.local +,,30,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:55:13Z,0,usage_guest,cpu,cpu2,kenobi-3.local +,,31,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:23Z,0,usage_guest_nice,cpu,cpu2,kenobi-3.local +,,31,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:33Z,0,usage_guest_nice,cpu,cpu2,kenobi-3.local +,,31,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:43Z,0,usage_guest_nice,cpu,cpu2,kenobi-3.local +,,31,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:53Z,0,usage_guest_nice,cpu,cpu2,kenobi-3.local +,,31,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:55:03Z,0,usage_guest_nice,cpu,cpu2,kenobi-3.local +,,31,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:55:13Z,0,usage_guest_nice,cpu,cpu2,kenobi-3.local +,,32,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:23Z,75.17517517517517,usage_idle,cpu,cpu2,kenobi-3.local +,,32,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:33Z,74.82517482517483,usage_idle,cpu,cpu2,kenobi-3.local +,,32,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:43Z,67.9,usage_idle,cpu,cpu2,kenobi-3.local +,,32,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:53Z,72.47247247247248,usage_idle,cpu,cpu2,kenobi-3.local +,,32,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:55:03Z,61.63836163836164,usage_idle,cpu,cpu2,kenobi-3.local +,,32,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:55:13Z,62,usage_idle,cpu,cpu2,kenobi-3.local +,,33,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:23Z,0,usage_iowait,cpu,cpu2,kenobi-3.local +,,33,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:33Z,0,usage_iowait,cpu,cpu2,kenobi-3.local +,,33,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:43Z,0,usage_iowait,cpu,cpu2,kenobi-3.local +,,33,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:53Z,0,usage_iowait,cpu,cpu2,kenobi-3.local +,,33,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:55:03Z,0,usage_iowait,cpu,cpu2,kenobi-3.local +,,33,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:55:13Z,0,usage_iowait,cpu,cpu2,kenobi-3.local +,,34,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:23Z,0,usage_irq,cpu,cpu2,kenobi-3.local +,,34,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:33Z,0,usage_irq,cpu,cpu2,kenobi-3.local +,,34,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:43Z,0,usage_irq,cpu,cpu2,kenobi-3.local +,,34,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:53Z,0,usage_irq,cpu,cpu2,kenobi-3.local 
+,,34,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:55:03Z,0,usage_irq,cpu,cpu2,kenobi-3.local +,,34,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:55:13Z,0,usage_irq,cpu,cpu2,kenobi-3.local +,,35,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:23Z,0,usage_nice,cpu,cpu2,kenobi-3.local +,,35,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:33Z,0,usage_nice,cpu,cpu2,kenobi-3.local +,,35,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:43Z,0,usage_nice,cpu,cpu2,kenobi-3.local +,,35,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:53Z,0,usage_nice,cpu,cpu2,kenobi-3.local +,,35,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:55:03Z,0,usage_nice,cpu,cpu2,kenobi-3.local +,,35,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:55:13Z,0,usage_nice,cpu,cpu2,kenobi-3.local +,,36,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:23Z,0,usage_softirq,cpu,cpu2,kenobi-3.local +,,36,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:33Z,0,usage_softirq,cpu,cpu2,kenobi-3.local +,,36,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:43Z,0,usage_softirq,cpu,cpu2,kenobi-3.local +,,36,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:53Z,0,usage_softirq,cpu,cpu2,kenobi-3.local +,,36,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:55:03Z,0,usage_softirq,cpu,cpu2,kenobi-3.local +,,36,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:55:13Z,0,usage_softirq,cpu,cpu2,kenobi-3.local +,,37,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:23Z,0,usage_steal,cpu,cpu2,kenobi-3.local +,,37,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:33Z,0,usage_steal,cpu,cpu2,kenobi-3.local +,,37,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:43Z,0,usage_steal,cpu,cpu2,kenobi-3.local +,,37,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:53Z,0,usage_steal,cpu,cpu2,kenobi-3.local +,,37,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:55:03Z,0,usage_steal,cpu,cpu2,kenobi-3.local +,,37,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:55:13Z,0,usage_steal,cpu,cpu2,kenobi-3.local +,,38,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:23Z,8.208208208208209,usage_system,cpu,cpu2,kenobi-3.local +,,38,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:33Z,9.99000999000999,usage_system,cpu,cpu2,kenobi-3.local +,,38,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:43Z,11.2,usage_system,cpu,cpu2,kenobi-3.local +,,38,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:53Z,10.81081081081081,usage_system,cpu,cpu2,kenobi-3.local +,,38,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:55:03Z,14.785214785214785,usage_system,cpu,cpu2,kenobi-3.local +,,38,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:55:13Z,16.2,usage_system,cpu,cpu2,kenobi-3.local +,,39,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:23Z,16.616616616616618,usage_user,cpu,cpu2,kenobi-3.local 
+,,39,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:33Z,15.184815184815184,usage_user,cpu,cpu2,kenobi-3.local +,,39,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:43Z,20.9,usage_user,cpu,cpu2,kenobi-3.local +,,39,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:53Z,16.716716716716718,usage_user,cpu,cpu2,kenobi-3.local +,,39,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:55:03Z,23.576423576423576,usage_user,cpu,cpu2,kenobi-3.local +,,39,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:55:13Z,21.8,usage_user,cpu,cpu2,kenobi-3.local +,,40,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:23Z,0,usage_guest,cpu,cpu3,kenobi-3.local +,,40,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:33Z,0,usage_guest,cpu,cpu3,kenobi-3.local +,,40,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:43Z,0,usage_guest,cpu,cpu3,kenobi-3.local +,,40,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:53Z,0,usage_guest,cpu,cpu3,kenobi-3.local +,,40,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:55:03Z,0,usage_guest,cpu,cpu3,kenobi-3.local +,,40,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:55:13Z,0,usage_guest,cpu,cpu3,kenobi-3.local +,,41,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:23Z,0,usage_guest_nice,cpu,cpu3,kenobi-3.local +,,41,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:33Z,0,usage_guest_nice,cpu,cpu3,kenobi-3.local +,,41,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:43Z,0,usage_guest_nice,cpu,cpu3,kenobi-3.local +,,41,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:53Z,0,usage_guest_nice,cpu,cpu3,kenobi-3.local +,,41,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:55:03Z,0,usage_guest_nice,cpu,cpu3,kenobi-3.local +,,41,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:55:13Z,0,usage_guest_nice,cpu,cpu3,kenobi-3.local +,,42,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:23Z,89.4,usage_idle,cpu,cpu3,kenobi-3.local +,,42,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:33Z,92.5,usage_idle,cpu,cpu3,kenobi-3.local +,,42,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:43Z,86.4,usage_idle,cpu,cpu3,kenobi-3.local +,,42,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:53Z,88.78878878878879,usage_idle,cpu,cpu3,kenobi-3.local +,,42,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:55:03Z,80.83832335329342,usage_idle,cpu,cpu3,kenobi-3.local +,,42,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:55:13Z,79.57957957957957,usage_idle,cpu,cpu3,kenobi-3.local +,,43,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:23Z,0,usage_iowait,cpu,cpu3,kenobi-3.local +,,43,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:33Z,0,usage_iowait,cpu,cpu3,kenobi-3.local +,,43,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:43Z,0,usage_iowait,cpu,cpu3,kenobi-3.local 
+,,43,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:53Z,0,usage_iowait,cpu,cpu3,kenobi-3.local +,,43,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:55:03Z,0,usage_iowait,cpu,cpu3,kenobi-3.local +,,43,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:55:13Z,0,usage_iowait,cpu,cpu3,kenobi-3.local +,,44,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:23Z,0,usage_irq,cpu,cpu3,kenobi-3.local +,,44,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:33Z,0,usage_irq,cpu,cpu3,kenobi-3.local +,,44,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:43Z,0,usage_irq,cpu,cpu3,kenobi-3.local +,,44,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:53Z,0,usage_irq,cpu,cpu3,kenobi-3.local +,,44,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:55:03Z,0,usage_irq,cpu,cpu3,kenobi-3.local +,,44,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:55:13Z,0,usage_irq,cpu,cpu3,kenobi-3.local +,,45,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:23Z,0,usage_nice,cpu,cpu3,kenobi-3.local +,,45,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:33Z,0,usage_nice,cpu,cpu3,kenobi-3.local +,,45,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:43Z,0,usage_nice,cpu,cpu3,kenobi-3.local +,,45,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:53Z,0,usage_nice,cpu,cpu3,kenobi-3.local +,,45,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:55:03Z,0,usage_nice,cpu,cpu3,kenobi-3.local +,,45,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:55:13Z,0,usage_nice,cpu,cpu3,kenobi-3.local +,,46,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:23Z,0,usage_softirq,cpu,cpu3,kenobi-3.local +,,46,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:33Z,0,usage_softirq,cpu,cpu3,kenobi-3.local +,,46,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:43Z,0,usage_softirq,cpu,cpu3,kenobi-3.local +,,46,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:53Z,0,usage_softirq,cpu,cpu3,kenobi-3.local +,,46,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:55:03Z,0,usage_softirq,cpu,cpu3,kenobi-3.local +,,46,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:55:13Z,0,usage_softirq,cpu,cpu3,kenobi-3.local +,,47,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:23Z,0,usage_steal,cpu,cpu3,kenobi-3.local +,,47,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:33Z,0,usage_steal,cpu,cpu3,kenobi-3.local +,,47,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:43Z,0,usage_steal,cpu,cpu3,kenobi-3.local +,,47,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:53Z,0,usage_steal,cpu,cpu3,kenobi-3.local +,,47,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:55:03Z,0,usage_steal,cpu,cpu3,kenobi-3.local +,,47,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:55:13Z,0,usage_steal,cpu,cpu3,kenobi-3.local 
+,,48,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:23Z,3.7,usage_system,cpu,cpu3,kenobi-3.local +,,48,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:33Z,3.2,usage_system,cpu,cpu3,kenobi-3.local +,,48,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:43Z,4.9,usage_system,cpu,cpu3,kenobi-3.local +,,48,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:53Z,4.504504504504505,usage_system,cpu,cpu3,kenobi-3.local +,,48,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:55:03Z,7.584830339321357,usage_system,cpu,cpu3,kenobi-3.local +,,48,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:55:13Z,8.208208208208209,usage_system,cpu,cpu3,kenobi-3.local +,,49,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:23Z,6.9,usage_user,cpu,cpu3,kenobi-3.local +,,49,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:33Z,4.3,usage_user,cpu,cpu3,kenobi-3.local +,,49,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:43Z,8.7,usage_user,cpu,cpu3,kenobi-3.local +,,49,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:53Z,6.706706706706707,usage_user,cpu,cpu3,kenobi-3.local +,,49,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:55:03Z,11.57684630738523,usage_user,cpu,cpu3,kenobi-3.local +,,49,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:55:13Z,12.212212212212211,usage_user,cpu,cpu3,kenobi-3.local + +#datatype,string,long,dateTime:RFC3339,dateTime:RFC3339,dateTime:RFC3339,long,string,string,string,string,string,string,string +#partition,false,false,true,true,false,false,true,true,true,true,true,true,true +#default,_result,,,,,,,,,,,, +,result,table,_start,_stop,_time,_value,_field,_measurement,device,fstype,host,mode,path +,,50,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:23Z,9024180224,free,disk,disk1,hfs,kenobi-3.local,rw,/ +,,50,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:33Z,9025056768,free,disk,disk1,hfs,kenobi-3.local,rw,/ +,,50,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:43Z,9024774144,free,disk,disk1,hfs,kenobi-3.local,rw,/ +,,50,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:53Z,9024638976,free,disk,disk1,hfs,kenobi-3.local,rw,/ +,,50,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:55:03Z,9024299008,free,disk,disk1,hfs,kenobi-3.local,rw,/ +,,50,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:55:13Z,9024036864,free,disk,disk1,hfs,kenobi-3.local,rw,/ +,,51,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:23Z,4290025660,inodes_free,disk,disk1,hfs,kenobi-3.local,rw,/ +,,51,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:33Z,4290025659,inodes_free,disk,disk1,hfs,kenobi-3.local,rw,/ +,,51,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:43Z,4290025659,inodes_free,disk,disk1,hfs,kenobi-3.local,rw,/ +,,51,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:53Z,4290025660,inodes_free,disk,disk1,hfs,kenobi-3.local,rw,/ +,,51,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:55:03Z,4290025660,inodes_free,disk,disk1,hfs,kenobi-3.local,rw,/ 
+,,51,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:55:13Z,4290025657,inodes_free,disk,disk1,hfs,kenobi-3.local,rw,/ +,,52,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:23Z,4294967279,inodes_total,disk,disk1,hfs,kenobi-3.local,rw,/ +,,52,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:33Z,4294967279,inodes_total,disk,disk1,hfs,kenobi-3.local,rw,/ +,,52,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:43Z,4294967279,inodes_total,disk,disk1,hfs,kenobi-3.local,rw,/ +,,52,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:53Z,4294967279,inodes_total,disk,disk1,hfs,kenobi-3.local,rw,/ +,,52,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:55:03Z,4294967279,inodes_total,disk,disk1,hfs,kenobi-3.local,rw,/ +,,52,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:55:13Z,4294967279,inodes_total,disk,disk1,hfs,kenobi-3.local,rw,/ +,,53,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:23Z,4941619,inodes_used,disk,disk1,hfs,kenobi-3.local,rw,/ +,,53,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:33Z,4941620,inodes_used,disk,disk1,hfs,kenobi-3.local,rw,/ +,,53,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:43Z,4941620,inodes_used,disk,disk1,hfs,kenobi-3.local,rw,/ +,,53,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:53Z,4941619,inodes_used,disk,disk1,hfs,kenobi-3.local,rw,/ +,,53,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:55:03Z,4941619,inodes_used,disk,disk1,hfs,kenobi-3.local,rw,/ +,,53,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:55:13Z,4941622,inodes_used,disk,disk1,hfs,kenobi-3.local,rw,/ +,,54,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:23Z,249804886016,total,disk,disk1,hfs,kenobi-3.local,rw,/ +,,54,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:33Z,249804886016,total,disk,disk1,hfs,kenobi-3.local,rw,/ +,,54,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:43Z,249804886016,total,disk,disk1,hfs,kenobi-3.local,rw,/ +,,54,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:53Z,249804886016,total,disk,disk1,hfs,kenobi-3.local,rw,/ +,,54,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:55:03Z,249804886016,total,disk,disk1,hfs,kenobi-3.local,rw,/ +,,54,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:55:13Z,249804886016,total,disk,disk1,hfs,kenobi-3.local,rw,/ +,,55,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:23Z,240518561792,used,disk,disk1,hfs,kenobi-3.local,rw,/ +,,55,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:33Z,240517685248,used,disk,disk1,hfs,kenobi-3.local,rw,/ +,,55,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:43Z,240517967872,used,disk,disk1,hfs,kenobi-3.local,rw,/ +,,55,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:54:53Z,240518103040,used,disk,disk1,hfs,kenobi-3.local,rw,/ +,,55,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:55:03Z,240518443008,used,disk,disk1,hfs,kenobi-3.local,rw,/ 
+,,55,2018-06-01T12:54:13.516195939Z,2018-06-01T12:55:13.516195939Z,2018-06-01T12:55:13Z,240518705152,used,disk,disk1,hfs,kenobi-3.local,rw,/ + +`; + +export default result; diff --git a/yarn.lock b/yarn.lock index f58731040c6..97435f665fb 100644 --- a/yarn.lock +++ b/yarn.lock @@ -7928,6 +7928,10 @@ pako@~1.0.5: version "1.0.6" resolved "https://registry.yarnpkg.com/pako/-/pako-1.0.6.tgz#0101211baa70c4bca4a0f63f2206e97b7dfaf258" +papaparse@^4.4.0: + version "4.4.0" + resolved "https://registry.yarnpkg.com/papaparse/-/papaparse-4.4.0.tgz#6bcdbda80873e00cfb0bdcd7a4571c72a9a40168" + parallel-transform@^1.1.0: version "1.1.0" resolved "https://registry.yarnpkg.com/parallel-transform/-/parallel-transform-1.1.0.tgz#d410f065b05da23081fcd10f28854c29bda33b06" From 0d5579b4c04fa7c04c3ae59950f962775a3f0777 Mon Sep 17 00:00:00 2001 From: Marcus Efraimsson Date: Mon, 4 Jun 2018 14:31:43 +0200 Subject: [PATCH 152/488] docs: what's new in v5.2 --- docs/sources/guides/whats-new-in-v5-2.md | 70 ++++++++++++++++++++++++ 1 file changed, 70 insertions(+) create mode 100644 docs/sources/guides/whats-new-in-v5-2.md diff --git a/docs/sources/guides/whats-new-in-v5-2.md b/docs/sources/guides/whats-new-in-v5-2.md new file mode 100644 index 00000000000..8cff353ff45 --- /dev/null +++ b/docs/sources/guides/whats-new-in-v5-2.md @@ -0,0 +1,70 @@ ++++ +title = "What's New in Grafana v5.2" +description = "Feature & improvement highlights for Grafana v5.2" +keywords = ["grafana", "new", "documentation", "5.2"] +type = "docs" +[menu.docs] +name = "Version 5.2" +identifier = "v5.2" +parent = "whatsnew" +weight = -8 ++++ + +# What's New in Grafana v5.2 + +Grafana v5.2 brings new features, many enhancements and bug fixes. This article will detail the major new features and enhancements. + +* [Elasticsearch alerting]({{< relref "#elasticsearch-alerting" >}}) it's finally here! +* [Cross platform build support]({{< relref "#cross-platform-build-support" >}}) enables native builds of Grafana for many more platforms! +* [Improved Docker image]({{< relref "#improved-docker-image" >}}) with support for docker secrets +* [Prometheus]({{< relref "#prometheus" >}}) with alignment enhancements +* [Alerting]({{< relref "#alerting" >}}) with alert notification channel type for Discord +* [Dashboards & Panels]({{< relref "#dashboards-panels" >}}) + +## Elasticsearch alerting + +{{< docs-imagebox img="/img/docs/v52/elasticsearch_alerting.png" max-width="800px" class="docs-image--right" >}} + +Grafana v5.2 ships with an updated Elasticsearch datasource with support for alerting. Alerting support for Elasticsearch has been one of +the most requested features by our community and now it's finally here. Please try it out and let us know what you think. + +
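The alerting work above is a good place to make the mechanics concrete: evaluating an alert rule server-side means the Go backend must build the same kind of search request the browser-based query editor produces. The following is a minimal, self-contained sketch of such a request body, a time-range filter plus a `date_histogram` bucket aggregation carrying an `avg` metric. The field names (`@timestamp`, `value`), the aggregation names, and the fixed interval are illustrative assumptions, not the actual executor's output.

```go
package main

import (
	"encoding/json"
	"fmt"
)

// buildQuery assembles a minimal Elasticsearch search body of the kind an
// alert evaluation needs: documents filtered to the rule's time range,
// bucketed by a date_histogram, with an avg metric computed per bucket.
func buildQuery(fromMs, toMs int64, interval string) ([]byte, error) {
	body := map[string]interface{}{
		// size 0: only aggregation buckets matter, individual hits do not
		"size": 0,
		"query": map[string]interface{}{
			"bool": map[string]interface{}{
				"filter": []interface{}{
					map[string]interface{}{
						"range": map[string]interface{}{
							"@timestamp": map[string]interface{}{
								"gte":    fromMs,
								"lte":    toMs,
								"format": "epoch_millis",
							},
						},
					},
				},
			},
		},
		"aggs": map[string]interface{}{
			"time_buckets": map[string]interface{}{
				"date_histogram": map[string]interface{}{
					"field":         "@timestamp",
					"interval":      interval,
					"min_doc_count": 0,
				},
				"aggs": map[string]interface{}{
					"avg_value": map[string]interface{}{
						"avg": map[string]interface{}{"field": "value"},
					},
				},
			},
		},
	}
	return json.MarshalIndent(body, "", "  ")
}

func main() {
	b, err := buildQuery(1527854053000, 1527857653000, "1m")
	if err != nil {
		panic(err)
	}
	fmt.Println(string(b))
}
```

The response to such a request is a tree of buckets rather than rows, which is also why the response parser patch later in this series deals with bucket traversal.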
+
+## Cross platform build support
+
+Grafana v5.2 brings an improved build pipeline with cross platform support. This enables native builds of Grafana for ARMv7 (x32), ARM64 (x64),
+MacOS/Darwin (x64), and Windows (x64) in both stable and nightly builds.
+
+We've been longing for native ARM build support for a long time. With help from our amazing community, this is now finally available.
+
+## Improved Docker image
+
+The Grafana docker image now includes support for Docker secrets, which enables you to supply Grafana with configuration through files. More
+information can be found in the [Installing using Docker documentation](/installation/docker/#reading-secrets-from-files-support-for-docker-secrets).
+
+## Prometheus
+
+The Prometheus datasource now aligns the start/end of the query sent to Prometheus with the step, which ensures PromQL expressions with *rate*
+functions get consistent results, and thus avoids graphs jumping around on reload.
+
+## Alerting
+
+By popular demand, Grafana now includes support for an alert notification channel type for [Discord](https://discordapp.com/).
+
+## Dashboards & Panels
+
+### Modified time range and variables are no longer saved by default
+
+{{< docs-imagebox img="/img/docs/v52/dashboard_save_modal.png" max-width="800px" class="docs-image--right" >}}
+
+Starting from Grafana v5.2, a modified time range or variable is no longer saved by default. To save a modified
+time range or variable, you'll need to actively select that option when saving a dashboard (see screenshot).
+This should make it easier to keep sane defaults for time and variables in dashboards, and make it more explicit
+when you actually want to overwrite those settings.
+
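The Prometheus alignment described above is easy to picture in code. Below is a sketch of one way to snap a query range to the step: both ends are floored to a multiple of the step, so two refreshes issued seconds apart still ask for identical window boundaries. The function name and the millisecond units are assumptions for illustration, not the datasource's actual implementation.

```go
package main

import "fmt"

// alignRange floors both ends of a query window to a multiple of the
// step, so repeated dashboard refreshes evaluate expressions such as
// rate() over identical bucket boundaries instead of shifting windows.
func alignRange(startMs, endMs, stepMs int64) (int64, int64) {
	return startMs - (startMs % stepMs), endMs - (endMs % stepMs)
}

func main() {
	// Two refreshes six seconds apart land in the same step bucket and
	// therefore produce the same aligned range.
	s1, e1 := alignRange(1528117453000, 1528121053000, 30000)
	s2, e2 := alignRange(1528117459000, 1528121059000, 30000)
	fmt.Println(s1, e1) // 1528117440000 1528121040000
	fmt.Println(s2, e2) // identical output
}
```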
+
+## Changelog
+
+Check out the [CHANGELOG.md](https://github.com/grafana/grafana/blob/master/CHANGELOG.md) file for a complete list
+of new features, changes, and bug fixes.

From 38906acda98a43302f3f688042dc40e757284495 Mon Sep 17 00:00:00 2001
From: Marcus Efraimsson
Date: Mon, 4 Jun 2018 15:15:47 +0200
Subject: [PATCH 153/488] elasticsearch: sort bucket keys to fix issue with
 response parser tests

---
 pkg/tsdb/elasticsearch/response_parser.go | 13 ++++++++++---
 1 file changed, 10 insertions(+), 3 deletions(-)

diff --git a/pkg/tsdb/elasticsearch/response_parser.go b/pkg/tsdb/elasticsearch/response_parser.go
index 4a45d6271b9..7bdab60389c 100644
--- a/pkg/tsdb/elasticsearch/response_parser.go
+++ b/pkg/tsdb/elasticsearch/response_parser.go
@@ -113,15 +113,22 @@ func (rp *responseParser) processBuckets(aggs map[string]interface{}, target *Qu
 		}
 	}

-	for k, v := range esAgg.Get("buckets").MustMap() {
-		bucket := simplejson.NewFromAny(v)
+	buckets := esAgg.Get("buckets").MustMap()
+	bucketKeys := make([]string, 0)
+	for k := range buckets {
+		bucketKeys = append(bucketKeys, k)
+	}
+	sort.Strings(bucketKeys)
+
+	for _, bucketKey := range bucketKeys {
+		bucket := simplejson.NewFromAny(buckets[bucketKey])

 		newProps := make(map[string]string, 0)

 		for k, v := range props {
 			newProps[k] = v
 		}

-		newProps["filter"] = k
+		newProps["filter"] = bucketKey

 		err = rp.processBuckets(bucket.MustMap(), target, series, table, newProps, depth+1)
 		if err != nil {

From c138ff2c903c4cb7b5844529dae70037e651a15e Mon Sep 17 00:00:00 2001
From: bergquist
Date: Mon, 4 Jun 2018 15:16:05 +0200
Subject: [PATCH 154/488] changelog: adds note about closing #11670

---
 CHANGELOG.md | 1 +
 1 file changed, 1 insertion(+)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 9eda912e86a..f11d06e990a 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -32,6 +32,7 @@
 * **Dashboard**: Validate uid when importing dashboards [#11515](https://github.com/grafana/grafana/issues/11515)
 * **Docker**: Support for env variables ending with _FILE [grafana-docker #166](https://github.com/grafana/grafana-docker/pull/166), thx [@efrecon](https://github.com/efrecon)
 * **Alert list panel**: Show alerts for user with viewer role [#11167](https://github.com/grafana/grafana/issues/11167)
+* **Provisioning**: Verify checksum of dashboards before updating to reduce load on database [#11670](https://github.com/grafana/grafana/issues/11670)

 # 5.1.3 (2018-05-16)

From d089b5e05dccfd60d49b802be3a28ec3530fb0e8 Mon Sep 17 00:00:00 2001
From: bergquist
Date: Mon, 4 Jun 2018 15:20:26 +0200
Subject: [PATCH 155/488] provisioning: turn relative symlinked paths into
 absolute paths

---
 pkg/services/provisioning/dashboards/file_reader.go | 9 +++++++++
 1 file changed, 9 insertions(+)

diff --git a/pkg/services/provisioning/dashboards/file_reader.go b/pkg/services/provisioning/dashboards/file_reader.go
index 8af23980531..3196c3a35af 100644
--- a/pkg/services/provisioning/dashboards/file_reader.go
+++ b/pkg/services/provisioning/dashboards/file_reader.go
@@ -48,16 +48,25 @@ func NewDashboardFileReader(cfg *DashboardsAsConfig, log log.Logger) (*fileReade
 	}

 	copy := path
+
+	// get absolute path of config file
 	path, err := filepath.Abs(path)
 	if err != nil {
 		log.Error("Could not create absolute path ", "path", path)
 	}

+	// follow the symlink to get the real path
 	path, err = filepath.EvalSymlinks(path)
 	if err != nil {
 		log.Error("Failed to read content of symlinked path: %s", path)
 	}

+	// get the absolute path in case the symlink is relative
+	path, err = filepath.Abs(path)
+	if err != nil {
log.Error("Could not create absolute path ", "path", path) + } + if path == "" { path = copy log.Info("falling back to original path due to EvalSymlink/Abs failure") From cd4026da6b60967dee2c51d626715913d1fa9914 Mon Sep 17 00:00:00 2001 From: bergquist Date: Mon, 4 Jun 2018 15:38:37 +0200 Subject: [PATCH 156/488] Revert "provisioning: turn relative symlinked path into absolut paths" This reverts commit d089b5e05dccfd60d49b802be3a28ec3530fb0e8. --- pkg/services/provisioning/dashboards/file_reader.go | 9 --------- 1 file changed, 9 deletions(-) diff --git a/pkg/services/provisioning/dashboards/file_reader.go b/pkg/services/provisioning/dashboards/file_reader.go index 3196c3a35af..8af23980531 100644 --- a/pkg/services/provisioning/dashboards/file_reader.go +++ b/pkg/services/provisioning/dashboards/file_reader.go @@ -48,25 +48,16 @@ func NewDashboardFileReader(cfg *DashboardsAsConfig, log log.Logger) (*fileReade } copy := path - - // get absolut path of config file path, err := filepath.Abs(path) if err != nil { log.Error("Could not create absolute path ", "path", path) } - // follow the symlink to get the real path path, err = filepath.EvalSymlinks(path) if err != nil { log.Error("Failed to read content of symlinked path: %s", path) } - // get the absolut path in case the symlink is relative - path, err = filepath.Abs(path) - if err != nil { - log.Error("Could not create absolute path ", "path", path) - } - if path == "" { path = copy log.Info("falling back to original path due to EvalSymlink/Abs failure") From 829af9425f4e1f6d0d3cea9f8d5fa78e46bc4a2c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Torkel=20=C3=96degaard?= Date: Mon, 4 Jun 2018 15:45:29 +0200 Subject: [PATCH 157/488] revert: reverted singlestat panel position change PR #12004 --- public/sass/components/_panel_singlestat.scss | 11 +++++------ 1 file changed, 5 insertions(+), 6 deletions(-) diff --git a/public/sass/components/_panel_singlestat.scss b/public/sass/components/_panel_singlestat.scss index af11de3b835..d680941bfb1 100644 --- a/public/sass/components/_panel_singlestat.scss +++ b/public/sass/components/_panel_singlestat.scss @@ -7,14 +7,13 @@ .singlestat-panel-value-container { line-height: 1; - position: absolute; + display: table-cell; + vertical-align: middle; + text-align: center; + position: relative; z-index: 1; font-size: 3em; - font-weight: bold; - margin: 0; - top: 50%; - left: 50%; - transform: translate(-50%, -50%); + font-weight: $font-weight-semi-bold; } .singlestat-panel-prefix { From 574e92e1d8497f2be17d781b2eeb3e98867d2b39 Mon Sep 17 00:00:00 2001 From: bergquist Date: Mon, 4 Jun 2018 15:23:17 +0200 Subject: [PATCH 158/488] changelog: adds note about closing #11958 [skip ci] --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index f11d06e990a..22e2c29c91b 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -33,6 +33,7 @@ * **Docker**: Support for env variables ending with _FILE [grafana-docker #166](https://github.com/grafana/grafana-docker/pull/166), thx [@efrecon](https://github.com/efrecon) * **Alert list panel**: Show alerts for user with viewer role [#11167](https://github.com/grafana/grafana/issues/11167) * **Provisioning**: Verify checksum of dashboards before updating to reduce load on database [#11670](https://github.com/grafana/grafana/issues/11670) +* **Provisioning**: Support symlinked files in dashboard provisioning config files [#11958](https://github.com/grafana/grafana/issues/11958) # 5.1.3 (2018-05-16) From cb6c6c817234b59cce137f071ea31ccc58f1896d Mon 
Sep 17 00:00:00 2001 From: Patrick O'Carroll Date: Wed, 23 May 2018 11:34:22 +0200 Subject: [PATCH 159/488] change admin password after first login --- public/app/core/controllers/login_ctrl.ts | 66 +++++++++-- public/app/partials/login.html | 135 +++++++++++++--------- public/sass/components/_gf-form.scss | 4 + public/sass/pages/_login.scss | 38 ++++++ 4 files changed, 184 insertions(+), 59 deletions(-) diff --git a/public/app/core/controllers/login_ctrl.ts b/public/app/core/controllers/login_ctrl.ts index 313fc2efa1a..0a66f83d08a 100644 --- a/public/app/core/controllers/login_ctrl.ts +++ b/public/app/core/controllers/login_ctrl.ts @@ -11,10 +11,15 @@ export class LoginCtrl { password: '', }; + $scope.command = {}; + $scope.result = ''; + contextSrv.sidemenu = false; $scope.oauth = config.oauth; $scope.oauthEnabled = _.keys(config.oauth).length > 0; + $scope.ldapEnabled = config.ldapEnabled; + $scope.authProxyEnabled = config.authProxyEnabled; $scope.disableLoginForm = config.disableLoginForm; $scope.disableUserSignUp = config.disableUserSignUp; @@ -39,6 +44,43 @@ export class LoginCtrl { } }; + $scope.changeView = function() { + let loginView = document.querySelector('#login-view'); + let changePasswordView = document.querySelector('#change-password-view'); + + loginView.className += ' add'; + setTimeout(() => { + loginView.className += ' hidden'; + }, 250); + setTimeout(() => { + changePasswordView.classList.remove('hidden'); + }, 251); + setTimeout(() => { + changePasswordView.classList.remove('remove'); + }, 301); + + setTimeout(() => { + document.getElementById('newPassword').focus(); + }, 400); + }; + + $scope.changePassword = function() { + $scope.command.oldPassword = 'admin'; + + if ($scope.command.newPassword !== $scope.command.confirmNew) { + $scope.appEvent('alert-warning', ['New passwords do not match', '']); + return; + } + + backendSrv.put('/api/user/password', $scope.command).then(function() { + $scope.toGrafana(); + }); + }; + + $scope.skip = function() { + $scope.toGrafana(); + }; + $scope.loginModeChanged = function(newValue) { $scope.submitBtnText = newValue ? 'Log in' : 'Sign up'; }; @@ -65,18 +107,28 @@ export class LoginCtrl { } backendSrv.post('/login', $scope.formModel).then(function(result) { - var params = $location.search(); + $scope.result = result; - if (params.redirect && params.redirect[0] === '/') { - window.location.href = config.appSubUrl + params.redirect; - } else if (result.redirectUrl) { - window.location.href = result.redirectUrl; - } else { - window.location.href = config.appSubUrl + '/'; + if ($scope.formModel.password !== 'admin' || $scope.ldapEnabled || $scope.authProxyEnabled) { + $scope.toGrafana(); + return; } + $scope.changeView(); }); }; + $scope.toGrafana = function() { + var params = $location.search(); + + if (params.redirect && params.redirect[0] === '/') { + window.location.href = config.appSubUrl + params.redirect; + } else if ($scope.result.redirectUrl) { + window.location.href = $scope.result.redirectUrl; + } else { + window.location.href = config.appSubUrl + '/'; + } + }; + $scope.init(); } } diff --git a/public/app/partials/login.html b/public/app/partials/login.html index 8680924977f..8be9e777b9f 100644 --- a/public/app/partials/login.html +++ b/public/app/partials/login.html @@ -4,70 +4,101 @@ Grafana
-