Repository: https://github.com/grafana/grafana.git
Alerting: Improve alert rule testing (#16286)
* tsdb: add support for setting the debug flag of a tsdb query

* alerting: add debug flag to the eval context

  The debug flag is set when testing an alert rule and is used to return more debug information in the test alert rule response. The flag is also passed on to tsdb queries so datasources can optionally return additional debug data.

* alerting: improve the test alert rule UI

  Adds buttons for expanding/collapsing the JSON and copying it to the clipboard, very similar to how the query inspector works.

* elasticsearch: implement support for the tsdb query debug flag

* elasticsearch: embed the client response in a struct

* alerting: return the proper query model when testing a rule
Parent: eecd8d1064
Commit: 5713048f48
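The thread through the diff below: the test-rule endpoint marks the EvalContext with IsDebug, the alerting query condition copies that into TsdbQuery.Debug, and a datasource that honors the flag (Elasticsearch in this change) reports request/response details back through the query result's Meta field, which the condition then folds into the test response's log entries. As a rough illustration of the datasource side, here is a minimal sketch of a hypothetical helper, assuming only the tsdb and simplejson types touched in this diff; the helper name and meta keys are invented:

package example

import (
    "github.com/grafana/grafana/pkg/components/simplejson"
    "github.com/grafana/grafana/pkg/tsdb"
)

// attachDebugMeta is a hypothetical helper (not part of this change) showing
// how a datasource that sees the new Debug flag could surface extra detail:
// whatever it puts on QueryResult.Meta is what the alerting condition below
// copies into the test-rule log entries.
func attachDebugMeta(tsdbQuery *tsdb.TsdbQuery, query *tsdb.Query, queryRes *tsdb.QueryResult) {
    if !tsdbQuery.Debug {
        return
    }

    // The meta keys are illustrative, not a Grafana contract.
    queryRes.Meta = simplejson.NewFromAny(map[string]interface{}{
        "refId":    query.RefId,
        "rawQuery": query.Model,
    })
}

Anything a datasource attaches this way shows up under the "Query Result" log entry's meta key when a rule is tested with debug enabled.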
@@ -44,6 +44,7 @@ type MetricRequest struct {
     From    string             `json:"from"`
     To      string             `json:"to"`
     Queries []*simplejson.Json `json:"queries"`
+    Debug   bool               `json:"debug"`
 }

 type UserStars struct {
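For reference, the new debug field simply rides along in the metric request body. A minimal, standalone sketch of the wire format, with the struct redeclared locally (and Queries simplified to raw JSON) so it compiles on its own; the request values are invented:

package main

import (
    "encoding/json"
    "fmt"
)

// Local mirror of the MetricRequest fields in the hunk above, redeclared here
// only so the example compiles on its own.
type metricRequest struct {
    From    string            `json:"from"`
    To      string            `json:"to"`
    Queries []json.RawMessage `json:"queries"`
    Debug   bool              `json:"debug"`
}

func main() {
    // An invented request body; the only new part is "debug": true.
    body := []byte(`{"from": "now-1h", "to": "now", "queries": [{"refId": "A"}], "debug": true}`)

    var req metricRequest
    if err := json.Unmarshal(body, &req); err != nil {
        panic(err)
    }

    fmt.Println("debug requested:", req.Debug) // debug requested: true
}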
@@ -34,7 +34,7 @@ func (hs *HTTPServer) QueryMetrics(c *m.ReqContext, reqDto dtos.MetricRequest) R
         return Error(500, "Unable to load datasource meta data", err)
     }

-    request := &tsdb.TsdbQuery{TimeRange: timeRange}
+    request := &tsdb.TsdbQuery{TimeRange: timeRange, Debug: reqDto.Debug}

     for _, query := range reqDto.Queries {
         request.Queries = append(request.Queries, &tsdb.Query{
@@ -114,9 +114,46 @@ func (c *QueryCondition) executeQuery(context *alerting.EvalContext, timeRange *
         return nil, fmt.Errorf("Could not find datasource %v", err)
     }

-    req := c.getRequestForAlertRule(getDsInfo.Result, timeRange)
+    req := c.getRequestForAlertRule(getDsInfo.Result, timeRange, context.IsDebug)
     result := make(tsdb.TimeSeriesSlice, 0)

+    if context.IsDebug {
+        data := simplejson.New()
+        if req.TimeRange != nil {
+            data.Set("from", req.TimeRange.GetFromAsMsEpoch())
+            data.Set("to", req.TimeRange.GetToAsMsEpoch())
+        }
+
+        type queryDto struct {
+            RefId         string           `json:"refId"`
+            Model         *simplejson.Json `json:"model"`
+            Datasource    *simplejson.Json `json:"datasource"`
+            MaxDataPoints int64            `json:"maxDataPoints"`
+            IntervalMs    int64            `json:"intervalMs"`
+        }
+
+        queries := []*queryDto{}
+        for _, q := range req.Queries {
+            queries = append(queries, &queryDto{
+                RefId: q.RefId,
+                Model: q.Model,
+                Datasource: simplejson.NewFromAny(map[string]interface{}{
+                    "id":   q.DataSource.Id,
+                    "name": q.DataSource.Name,
+                }),
+                MaxDataPoints: q.MaxDataPoints,
+                IntervalMs:    q.IntervalMs,
+            })
+        }
+
+        data.Set("queries", queries)
+
+        context.Logs = append(context.Logs, &alerting.ResultLogEntry{
+            Message: fmt.Sprintf("Condition[%d]: Query", c.Index),
+            Data:    data,
+        })
+    }
+
     resp, err := c.HandleRequest(context.Ctx, getDsInfo.Result, req)
     if err != nil {
         if err == gocontext.DeadlineExceeded {
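A rough sketch of the debug payload the block above attaches to the "Condition[%d]: Query" log entry, assembled with the same simplejson calls; every concrete value here is invented for illustration:

package main

import (
    "encoding/json"
    "fmt"

    "github.com/grafana/grafana/pkg/components/simplejson"
)

func main() {
    // Mirrors the shape of the debug payload built in the condition above;
    // all values are made up.
    data := simplejson.New()
    data.Set("from", int64(1554000000000))
    data.Set("to", int64(1554003600000))
    data.Set("queries", []interface{}{
        map[string]interface{}{
            "refId":         "A",
            "model":         map[string]interface{}{"timeField": "@timestamp"},
            "datasource":    map[string]interface{}{"id": 1, "name": "gdev-elasticsearch"},
            "maxDataPoints": 500,
            "intervalMs":    1000,
        },
    })

    out, _ := json.MarshalIndent(data, "", "  ")
    fmt.Println(string(out))
}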
@@ -133,10 +170,20 @@ func (c *QueryCondition) executeQuery(context *alerting.EvalContext, timeRange *

         result = append(result, v.Series...)

+        queryResultData := map[string]interface{}{}
+
         if context.IsTestRun {
+            queryResultData["series"] = v.Series
+        }
+
+        if context.IsDebug && v.Meta != nil {
+            queryResultData["meta"] = v.Meta
+        }
+
+        if context.IsTestRun || context.IsDebug {
             context.Logs = append(context.Logs, &alerting.ResultLogEntry{
                 Message: fmt.Sprintf("Condition[%d]: Query Result", c.Index),
-                Data:    v.Series,
+                Data:    simplejson.NewFromAny(queryResultData),
             })
         }
     }
@@ -144,7 +191,7 @@ func (c *QueryCondition) executeQuery(context *alerting.EvalContext, timeRange *
     return result, nil
 }

-func (c *QueryCondition) getRequestForAlertRule(datasource *models.DataSource, timeRange *tsdb.TimeRange) *tsdb.TsdbQuery {
+func (c *QueryCondition) getRequestForAlertRule(datasource *models.DataSource, timeRange *tsdb.TimeRange, debug bool) *tsdb.TsdbQuery {
     req := &tsdb.TsdbQuery{
         TimeRange: timeRange,
         Queries: []*tsdb.Query{
@@ -154,6 +201,7 @@ func (c *QueryCondition) getRequestForAlertRule(datasource *models.DataSource, t
                 DataSource: datasource,
             },
         },
+        Debug: debug,
     }

     return req
@@ -15,6 +15,7 @@ import (
 type EvalContext struct {
     Firing      bool
     IsTestRun   bool
+    IsDebug     bool
     EvalMatches []*EvalMatch
     Logs        []*ResultLogEntry
     Error       error
@@ -54,6 +54,7 @@ func testAlertRule(rule *Rule) *EvalContext {

     context := NewEvalContext(context.Background(), rule)
     context.IsTestRun = true
+    context.IsDebug = true

     handler.Eval(context)
     context.Rule.State = context.GetNewState()
@@ -5,6 +5,7 @@ import (
     "context"
     "encoding/json"
     "fmt"
+    "io/ioutil"
     "net/http"
     "net/url"
     "path"
@@ -37,6 +38,7 @@ type Client interface {
     GetMinInterval(queryInterval string) (time.Duration, error)
     ExecuteMultisearch(r *MultiSearchRequest) (*MultiSearchResponse, error)
     MultiSearch() *MultiSearchRequestBuilder
+    EnableDebug()
 }

 // NewClient creates a new elasticsearch client
@@ -80,12 +82,13 @@ var NewClient = func(ctx context.Context, ds *models.DataSource, timeRange *tsdb
 }

 type baseClientImpl struct {
     ctx          context.Context
     ds           *models.DataSource
     version      int
     timeField    string
     indices      []string
     timeRange    *tsdb.TimeRange
+    debugEnabled bool
 }

 func (c *baseClientImpl) GetVersion() int {
@@ -112,7 +115,7 @@ type multiRequest struct {
     interval tsdb.Interval
 }

-func (c *baseClientImpl) executeBatchRequest(uriPath, uriQuery string, requests []*multiRequest) (*http.Response, error) {
+func (c *baseClientImpl) executeBatchRequest(uriPath, uriQuery string, requests []*multiRequest) (*response, error) {
     bytes, err := c.encodeBatchRequests(requests)
     if err != nil {
         return nil, err
@@ -150,7 +153,7 @@ func (c *baseClientImpl) encodeBatchRequests(requests []*multiRequest) ([]byte,
     return payload.Bytes(), nil
 }

-func (c *baseClientImpl) executeRequest(method, uriPath, uriQuery string, body []byte) (*http.Response, error) {
+func (c *baseClientImpl) executeRequest(method, uriPath, uriQuery string, body []byte) (*response, error) {
     u, _ := url.Parse(c.ds.Url)
     u.Path = path.Join(u.Path, uriPath)
     u.RawQuery = uriQuery
@@ -168,6 +171,15 @@ func (c *baseClientImpl) executeRequest(method, uriPath, uriQuery string, body [

     clientLog.Debug("Executing request", "url", req.URL.String(), "method", method)

+    var reqInfo *SearchRequestInfo
+    if c.debugEnabled {
+        reqInfo = &SearchRequestInfo{
+            Method: req.Method,
+            Url:    req.URL.String(),
+            Data:   string(body),
+        }
+    }
+
     req.Header.Set("User-Agent", "Grafana")
     req.Header.Set("Content-Type", "application/json")

@@ -191,7 +203,11 @@ func (c *baseClientImpl) executeRequest(method, uriPath, uriQuery string, body [
         elapsed := time.Since(start)
         clientLog.Debug("Executed request", "took", elapsed)
     }()
-    return ctxhttp.Do(c.ctx, httpClient, req)
+    res, err := ctxhttp.Do(c.ctx, httpClient, req)
+    return &response{
+        httpResponse: res,
+        reqInfo:      reqInfo,
+    }, err
 }

 func (c *baseClientImpl) ExecuteMultisearch(r *MultiSearchRequest) (*MultiSearchResponse, error) {
@@ -199,18 +215,31 @@ func (c *baseClientImpl) ExecuteMultisearch(r *MultiSearchRequest) (*MultiSearch

     multiRequests := c.createMultiSearchRequests(r.Requests)
     queryParams := c.getMultiSearchQueryParameters()
-    res, err := c.executeBatchRequest("_msearch", queryParams, multiRequests)
+    clientRes, err := c.executeBatchRequest("_msearch", queryParams, multiRequests)
     if err != nil {
         return nil, err
     }
+    res := clientRes.httpResponse
+    defer res.Body.Close()

     clientLog.Debug("Received multisearch response", "code", res.StatusCode, "status", res.Status, "content-length", res.ContentLength)

     start := time.Now()
     clientLog.Debug("Decoding multisearch json response")

+    var bodyBytes []byte
+    if c.debugEnabled {
+        tmpBytes, err := ioutil.ReadAll(res.Body)
+        if err != nil {
+            clientLog.Error("failed to read http response bytes", "error", err)
+        } else {
+            bodyBytes = make([]byte, len(tmpBytes))
+            copy(bodyBytes, tmpBytes)
+            res.Body = ioutil.NopCloser(bytes.NewBuffer(tmpBytes))
+        }
+    }
+
     var msr MultiSearchResponse
-    defer res.Body.Close()
     dec := json.NewDecoder(res.Body)
     err = dec.Decode(&msr)
     if err != nil {
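The block above reads the whole response body so a copy can be kept for debug output, then puts a fresh reader back with ioutil.NopCloser so the JSON decoding that follows still sees the full payload. A standalone illustration of that read-then-restore pattern, independent of Elasticsearch (the stub server and payload are made up):

package main

import (
    "bytes"
    "encoding/json"
    "fmt"
    "io/ioutil"
    "net/http"
    "net/http/httptest"
)

func main() {
    // A stub server standing in for any HTTP backend; the payload is invented.
    srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
        fmt.Fprint(w, `{"took": 3}`)
    }))
    defer srv.Close()

    res, err := http.Get(srv.URL)
    if err != nil {
        panic(err)
    }
    defer res.Body.Close()

    // Read the body once, keep a copy, and restore res.Body so the normal
    // JSON decoding below still sees the full payload.
    raw, _ := ioutil.ReadAll(res.Body)
    res.Body = ioutil.NopCloser(bytes.NewBuffer(raw))

    var decoded map[string]interface{}
    _ = json.NewDecoder(res.Body).Decode(&decoded)

    fmt.Println("decoded:", decoded)
    fmt.Println("raw copy kept for debug:", string(raw))
}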
@@ -222,6 +251,24 @@ func (c *baseClientImpl) ExecuteMultisearch(r *MultiSearchRequest) (*MultiSearch

     msr.Status = res.StatusCode

+    if c.debugEnabled {
+        bodyJSON, err := simplejson.NewFromReader(bytes.NewBuffer(bodyBytes))
+        var data *simplejson.Json
+        if err != nil {
+            clientLog.Error("failed to decode http response into json", "error", err)
+        } else {
+            data = bodyJSON
+        }
+
+        msr.DebugInfo = &SearchDebugInfo{
+            Request: clientRes.reqInfo,
+            Response: &SearchResponseInfo{
+                Status: res.StatusCode,
+                Data:   data,
+            },
+        }
+    }
+
     return &msr, nil
 }

@@ -266,3 +313,7 @@ func (c *baseClientImpl) getMultiSearchQueryParameters() string {
 func (c *baseClientImpl) MultiSearch() *MultiSearchRequestBuilder {
     return NewMultiSearchRequestBuilder(c.GetVersion())
 }
+
+func (c *baseClientImpl) EnableDebug() {
+    c.debugEnabled = true
+}
@@ -2,10 +2,34 @@ package es

 import (
     "encoding/json"
+    "net/http"

+    "github.com/grafana/grafana/pkg/components/simplejson"
+
     "github.com/grafana/grafana/pkg/tsdb"
 )

+type response struct {
+    httpResponse *http.Response
+    reqInfo      *SearchRequestInfo
+}
+
+type SearchRequestInfo struct {
+    Method string `json:"method"`
+    Url    string `json:"url"`
+    Data   string `json:"data"`
+}
+
+type SearchResponseInfo struct {
+    Status int              `json:"status"`
+    Data   *simplejson.Json `json:"data"`
+}
+
+type SearchDebugInfo struct {
+    Request  *SearchRequestInfo  `json:"request"`
+    Response *SearchResponseInfo `json:"response"`
+}
+
 // SearchRequest represents a search request
 type SearchRequest struct {
     Index    string
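For reference, a standalone sketch of the JSON shape these new debug types serialize to; the structs are redeclared locally (with Data loosened to interface{} in place of *simplejson.Json) so the example compiles on its own, and all values are invented:

package main

import (
    "encoding/json"
    "fmt"
)

// Local mirrors of SearchRequestInfo, SearchResponseInfo and SearchDebugInfo
// from the hunk above, redeclared so this example is self-contained.
type searchRequestInfo struct {
    Method string `json:"method"`
    Url    string `json:"url"`
    Data   string `json:"data"`
}

type searchResponseInfo struct {
    Status int         `json:"status"`
    Data   interface{} `json:"data"`
}

type searchDebugInfo struct {
    Request  *searchRequestInfo  `json:"request"`
    Response *searchResponseInfo `json:"response"`
}

func main() {
    // Invented values; this only shows the JSON shape that ends up under the
    // query result's meta field when debug is enabled.
    info := searchDebugInfo{
        Request: &searchRequestInfo{
            Method: "POST",
            Url:    "http://localhost:9200/_msearch",
            Data:   `{"search_type":"query_then_fetch"}`,
        },
        Response: &searchResponseInfo{
            Status: 200,
            Data:   map[string]interface{}{"took": 3},
        },
    }

    out, _ := json.MarshalIndent(info, "", "  ")
    fmt.Println(string(out))
}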
@@ -60,6 +84,7 @@ type MultiSearchRequest struct {
 type MultiSearchResponse struct {
     Status    int               `json:"status,omitempty"`
     Responses []*SearchResponse `json:"responses"`
+    DebugInfo *SearchDebugInfo  `json:"-"`
 }

 // Query represents a query
@@ -40,6 +40,10 @@ func (e *ElasticsearchExecutor) Query(ctx context.Context, dsInfo *models.DataSo
         return nil, err
     }

+    if tsdbQuery.Debug {
+        client.EnableDebug()
+    }
+
     query := newTimeSeriesQuery(client, tsdbQuery, intervalCalculator)
     return query.execute()
 }
@@ -29,12 +29,14 @@ const (
 type responseParser struct {
     Responses []*es.SearchResponse
     Targets   []*Query
+    DebugInfo *es.SearchDebugInfo
 }

-var newResponseParser = func(responses []*es.SearchResponse, targets []*Query) *responseParser {
+var newResponseParser = func(responses []*es.SearchResponse, targets []*Query, debugInfo *es.SearchDebugInfo) *responseParser {
     return &responseParser{
         Responses: responses,
         Targets:   targets,
+        DebugInfo: debugInfo,
     }
 }

@@ -49,12 +51,19 @@ func (rp *responseParser) getTimeSeries() (*tsdb.Response, error) {
     for i, res := range rp.Responses {
         target := rp.Targets[i]

+        var debugInfo *simplejson.Json
+        if rp.DebugInfo != nil && i == 0 {
+            debugInfo = simplejson.NewFromAny(rp.DebugInfo)
+        }
+
         if res.Error != nil {
             result.Results[target.RefID] = getErrorFromElasticResponse(res)
+            result.Results[target.RefID].Meta = debugInfo
             continue
         }

         queryRes := tsdb.NewQueryResult()
+        queryRes.Meta = debugInfo
         props := make(map[string]string)
         table := tsdb.Table{
             Columns: make([]tsdb.TableColumn, 0),
@@ -954,5 +954,5 @@ func newResponseParserForTest(tsdbQueries map[string]string, responseBody string
         return nil, err
     }

-    return newResponseParser(response.Responses, queries), nil
+    return newResponseParser(response.Responses, queries, nil), nil
 }
@@ -163,7 +163,7 @@ func (e *timeSeriesQuery) execute() (*tsdb.Response, error) {
         return nil, err
     }

-    rp := newResponseParser(res.Responses, queries)
+    rp := newResponseParser(res.Responses, queries, res.DebugInfo)
     return rp.getTimeSeries()
 }

@@ -635,6 +635,8 @@ func newFakeClient(version int) *fakeClient {
     }
 }

+func (c *fakeClient) EnableDebug() {}
+
 func (c *fakeClient) GetVersion() int {
     return c.version
 }
@@ -9,6 +9,7 @@ import (
 type TsdbQuery struct {
     TimeRange *TimeRange
     Queries   []*Query
+    Debug     bool
 }

 type Query struct {
@@ -1,5 +1,7 @@
 import React, { PureComponent } from 'react';
 import { JSONFormatter } from 'app/core/components/JSONFormatter/JSONFormatter';
+import appEvents from 'app/core/app_events';
+import { CopyToClipboard } from 'app/core/components/CopyToClipboard/CopyToClipboard';
 import { getBackendSrv } from '@grafana/runtime';
 import { DashboardModel } from '../dashboard/state/DashboardModel';
 import { LoadingPlaceholder } from '@grafana/ui/src';
@@ -11,15 +13,20 @@ export interface Props {

 interface State {
   isLoading: boolean;
+  allNodesExpanded: boolean;
   testRuleResponse: {};
 }

 export class TestRuleResult extends PureComponent<Props, State> {
   readonly state: State = {
     isLoading: false,
+    allNodesExpanded: null,
     testRuleResponse: {},
   };

+  formattedJson: any;
+  clipboard: any;
+
   componentDidMount() {
     this.testRule();
   }
@@ -33,6 +40,50 @@ export class TestRuleResult extends PureComponent<Props, State> {
     this.setState({ isLoading: false, testRuleResponse });
   }

+  setFormattedJson = formattedJson => {
+    this.formattedJson = formattedJson;
+  };
+
+  getTextForClipboard = () => {
+    return JSON.stringify(this.formattedJson, null, 2);
+  };
+
+  onClipboardSuccess = () => {
+    appEvents.emit('alert-success', ['Content copied to clipboard']);
+  };
+
+  onToggleExpand = () => {
+    this.setState(prevState => ({
+      ...prevState,
+      allNodesExpanded: !this.state.allNodesExpanded,
+    }));
+  };
+
+  getNrOfOpenNodes = () => {
+    if (this.state.allNodesExpanded === null) {
+      return 3; // 3 is default, ie when state is null
+    } else if (this.state.allNodesExpanded) {
+      return 20;
+    }
+    return 1;
+  };
+
+  renderExpandCollapse = () => {
+    const { allNodesExpanded } = this.state;
+
+    const collapse = (
+      <>
+        <i className="fa fa-minus-square-o" /> Collapse All
+      </>
+    );
+    const expand = (
+      <>
+        <i className="fa fa-plus-square-o" /> Expand All
+      </>
+    );
+    return allNodesExpanded ? collapse : expand;
+  };
+
   render() {
     const { testRuleResponse, isLoading } = this.state;

@@ -40,6 +91,25 @@ export class TestRuleResult extends PureComponent<Props, State> {
       return <LoadingPlaceholder text="Evaluating rule" />;
     }

-    return <JSONFormatter json={testRuleResponse} />;
+    const openNodes = this.getNrOfOpenNodes();
+
+    return (
+      <>
+        <div className="pull-right">
+          <button className="btn btn-transparent btn-p-x-0 m-r-1" onClick={this.onToggleExpand}>
+            {this.renderExpandCollapse()}
+          </button>
+          <CopyToClipboard
+            className="btn btn-transparent btn-p-x-0"
+            text={this.getTextForClipboard}
+            onSuccess={this.onClipboardSuccess}
+          >
+            <i className="fa fa-clipboard" /> Copy to Clipboard
+          </CopyToClipboard>
+        </div>
+
+        <JSONFormatter json={testRuleResponse} open={openNodes} onDidRender={this.setFormattedJson} />
+      </>
+    );
   }
 }