Prometheus: Introduce prometheus backend library (#83952)

* Move files to prometheus-library

* refactor core prometheus to use prometheus-library

* modify client transport options

* mock

* have a type

* import aliases

* rename

* call the right method

* remove unrelated test from the library

* update codeowners

* go work sync

* update go.work.sum

* make swagger-clean && make openapi3-gen

* add promlib to makefile

* remove clilogger

* Export the function

* update unit test

* add prometheus_test.go

* fix mock type

* use mapUtil from grafana-plugin-sdk-go
This commit is contained in:
ismail simsek
2024-03-11 17:22:33 +01:00
committed by GitHub
parent cfc7ea92da
commit 3fb6319d1b
114 changed files with 672 additions and 340 deletions

View File

@@ -0,0 +1,172 @@
package client
import (
"bytes"
"context"
"io"
"net/http"
"net/url"
"path"
"strconv"
"strings"
"time"
"github.com/grafana/grafana-plugin-sdk-go/backend"
"github.com/grafana/grafana/pkg/promlib/models"
)
// doer is the minimal HTTP execution interface the Client needs.
// It is satisfied by *http.Client and lets tests substitute a mock transport.
type doer interface {
	// Do executes the given HTTP request and returns the response.
	Do(req *http.Request) (*http.Response, error)
}
// Client is a custom Prometheus client. Reason for this is that Prom Go client serializes response into its own
// objects, we have to go through them and then serialize again into DataFrame which isn't very efficient. Using custom
// client we can parse response directly into DataFrame.
type Client struct {
	doer    doer   // underlying HTTP transport (usually *http.Client)
	method  string // HTTP method used for query endpoints ("GET" or "POST")
	baseUrl string // base URL of the Prometheus server, e.g. "http://localhost:9090"
}
// NewClient returns a Client that sends queries through d, using the given
// HTTP method against the Prometheus server at baseUrl.
func NewClient(d doer, method, baseUrl string) *Client {
	c := Client{
		doer:    d,
		method:  method,
		baseUrl: baseUrl,
	}
	return &c
}
// QueryRange runs a Prometheus range query (api/v1/query_range) for q and
// returns the raw HTTP response so the caller can stream-parse the body.
func (c *Client) QueryRange(ctx context.Context, q *models.Query) (*http.Response, error) {
	timeRange := q.TimeRange()
	params := map[string]string{
		"query": q.Expr,
		"start": formatTime(timeRange.Start),
		"end":   formatTime(timeRange.End),
		"step":  strconv.FormatFloat(timeRange.Step.Seconds(), 'f', -1, 64),
	}
	request, err := c.createQueryRequest(ctx, "api/v1/query_range", params)
	if err != nil {
		return nil, err
	}
	return c.doer.Do(request)
}
// QueryInstant runs a Prometheus instant query (api/v1/query) for q and
// returns the raw HTTP response.
func (c *Client) QueryInstant(ctx context.Context, q *models.Query) (*http.Response, error) {
	// We do not need a time range here.
	// Instant query evaluates at a single point in time.
	// Using q.TimeRange is aligning the query range to step.
	// Which causes a misleading time point.
	// Instead of aligning we use time point directly.
	// https://prometheus.io/docs/prometheus/latest/querying/api/#instant-queries
	params := map[string]string{
		"query": q.Expr,
		"time":  formatTime(q.End),
	}
	request, err := c.createQueryRequest(ctx, "api/v1/query", params)
	if err != nil {
		return nil, err
	}
	return c.doer.Do(request)
}
// QueryExemplars runs a Prometheus exemplars query (api/v1/query_exemplars)
// for q and returns the raw HTTP response.
func (c *Client) QueryExemplars(ctx context.Context, q *models.Query) (*http.Response, error) {
	timeRange := q.TimeRange()
	params := map[string]string{
		"query": q.Expr,
		"start": formatTime(timeRange.Start),
		"end":   formatTime(timeRange.End),
	}
	request, err := c.createQueryRequest(ctx, "api/v1/query_exemplars", params)
	if err != nil {
		return nil, err
	}
	return c.doer.Do(request)
}
// QueryResource proxies an arbitrary resource call (e.g. series or label
// lookups) to the Prometheus server, preserving the original request's query
// string, method and body.
func (c *Client) QueryResource(ctx context.Context, req *backend.CallResourceRequest) (*http.Response, error) {
	// The way URL is represented in CallResourceRequest and what we need for the fetch function is different,
	// so here we have to do a bit of parsing so we can then compose it with the base url in the correct way.
	parsedReqUrl, err := url.Parse(req.URL)
	if err != nil {
		return nil, err
	}
	target, err := c.createUrl(req.Path, nil)
	if err != nil {
		return nil, err
	}
	target.RawQuery = parsedReqUrl.RawQuery
	// We use the method from the request, as for resources the frontend may fall back to GET
	// if POST does not work, and we want to respect that.
	httpRequest, err := createRequest(ctx, req.Method, target, bytes.NewReader(req.Body))
	if err != nil {
		return nil, err
	}
	return c.doer.Do(httpRequest)
}
// createQueryRequest builds the HTTP request for a query endpoint. For POST
// the parameters are form-encoded into the request body; for any other method
// they are appended to the URL as a query string.
func (c *Client) createQueryRequest(ctx context.Context, endpoint string, qv map[string]string) (*http.Request, error) {
	if strings.ToUpper(c.method) != http.MethodPost {
		u, err := c.createUrl(endpoint, qv)
		if err != nil {
			return nil, err
		}
		return createRequest(ctx, c.method, u, http.NoBody)
	}
	u, err := c.createUrl(endpoint, nil)
	if err != nil {
		return nil, err
	}
	form := make(url.Values, len(qv))
	for key, val := range qv {
		form.Set(key, val)
	}
	return createRequest(ctx, c.method, u, strings.NewReader(form.Encode()))
}
// createUrl joins endpoint onto the client's base URL and, when qs is
// non-empty, merges the given parameters into the URL's query string.
func (c *Client) createUrl(endpoint string, qs map[string]string) (*url.URL, error) {
	parsed, err := url.ParseRequestURI(c.baseUrl)
	if err != nil {
		return nil, err
	}
	parsed.Path = path.Join(parsed.Path, endpoint)
	// don't re-encode the Query if not needed
	if len(qs) > 0 {
		values := parsed.Query()
		for key, val := range qs {
			values.Set(key, val)
		}
		parsed.RawQuery = values.Encode()
	}
	return parsed, nil
}
func createRequest(ctx context.Context, method string, u *url.URL, bodyReader io.Reader) (*http.Request, error) {
request, err := http.NewRequestWithContext(ctx, method, u.String(), bodyReader)
if err != nil {
return nil, err
}
if strings.ToUpper(method) == http.MethodPost {
// This may not be true but right now we don't have more information here and seems like we send just this type
// of encoding right now if it is a POST
request.Header.Set("Content-Type", "application/x-www-form-urlencoded")
// This allows transport to retry request. See https://github.com/prometheus/client_golang/pull/1022
// It's set to nil so it is not actually sent over the wire, just used in Go http lib to retry requests.
request.Header["Idempotency-Key"] = nil
}
return request, nil
}
func formatTime(t time.Time) string {
return strconv.FormatFloat(float64(t.Unix())+float64(t.Nanosecond())/1e9, 'f', -1, 64)
}

View File

@@ -0,0 +1,141 @@
package client
import (
"context"
"fmt"
"io"
"net/http"
"testing"
"time"
"github.com/grafana/grafana-plugin-sdk-go/backend"
"github.com/stretchr/testify/require"
"github.com/grafana/grafana/pkg/promlib/models"
)
type MockDoer struct {
Req *http.Request
}
func (doer *MockDoer) Do(req *http.Request) (*http.Response, error) {
doer.Req = req
return &http.Response{
StatusCode: http.StatusOK,
Status: "200 OK",
}, nil
}
// TestClient verifies that Client composes outgoing HTTP requests correctly,
// using MockDoer to capture the request instead of sending it. It covers
// resource proxying (both methods) and range queries configured for GET vs POST.
func TestClient(t *testing.T) {
	t.Run("QueryResource", func(t *testing.T) {
		doer := &MockDoer{}
		// The method here does not really matter for resource calls
		client := NewClient(doer, http.MethodGet, "http://localhost:9090")
		t.Run("sends correct POST request", func(t *testing.T) {
			req := &backend.CallResourceRequest{
				PluginContext: backend.PluginContext{},
				Path:          "/api/v1/series",
				Method:        http.MethodPost,
				URL:           "/api/v1/series",
				Body:          []byte("match%5B%5D: ALERTS\nstart: 1655271408\nend: 1655293008"),
			}
			res, err := client.QueryResource(context.Background(), req)
			// MockDoer returns a body-less response; close defensively anyway.
			defer func() {
				if res != nil && res.Body != nil {
					if err := res.Body.Close(); err != nil {
						fmt.Println("Error", "err", err)
					}
				}
			}()
			require.NoError(t, err)
			require.NotNil(t, doer.Req)
			require.Equal(t, http.MethodPost, doer.Req.Method)
			// The resource body must be forwarded to Prometheus untouched.
			body, err := io.ReadAll(doer.Req.Body)
			require.NoError(t, err)
			require.Equal(t, []byte("match%5B%5D: ALERTS\nstart: 1655271408\nend: 1655293008"), body)
			require.Equal(t, "http://localhost:9090/api/v1/series", doer.Req.URL.String())
		})
		t.Run("sends correct GET request", func(t *testing.T) {
			req := &backend.CallResourceRequest{
				PluginContext: backend.PluginContext{},
				Path:          "/api/v1/series",
				Method:        http.MethodGet,
				// Query string from the frontend request must be preserved as-is.
				URL: "api/v1/series?match%5B%5D=ALERTS&start=1655272558&end=1655294158",
			}
			res, err := client.QueryResource(context.Background(), req)
			// MockDoer returns a body-less response; close defensively anyway.
			defer func() {
				if res != nil && res.Body != nil {
					if err := res.Body.Close(); err != nil {
						fmt.Println("Error", "err", err)
					}
				}
			}()
			require.NoError(t, err)
			require.NotNil(t, doer.Req)
			require.Equal(t, http.MethodGet, doer.Req.Method)
			// GET requests are sent with an empty body.
			body, err := io.ReadAll(doer.Req.Body)
			require.NoError(t, err)
			require.Equal(t, []byte{}, body)
			require.Equal(t, "http://localhost:9090/api/v1/series?match%5B%5D=ALERTS&start=1655272558&end=1655294158", doer.Req.URL.String())
		})
	})
	t.Run("QueryRange", func(t *testing.T) {
		doer := &MockDoer{}
		t.Run("sends correct POST query", func(t *testing.T) {
			client := NewClient(doer, http.MethodPost, "http://localhost:9090")
			req := &models.Query{
				Expr:       "rate(ALERTS{job=\"test\" [$__rate_interval]})",
				Start:      time.Unix(0, 0),
				End:        time.Unix(1234, 0),
				RangeQuery: true,
				Step:       1 * time.Second,
			}
			res, err := client.QueryRange(context.Background(), req)
			// MockDoer returns a body-less response; close defensively anyway.
			defer func() {
				if res != nil && res.Body != nil {
					if err := res.Body.Close(); err != nil {
						fmt.Println("Error", "err", err)
					}
				}
			}()
			require.NoError(t, err)
			require.NotNil(t, doer.Req)
			require.Equal(t, http.MethodPost, doer.Req.Method)
			require.Equal(t, "application/x-www-form-urlencoded", doer.Req.Header.Get("Content-Type"))
			// POST sends the query parameters form-encoded in the body.
			body, err := io.ReadAll(doer.Req.Body)
			require.NoError(t, err)
			require.Equal(t, []byte("end=1234&query=rate%28ALERTS%7Bjob%3D%22test%22+%5B%24__rate_interval%5D%7D%29&start=0&step=1"), body)
			require.Equal(t, "http://localhost:9090/api/v1/query_range", doer.Req.URL.String())
		})
		t.Run("sends correct GET query", func(t *testing.T) {
			client := NewClient(doer, http.MethodGet, "http://localhost:9090")
			req := &models.Query{
				Expr:       "rate(ALERTS{job=\"test\" [$__rate_interval]})",
				Start:      time.Unix(0, 0),
				End:        time.Unix(1234, 0),
				RangeQuery: true,
				Step:       1 * time.Second,
			}
			res, err := client.QueryRange(context.Background(), req)
			// MockDoer returns a body-less response; close defensively anyway.
			defer func() {
				if res != nil && res.Body != nil {
					if err := res.Body.Close(); err != nil {
						fmt.Println("Error", "err", err)
					}
				}
			}()
			require.NoError(t, err)
			require.NotNil(t, doer.Req)
			require.Equal(t, http.MethodGet, doer.Req.Method)
			// GET sends no body; the parameters go into the URL instead.
			body, err := io.ReadAll(doer.Req.Body)
			require.NoError(t, err)
			require.Equal(t, []byte{}, body)
			require.Equal(t, "http://localhost:9090/api/v1/query_range?end=1234&query=rate%28ALERTS%7Bjob%3D%22test%22+%5B%24__rate_interval%5D%7D%29&start=0&step=1", doer.Req.URL.String())
		})
	})
}

View File

@@ -0,0 +1,49 @@
package client
import (
"context"
"fmt"
"strings"
"github.com/grafana/grafana-plugin-sdk-go/backend"
sdkhttpclient "github.com/grafana/grafana-plugin-sdk-go/backend/httpclient"
"github.com/grafana/grafana-plugin-sdk-go/data/utils/maputil"
"github.com/grafana/grafana-plugin-sdk-go/backend/log"
"github.com/grafana/grafana/pkg/promlib/middleware"
"github.com/grafana/grafana/pkg/promlib/utils"
)
// CreateTransportOptions creates options for the http client based on the
// data source settings: base HTTP options from the SDK plus the Prometheus
// middleware chain.
func CreateTransportOptions(ctx context.Context, settings backend.DataSourceInstanceSettings, logger log.Logger) (*sdkhttpclient.Options, error) {
	opts, err := settings.HTTPClientOptions(ctx)
	if err != nil {
		return nil, fmt.Errorf("error getting HTTP options: %w", err)
	}
	jsonData, err := utils.GetJsonData(settings)
	if err != nil {
		return nil, fmt.Errorf("error reading settings: %w", err)
	}
	// The configured method decides whether the GET-forcing middleware is installed.
	method, _ := maputil.GetStringOptional(jsonData, "httpMethod")
	opts.Middlewares = middlewares(logger, method)
	return &opts, nil
}
// middlewares assembles the HTTP middleware chain for the Prometheus client.
func middlewares(logger log.Logger, httpMethod string) []sdkhttpclient.Middleware {
	chain := []sdkhttpclient.Middleware{
		// TODO: probably isn't needed anymore and should be done by http infra code
		middleware.CustomQueryParameters(logger),
		sdkhttpclient.CustomHeadersMiddleware(),
	}
	// Needed to control GET vs POST method of the requests
	if strings.ToLower(httpMethod) == "get" {
		chain = append(chain, middleware.ForceHttpGet(logger))
	}
	return chain
}

View File

@@ -0,0 +1,26 @@
package client
import (
"context"
"testing"
"github.com/grafana/grafana-plugin-sdk-go/backend"
"github.com/stretchr/testify/require"
)
// TestCreateTransportOptions checks that data source settings are translated
// into HTTP client options: custom headers are resolved from the decrypted
// secure JSON data and the default middleware chain is installed.
func TestCreateTransportOptions(t *testing.T) {
	t.Run("creates correct options object", func(t *testing.T) {
		settings := backend.DataSourceInstanceSettings{
			BasicAuthEnabled:        false,
			BasicAuthUser:           "",
			JSONData:                []byte(`{"httpHeaderName1": "foo"}`),
			DecryptedSecureJSONData: map[string]string{"httpHeaderValue1": "bar"},
		}
		logger := backend.NewLoggerWith("logger", "test")
		opts, err := CreateTransportOptions(context.Background(), settings, logger)
		require.NoError(t, err)
		require.Equal(t, map[string]string{"foo": "bar"}, opts.Headers)
		require.Equal(t, 2, len(opts.Middlewares))
	})
}

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,86 @@
package converter
import (
"os"
"path"
"strings"
"testing"
"time"
sdkjsoniter "github.com/grafana/grafana-plugin-sdk-go/data/utils/jsoniter"
"github.com/grafana/grafana-plugin-sdk-go/experimental"
jsoniter "github.com/json-iterator/go"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
// update controls golden-file regeneration: set to true and re-run the tests
// to rewrite the testdata golden frames, then set back to false before committing.
const update = false

// files lists the testdata scenarios (JSON responses under ./testdata) that
// are converted and compared against their "<name>-frame" golden files.
var files = []string{
	"prom-labels",
	"prom-matrix",
	"prom-matrix-with-nans",
	"prom-matrix-histogram-no-labels",
	"prom-matrix-histogram-partitioned",
	"prom-vector-histogram-no-labels",
	"prom-vector",
	"prom-string",
	"prom-scalar",
	"prom-series",
	"prom-warnings",
	"prom-error",
	"prom-exemplars-a",
	"prom-exemplars-b",
	"prom-exemplars-diff-labels",
	"loki-streams-a",
	"loki-streams-b",
	"loki-streams-c",
}
// TestReadPromFrames runs every testdata scenario through the converter with
// default options and compares the result against its golden frame file.
func TestReadPromFrames(t *testing.T) {
	for _, scenario := range files {
		t.Run(scenario, runScenario(scenario, Options{}))
	}
}
// runScenario returns a subtest that parses testdata/<name>.json with the
// given options and checks the converted result against its golden frame
// file. Scenarios with "error" in their name are expected to fail parsing.
func runScenario(name string, opts Options) func(t *testing.T) {
	return func(t *testing.T) {
		// Safe to disable, this is a test.
		// nolint:gosec
		f, err := os.Open(path.Join("testdata", name+".json"))
		require.NoError(t, err)
		iter := jsoniter.Parse(sdkjsoniter.ConfigDefault, f, 1024)
		rsp := ReadPrometheusStyleResult(iter, opts)
		expectError := strings.Contains(name, "error")
		if expectError {
			require.Error(t, rsp.Error)
			return
		}
		require.NoError(t, rsp.Error)
		golden := name + "-frame"
		experimental.CheckGoldenJSONResponse(t, "testdata", golden, &rsp, update)
	}
}
// TestTimeConversions covers the float-seconds timestamp parser and the Loki
// nanosecond-string timestamp parser.
func TestTimeConversions(t *testing.T) {
	// include millisecond precision
	expected := time.Date(2020, time.September, 14, 15, 22, 25, 479000000, time.UTC)
	assert.Equal(t, expected, timeFromFloat(1600096945.479))

	// Loki date parsing
	ti, err := timeFromLokiString("1645030246277587968")
	require.NoError(t, err)
	assert.Equal(t, time.Date(2022, time.February, 16, 16, 50, 46, 277587968, time.UTC), ti)

	ti, err = timeFromLokiString("2000000000000000000")
	require.NoError(t, err)
	assert.Equal(t, time.Date(2033, time.May, 18, 3, 33, 20, 0, time.UTC), ti)
}

View File

@@ -0,0 +1,205 @@
// 🌟 This was machine generated. Do not edit. 🌟
//
// Frame[0] {
// "typeVersion": [
// 0,
// 0
// ],
// "custom": {
// "stats": {
// "ingester": {
// "compressedBytes": 0,
// "decompressedBytes": 0,
// "decompressedLines": 0,
// "headChunkBytes": 0,
// "headChunkLines": 0,
// "totalBatches": 0,
// "totalChunksMatched": 0,
// "totalDuplicates": 0,
// "totalLinesSent": 0,
// "totalReached": 0
// },
// "store": {
// "chunksDownloadTime": 0.000390958,
// "compressedBytes": 31432,
// "decompressedBytes": 7772,
// "decompressedLines": 55,
// "headChunkBytes": 0,
// "headChunkLines": 0,
// "totalChunksDownloaded": 2,
// "totalChunksRef": 2,
// "totalDuplicates": 0
// },
// "summary": {
// "bytesProcessedPerSecond": 3507022,
// "execTime": 0.002216125,
// "linesProcessedPerSecond": 24818,
// "totalBytesProcessed": 7772,
// "totalLinesProcessed": 55
// }
// }
// }
// }
// Name:
// Dimensions: 4 Fields by 6 Rows
// +---------------------------------------+-----------------------------------------+------------------+---------------------+
// | Name: __labels | Name: Time | Name: Line | Name: TS |
// | Labels: | Labels: | Labels: | Labels: |
// | Type: []json.RawMessage | Type: []time.Time | Type: []string | Type: []string |
// +---------------------------------------+-----------------------------------------+------------------+---------------------+
// | {"level":"error","location":"moon🌙"} | 2022-02-16 16:50:44.81075712 +0000 UTC | log line error 1 | 1645030244810757120 |
// | {"level":"info","location":"moon🌙"} | 2022-02-16 16:50:47.02773504 +0000 UTC | log line info 1 | 1645030247027735040 |
// | {"level":"info","location":"moon🌙"} | 2022-02-16 16:50:46.277587968 +0000 UTC | log line info 2 | 1645030246277587968 |
// | {"level":"info","location":"moon🌙"} | 2022-02-16 16:50:46.277587968 +0000 UTC | log line info 2 | 1645030246277587968 |
// | {"level":"info","location":"moon🌙"} | 2022-02-16 16:50:45.539423744 +0000 UTC | log line info 3 | 1645030245539423744 |
// | {"level":"info","location":"moon🌙"} | 2022-02-16 16:50:44.091700992 +0000 UTC | log line info 4 | 1645030244091700992 |
// +---------------------------------------+-----------------------------------------+------------------+---------------------+
//
//
// 🌟 This was machine generated. Do not edit. 🌟
{
"status": 200,
"frames": [
{
"schema": {
"meta": {
"typeVersion": [
0,
0
],
"custom": {
"stats": {
"ingester": {
"compressedBytes": 0,
"decompressedBytes": 0,
"decompressedLines": 0,
"headChunkBytes": 0,
"headChunkLines": 0,
"totalBatches": 0,
"totalChunksMatched": 0,
"totalDuplicates": 0,
"totalLinesSent": 0,
"totalReached": 0
},
"store": {
"chunksDownloadTime": 0.000390958,
"compressedBytes": 31432,
"decompressedBytes": 7772,
"decompressedLines": 55,
"headChunkBytes": 0,
"headChunkLines": 0,
"totalChunksDownloaded": 2,
"totalChunksRef": 2,
"totalDuplicates": 0
},
"summary": {
"bytesProcessedPerSecond": 3507022,
"execTime": 0.002216125,
"linesProcessedPerSecond": 24818,
"totalBytesProcessed": 7772,
"totalLinesProcessed": 55
}
}
}
},
"fields": [
{
"name": "__labels",
"type": "other",
"typeInfo": {
"frame": "json.RawMessage"
}
},
{
"name": "Time",
"type": "time",
"typeInfo": {
"frame": "time.Time"
}
},
{
"name": "Line",
"type": "string",
"typeInfo": {
"frame": "string"
}
},
{
"name": "TS",
"type": "string",
"typeInfo": {
"frame": "string"
}
}
]
},
"data": {
"values": [
[
{
"level": "error",
"location": "moon🌙"
},
{
"level": "info",
"location": "moon🌙"
},
{
"level": "info",
"location": "moon🌙"
},
{
"level": "info",
"location": "moon🌙"
},
{
"level": "info",
"location": "moon🌙"
},
{
"level": "info",
"location": "moon🌙"
}
],
[
1645030244810,
1645030247027,
1645030246277,
1645030246277,
1645030245539,
1645030244091
],
[
"log line error 1",
"log line info 1",
"log line info 2",
"log line info 2",
"log line info 3",
"log line info 4"
],
[
"1645030244810757120",
"1645030247027735040",
"1645030246277587968",
"1645030246277587968",
"1645030245539423744",
"1645030244091700992"
]
],
"nanos": [
null,
[
757120,
735040,
587968,
587968,
423744,
700992
],
null,
null
]
}
}
]
}

View File

@@ -0,0 +1,81 @@
{
"status": "success",
"data": {
"resultType": "streams",
"result": [
{
"stream": {
"level": "error",
"location": "moon🌙"
},
"values": [
[
"1645030244810757120",
"log line error 1"
]
]
},
{
"stream": {
"level": "info",
"location": "moon🌙"
},
"values": [
[
"1645030247027735040",
"log line info 1"
],
[
"1645030246277587968",
"log line info 2"
],
[
"1645030246277587968",
"log line info 2"
],
[
"1645030245539423744",
"log line info 3"
],
[
"1645030244091700992",
"log line info 4"
]
]
}
],
"stats": {
"summary": {
"bytesProcessedPerSecond": 3507022,
"linesProcessedPerSecond": 24818,
"totalBytesProcessed": 7772,
"totalLinesProcessed": 55,
"execTime": 0.002216125
},
"store": {
"totalChunksRef": 2,
"totalChunksDownloaded": 2,
"chunksDownloadTime": 0.000390958,
"headChunkBytes": 0,
"headChunkLines": 0,
"decompressedBytes": 7772,
"decompressedLines": 55,
"compressedBytes": 31432,
"totalDuplicates": 0
},
"ingester": {
"totalReached": 0,
"totalChunksMatched": 0,
"totalBatches": 0,
"totalLinesSent": 0,
"headChunkBytes": 0,
"headChunkLines": 0,
"decompressedBytes": 0,
"decompressedLines": 0,
"compressedBytes": 0,
"totalDuplicates": 0
}
}
}
}

View File

@@ -0,0 +1,196 @@
// 🌟 This was machine generated. Do not edit. 🌟
//
// Frame[0] {
// "typeVersion": [
// 0,
// 0
// ],
// "custom": {
// "stats": {
// "ingester": {
// "compressedBytes": 0,
// "decompressedBytes": 0,
// "decompressedLines": 0,
// "headChunkBytes": 0,
// "headChunkLines": 0,
// "totalBatches": 0,
// "totalChunksMatched": 0,
// "totalDuplicates": 0,
// "totalLinesSent": 0,
// "totalReached": 0
// },
// "store": {
// "chunksDownloadTime": 0.000390958,
// "compressedBytes": 31432,
// "decompressedBytes": 7772,
// "decompressedLines": 55,
// "headChunkBytes": 0,
// "headChunkLines": 0,
// "totalChunksDownloaded": 2,
// "totalChunksRef": 2,
// "totalDuplicates": 0
// },
// "summary": {
// "bytesProcessedPerSecond": 3507022,
// "execTime": 0.002216125,
// "linesProcessedPerSecond": 24818,
// "totalBytesProcessed": 7772,
// "totalLinesProcessed": 55
// }
// }
// }
// }
// Name:
// Dimensions: 4 Fields by 5 Rows
// +-------------------------------------+-----------------------------------------+------------------+---------------------+
// | Name: __labels | Name: Time | Name: Line | Name: TS |
// | Labels: | Labels: | Labels: | Labels: |
// | Type: []json.RawMessage | Type: []time.Time | Type: []string | Type: []string |
// +-------------------------------------+-----------------------------------------+------------------+---------------------+
// | {"level":"error","location":"moon"} | 2022-02-16 16:50:44.81075712 +0000 UTC | log line error 1 | 1645030244810757120 |
// | {"level":"info","location":"moon"} | 2022-02-16 16:50:47.02773504 +0000 UTC | log line info 1 | 1645030247027735040 |
// | {"level":"info","location":"moon"} | 2022-02-16 16:50:46.277587968 +0000 UTC | log line info 2 | 1645030246277587968 |
// | {"level":"info","location":"moon"} | 2022-02-16 16:50:45.539423744 +0000 UTC | log line info 3 | 1645030245539423744 |
// | {"level":"info","location":"moon"} | 2022-02-16 16:50:44.091700992 +0000 UTC | log line info 4 | 1645030244091700992 |
// +-------------------------------------+-----------------------------------------+------------------+---------------------+
//
//
// 🌟 This was machine generated. Do not edit. 🌟
{
"status": 200,
"frames": [
{
"schema": {
"meta": {
"typeVersion": [
0,
0
],
"custom": {
"stats": {
"ingester": {
"compressedBytes": 0,
"decompressedBytes": 0,
"decompressedLines": 0,
"headChunkBytes": 0,
"headChunkLines": 0,
"totalBatches": 0,
"totalChunksMatched": 0,
"totalDuplicates": 0,
"totalLinesSent": 0,
"totalReached": 0
},
"store": {
"chunksDownloadTime": 0.000390958,
"compressedBytes": 31432,
"decompressedBytes": 7772,
"decompressedLines": 55,
"headChunkBytes": 0,
"headChunkLines": 0,
"totalChunksDownloaded": 2,
"totalChunksRef": 2,
"totalDuplicates": 0
},
"summary": {
"bytesProcessedPerSecond": 3507022,
"execTime": 0.002216125,
"linesProcessedPerSecond": 24818,
"totalBytesProcessed": 7772,
"totalLinesProcessed": 55
}
}
}
},
"fields": [
{
"name": "__labels",
"type": "other",
"typeInfo": {
"frame": "json.RawMessage"
}
},
{
"name": "Time",
"type": "time",
"typeInfo": {
"frame": "time.Time"
}
},
{
"name": "Line",
"type": "string",
"typeInfo": {
"frame": "string"
}
},
{
"name": "TS",
"type": "string",
"typeInfo": {
"frame": "string"
}
}
]
},
"data": {
"values": [
[
{
"level": "error",
"location": "moon"
},
{
"level": "info",
"location": "moon"
},
{
"level": "info",
"location": "moon"
},
{
"level": "info",
"location": "moon"
},
{
"level": "info",
"location": "moon"
}
],
[
1645030244810,
1645030247027,
1645030246277,
1645030245539,
1645030244091
],
[
"log line error 1",
"log line info 1",
"log line info 2",
"log line info 3",
"log line info 4"
],
[
"1645030244810757120",
"1645030247027735040",
"1645030246277587968",
"1645030245539423744",
"1645030244091700992"
]
],
"nanos": [
null,
[
757120,
735040,
587968,
423744,
700992
],
null,
null
]
}
}
]
}

View File

@@ -0,0 +1,76 @@
{
"status": "success",
"data": {
"resultType": "streams",
"result": [
{
"stream": {
"level": "error",
"location": "moon"
},
"values": [
[
"1645030244810757120",
"log line error 1"
]
]
},
{
"stream": {
"level": "info",
"location": "moon"
},
"values": [
[
"1645030247027735040",
"log line info 1"
],
[
"1645030246277587968",
"log line info 2"
],
[
"1645030245539423744",
"log line info 3"
],
[
"1645030244091700992",
"log line info 4"
]
]
}
],
"stats": {
"summary": {
"bytesProcessedPerSecond": 3507022,
"linesProcessedPerSecond": 24818,
"totalBytesProcessed": 7772,
"totalLinesProcessed": 55,
"execTime": 0.002216125
},
"store": {
"totalChunksRef": 2,
"totalChunksDownloaded": 2,
"chunksDownloadTime": 0.000390958,
"headChunkBytes": 0,
"headChunkLines": 0,
"decompressedBytes": 7772,
"decompressedLines": 55,
"compressedBytes": 31432,
"totalDuplicates": 0
},
"ingester": {
"totalReached": 0,
"totalChunksMatched": 0,
"totalBatches": 0,
"totalLinesSent": 0,
"headChunkBytes": 0,
"headChunkLines": 0,
"decompressedBytes": 0,
"decompressedLines": 0,
"compressedBytes": 0,
"totalDuplicates": 0
}
}
}
}

View File

@@ -0,0 +1,90 @@
// 🌟 This was machine generated. Do not edit. 🌟
//
// Frame[0] {
// "typeVersion": [
// 0,
// 0
// ]
// }
// Name:
// Dimensions: 4 Fields by 2 Rows
// +-------------------------+-------------------------------+----------------+---------------------+
// | Name: __labels | Name: Time | Name: Line | Name: TS |
// | Labels: | Labels: | Labels: | Labels: |
// | Type: []json.RawMessage | Type: []time.Time | Type: []string | Type: []string |
// +-------------------------+-------------------------------+----------------+---------------------+
// | {"label1":"value1"} | 2022-06-17 06:49:51 +0000 UTC | text1 | 1655448591000000000 |
// | {"label2":"value2"} | 2022-06-17 06:49:52 +0000 UTC | text2 | 1655448592000000000 |
// +-------------------------+-------------------------------+----------------+---------------------+
//
//
// 🌟 This was machine generated. Do not edit. 🌟
{
"status": 200,
"frames": [
{
"schema": {
"meta": {
"typeVersion": [
0,
0
]
},
"fields": [
{
"name": "__labels",
"type": "other",
"typeInfo": {
"frame": "json.RawMessage"
}
},
{
"name": "Time",
"type": "time",
"typeInfo": {
"frame": "time.Time"
}
},
{
"name": "Line",
"type": "string",
"typeInfo": {
"frame": "string"
}
},
{
"name": "TS",
"type": "string",
"typeInfo": {
"frame": "string"
}
}
]
},
"data": {
"values": [
[
{
"label1": "value1"
},
{
"label2": "value2"
}
],
[
1655448591000,
1655448592000
],
[
"text1",
"text2"
],
[
"1655448591000000000",
"1655448592000000000"
]
]
}
}
]
}

View File

@@ -0,0 +1,17 @@
{
"status": "success",
"data": {
"resultType": "streams",
"result": [
{
"stream": { "label1": "value1" },
"values": [["1655448591000000000", "text1"]
]
},
{
"stream": { "label2": "value2"},
"values": [["1655448592000000000", "text2"]]
}
]
}
}

View File

@@ -0,0 +1,97 @@
// 🌟 This was machine generated. Do not edit. 🌟
//
// Frame[0] {
// "typeVersion": [
// 0,
// 0
// ]
// }
// Name:
// Dimensions: 5 Fields by 1 Rows
// +----------------------------------------+-----------------------------------+----------------+---------------------+--------------------------------+
// | Name: __labels | Name: Time | Name: Line | Name: TS | Name: __labelTypes |
// | Labels: | Labels: | Labels: | Labels: | Labels: |
// | Type: []json.RawMessage | Type: []time.Time | Type: []string | Type: []string | Type: []json.RawMessage |
// +----------------------------------------+-----------------------------------+----------------+---------------------+--------------------------------+
// | {"label":"value","nonIndexed":"value"} | 2023-10-11 11:55:10.236 +0000 UTC | text | 1697025310236000000 | {"nonIndexed":"S","label":"I"} |
// +----------------------------------------+-----------------------------------+----------------+---------------------+--------------------------------+
//
//
// 🌟 This was machine generated. Do not edit. 🌟
{
"status": 200,
"frames": [
{
"schema": {
"meta": {
"typeVersion": [
0,
0
]
},
"fields": [
{
"name": "__labels",
"type": "other",
"typeInfo": {
"frame": "json.RawMessage"
}
},
{
"name": "Time",
"type": "time",
"typeInfo": {
"frame": "time.Time"
}
},
{
"name": "Line",
"type": "string",
"typeInfo": {
"frame": "string"
}
},
{
"name": "TS",
"type": "string",
"typeInfo": {
"frame": "string"
}
},
{
"name": "__labelTypes",
"type": "other",
"typeInfo": {
"frame": "json.RawMessage"
}
}
]
},
"data": {
"values": [
[
{
"label": "value",
"nonIndexed": "value"
}
],
[
1697025310236
],
[
"text"
],
[
"1697025310236000000"
],
[
{
"nonIndexed": "S",
"label": "I"
}
]
]
}
}
]
}

View File

@@ -0,0 +1,27 @@
{
"status": "success",
"data": {
"encodingFlags": [
"categorize-labels"
],
"resultType": "streams",
"result": [
{
"stream": {
"label": "value"
},
"values": [
[
"1697025310236000000",
"text",
{
"structuredMetadata": {
"nonIndexed": "value"
}
}
]
]
}
]
}
}

View File

@@ -0,0 +1,5 @@
//
// ERROR: bad_data: invalid parameter "start": cannot parse "" to a valid timestamp🌟 This was machine generated. Do not edit. 🌟
{
"error": "bad_data: invalid parameter \"start\": cannot parse \"\" to a valid timestamp"
}

View File

@@ -0,0 +1,5 @@
{
"status": "error",
"errorType": "bad_data",
"error": "invalid parameter \"start\": cannot parse \"\" to a valid timestamp"
}

View File

@@ -0,0 +1,116 @@
{
"frames": [
{
"schema": {
"meta": {
"custom": {
"resultType": "exemplar"
}
},
"fields": [
{
"name": "Time",
"type": "time",
"typeInfo": {
"frame": "time.Time"
}
},
{
"name": "Value",
"type": "number",
"typeInfo": {
"frame": "float64"
},
"labels": {
"__name__": "test_exemplar_metric_total",
"instance": "localhost:8090",
"job": "prometheus",
"service": "bar"
}
},
{
"name": "traceID",
"type": "string",
"typeInfo": {
"frame": "string"
}
},
{
"name": "a",
"type": "string",
"typeInfo": {
"frame": "string"
}
}
]
},
"data": {
"values": [
[
1600096945479
],
[
6
],
[
"EpTxMJ40fUus7aGY"
],
[
"not in next"
]
]
}
},
{
"schema": {
"meta": {
"custom": {
"resultType": "exemplar"
}
},
"fields": [
{
"name": "Time",
"type": "time",
"typeInfo": {
"frame": "time.Time"
}
},
{
"name": "Value",
"type": "number",
"typeInfo": {
"frame": "float64"
},
"labels": {
"__name__": "test_exemplar_metric_total",
"instance": "localhost:8090",
"job": "prometheus",
"service": "foo"
}
},
{
"name": "traceID",
"type": "string",
"typeInfo": {
"frame": "string"
}
}
]
},
"data": {
"values": [
[
1600096955479,1600096965489
],
[
19,20
],
[
"Olp9XHlq763ccsfa","hCtjygkIHwAN9vs4"
]
]
}
}
]
}

View File

@@ -0,0 +1,173 @@
// 🌟 This was machine generated. Do not edit. 🌟
//
// Frame[0] {
// "typeVersion": [
// 0,
// 0
// ],
// "custom": {
// "resultType": "exemplar"
// }
// }
// Name:
// Dimensions: 4 Fields by 1 Rows
// +-----------------------------------+---------------------------------------------------------------------------------------------------+------------------+----------------+
// | Name: Time | Name: Value | Name: traceID | Name: a |
// | Labels: | Labels: __name__=test_exemplar_metric_total, instance=localhost:8090, job=prometheus, service=bar | Labels: | Labels: |
// | Type: []time.Time | Type: []float64 | Type: []string | Type: []string |
// +-----------------------------------+---------------------------------------------------------------------------------------------------+------------------+----------------+
// | 2020-09-14 15:22:25.479 +0000 UTC | 6 | EpTxMJ40fUus7aGY | not in next |
// +-----------------------------------+---------------------------------------------------------------------------------------------------+------------------+----------------+
//
//
//
// Frame[1] {
// "typeVersion": [
// 0,
// 0
// ],
// "custom": {
// "resultType": "exemplar"
// }
// }
// Name:
// Dimensions: 3 Fields by 2 Rows
// +-----------------------------------+---------------------------------------------------------------------------------------------------+------------------+
// | Name: Time | Name: Value | Name: traceID |
// | Labels: | Labels: __name__=test_exemplar_metric_total, instance=localhost:8090, job=prometheus, service=foo | Labels: |
// | Type: []time.Time | Type: []float64 | Type: []string |
// +-----------------------------------+---------------------------------------------------------------------------------------------------+------------------+
// | 2020-09-14 15:22:35.479 +0000 UTC | 19 | Olp9XHlq763ccsfa |
// | 2020-09-14 15:22:45.489 +0000 UTC | 20 | hCtjygkIHwAN9vs4 |
// +-----------------------------------+---------------------------------------------------------------------------------------------------+------------------+
//
//
// 🌟 This was machine generated. Do not edit. 🌟
{
"status": 200,
"frames": [
{
"schema": {
"meta": {
"typeVersion": [
0,
0
],
"custom": {
"resultType": "exemplar"
}
},
"fields": [
{
"name": "Time",
"type": "time",
"typeInfo": {
"frame": "time.Time"
}
},
{
"name": "Value",
"type": "number",
"typeInfo": {
"frame": "float64"
},
"labels": {
"__name__": "test_exemplar_metric_total",
"instance": "localhost:8090",
"job": "prometheus",
"service": "bar"
}
},
{
"name": "traceID",
"type": "string",
"typeInfo": {
"frame": "string"
}
},
{
"name": "a",
"type": "string",
"typeInfo": {
"frame": "string"
}
}
]
},
"data": {
"values": [
[
1600096945479
],
[
6
],
[
"EpTxMJ40fUus7aGY"
],
[
"not in next"
]
]
}
},
{
"schema": {
"meta": {
"typeVersion": [
0,
0
],
"custom": {
"resultType": "exemplar"
}
},
"fields": [
{
"name": "Time",
"type": "time",
"typeInfo": {
"frame": "time.Time"
}
},
{
"name": "Value",
"type": "number",
"typeInfo": {
"frame": "float64"
},
"labels": {
"__name__": "test_exemplar_metric_total",
"instance": "localhost:8090",
"job": "prometheus",
"service": "foo"
}
},
{
"name": "traceID",
"type": "string",
"typeInfo": {
"frame": "string"
}
}
]
},
"data": {
"values": [
[
1600096955479,
1600096965489
],
[
19,
20
],
[
"Olp9XHlq763ccsfa",
"hCtjygkIHwAN9vs4"
]
]
}
}
]
}

View File

@@ -0,0 +1,39 @@
🌟 This was machine generated. Do not edit. 🌟
Frame[0] {
"custom": {
"resultType": "exemplar"
}
}
Name:
Dimensions: 4 Fields by 1 Rows
+-----------------------------------+---------------------------------------------------------------------------------------------------+------------------+----------------+
| Name: Time | Name: Value | Name: traceID | Name: a |
| Labels: | Labels: __name__=test_exemplar_metric_total, instance=localhost:8090, job=prometheus, service=bar | Labels: | Labels: |
| Type: []time.Time | Type: []float64 | Type: []string | Type: []string |
+-----------------------------------+---------------------------------------------------------------------------------------------------+------------------+----------------+
| 2020-09-14 15:22:25.479 +0000 UTC | 6 | EpTxMJ40fUus7aGY | not in next |
+-----------------------------------+---------------------------------------------------------------------------------------------------+------------------+----------------+
Frame[1] {
"custom": {
"resultType": "exemplar"
}
}
Name:
Dimensions: 3 Fields by 2 Rows
+-----------------------------------+---------------------------------------------------------------------------------------------------+------------------+
| Name: Time | Name: Value | Name: traceID |
| Labels: | Labels: __name__=test_exemplar_metric_total, instance=localhost:8090, job=prometheus, service=foo | Labels: |
| Type: []time.Time | Type: []float64 | Type: []string |
+-----------------------------------+---------------------------------------------------------------------------------------------------+------------------+
| 2020-09-14 15:22:35.479 +0000 UTC | 19 | Olp9XHlq763ccsfa |
| 2020-09-14 15:22:45.489 +0000 UTC | 20 | hCtjygkIHwAN9vs4 |
+-----------------------------------+---------------------------------------------------------------------------------------------------+------------------+
====== TEST DATA RESPONSE (arrow base64) ======
FRAME=QVJST1cxAAD/////8AIAABAAAAAAAAoADgAMAAsABAAKAAAAFAAAAAAAAAEEAAoADAAAAAgABAAKAAAACAAAAJgAAAADAAAATAAAACgAAAAEAAAApP3//wgAAAAMAAAAAAAAAAAAAAAFAAAAcmVmSWQAAADE/f//CAAAAAwAAAAAAAAAAAAAAAQAAABuYW1lAAAAAOT9//8IAAAAMAAAACQAAAB7ImN1c3RvbSI6eyJyZXN1bHRUeXBlIjoiZXhlbXBsYXIifX0AAAAABAAAAG1ldGEAAAAABAAAALQBAAC4AAAAWAAAAAQAAABu/v//FAAAADgAAAA4AAAAAAAABTQAAAABAAAABAAAAFz+//8IAAAADAAAAAEAAABhAAAABAAAAG5hbWUAAAAAAAAAAKz///8BAAAAYQAAAL7+//8UAAAAPAAAAEAAAAAAAAAFPAAAAAEAAAAEAAAArP7//wgAAAAQAAAABwAAAHRyYWNlSUQABAAAAG5hbWUAAAAAAAAAAAQABAAEAAAABwAAAHRyYWNlSUQAGv///xQAAADIAAAAyAAAAAAAAAPIAAAAAgAAACwAAAAEAAAADP///wgAAAAQAAAABQAAAFZhbHVlAAAABAAAAG5hbWUAAAAAMP///wgAAAB0AAAAaAAAAHsiX19uYW1lX18iOiJ0ZXN0X2V4ZW1wbGFyX21ldHJpY190b3RhbCIsImluc3RhbmNlIjoibG9jYWxob3N0OjgwOTAiLCJqb2IiOiJwcm9tZXRoZXVzIiwic2VydmljZSI6ImJhciJ9AAAAAAYAAABsYWJlbHMAAAAAAACK////AAACAAUAAABWYWx1ZQASABgAFAAAABMADAAAAAgABAASAAAAFAAAAEQAAABMAAAAAAAACkwAAAABAAAADAAAAAgADAAIAAQACAAAAAgAAAAQAAAABAAAAFRpbWUAAAAABAAAAG5hbWUAAAAAAAAAAAAABgAIAAYABgAAAAAAAwAEAAAAVGltZQAAAAAAAAAA/////zgBAAAUAAAAAAAAAAwAFgAUABMADAAEAAwAAABAAAAAAAAAABQAAAAAAAADBAAKABgADAAIAAQACgAAABQAAAC4AAAAAQAAAAAAAAAAAAAACgAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAgAAAAAAAAACAAAAAAAAAAAAAAAAAAAAAgAAAAAAAAACAAAAAAAAAAQAAAAAAAAAAAAAAAAAAAAEAAAAAAAAAAIAAAAAAAAABgAAAAAAAAAEAAAAAAAAAAoAAAAAAAAAAAAAAAAAAAAKAAAAAAAAAAIAAAAAAAAADAAAAAAAAAACwAAAAAAAAAAAAAABAAAAAEAAAAAAAAAAAAAAAAAAAABAAAAAAAAAAAAAAAAAAAAAQAAAAAAAAAAAAAAAAAAAAEAAAAAAAAAAAAAAAAAAADAn3a5sa80FgAAAAAAABhAAAAAABAAAABFcFR4TUo0MGZVdXM3YUdZAAAAAAsAAABub3QgaW4gbmV4dAAAAAAAEAAAAAwAFAASAAwACAAEAAwAAAAQAAAALAAAADgAAAAAAAQAAQAAAAADAAAAAAAAQAEAAAAAAABAAAAAAAAAAAAAAAAAAAAAAAAKAAwAAAAIAAQACgAAAAgAAACYAAAAAwAAAEwAAAAoAAAABAAAAKT9//8IAAAADAAAAAAAAAAAAAAABQAAAHJlZklkAAAAxP3//wgAAAAMAAAAAAAAAAAAAAAEAAAAbmFtZQAAAADk/f//CAAAADAAAAAkAAAAeyJjdXN0b20iOnsicmVzdWx0VHlwZSI6ImV4ZW1wbGFyIn19AAAAAAQAAABtZXRhAAAAAAQAAAC0AQAAuAAAAFgAAAAEAAAAbv7//xQAAAA4AAAAOAAAAAAAAAU0AAAAAQAAAAQAAABc/v//CAAAAAwAAAABAAAAYQAAAAQAAABuYW1lAAAAAAAAAACs////AQAAAGEAAA
C+/v//FAAAADwAAABAAAAAAAAABTwAAAABAAAABAAAAKz+//8IAAAAEAAAAAcAAAB0cmFjZUlEAAQAAABuYW1lAAAAAAAAAAAEAAQABAAAAAcAAAB0cmFjZUlEABr///8UAAAAyAAAAMgAAAAAAAADyAAAAAIAAAAsAAAABAAAAAz///8IAAAAEAAAAAUAAABWYWx1ZQAAAAQAAABuYW1lAAAAADD///8IAAAAdAAAAGgAAAB7Il9fbmFtZV9fIjoidGVzdF9leGVtcGxhcl9tZXRyaWNfdG90YWwiLCJpbnN0YW5jZSI6ImxvY2FsaG9zdDo4MDkwIiwiam9iIjoicHJvbWV0aGV1cyIsInNlcnZpY2UiOiJiYXIifQAAAAAGAAAAbGFiZWxzAAAAAAAAiv///wAAAgAFAAAAVmFsdWUAEgAYABQAAAATAAwAAAAIAAQAEgAAABQAAABEAAAATAAAAAAAAApMAAAAAQAAAAwAAAAIAAwACAAEAAgAAAAIAAAAEAAAAAQAAABUaW1lAAAAAAQAAABuYW1lAAAAAAAAAAAAAAYACAAGAAYAAAAAAAMABAAAAFRpbWUAAAAAGAMAAEFSUk9XMQ==
FRAME=QVJST1cxAAD/////mAIAABAAAAAAAAoADgAMAAsABAAKAAAAFAAAAAAAAAEEAAoADAAAAAgABAAKAAAACAAAAJgAAAADAAAATAAAACgAAAAEAAAA+P3//wgAAAAMAAAAAAAAAAAAAAAFAAAAcmVmSWQAAAAY/v//CAAAAAwAAAAAAAAAAAAAAAQAAABuYW1lAAAAADj+//8IAAAAMAAAACQAAAB7ImN1c3RvbSI6eyJyZXN1bHRUeXBlIjoiZXhlbXBsYXIifX0AAAAABAAAAG1ldGEAAAAAAwAAAGABAABkAAAABAAAAL7+//8UAAAAPAAAAEAAAAAAAAAFPAAAAAEAAAAEAAAArP7//wgAAAAQAAAABwAAAHRyYWNlSUQABAAAAG5hbWUAAAAAAAAAAAQABAAEAAAABwAAAHRyYWNlSUQAGv///xQAAADIAAAAyAAAAAAAAAPIAAAAAgAAACwAAAAEAAAADP///wgAAAAQAAAABQAAAFZhbHVlAAAABAAAAG5hbWUAAAAAMP///wgAAAB0AAAAaAAAAHsiX19uYW1lX18iOiJ0ZXN0X2V4ZW1wbGFyX21ldHJpY190b3RhbCIsImluc3RhbmNlIjoibG9jYWxob3N0OjgwOTAiLCJqb2IiOiJwcm9tZXRoZXVzIiwic2VydmljZSI6ImZvbyJ9AAAAAAYAAABsYWJlbHMAAAAAAACK////AAACAAUAAABWYWx1ZQASABgAFAAAABMADAAAAAgABAASAAAAFAAAAEQAAABMAAAAAAAACkwAAAABAAAADAAAAAgADAAIAAQACAAAAAgAAAAQAAAABAAAAFRpbWUAAAAABAAAAG5hbWUAAAAAAAAAAAAABgAIAAYABgAAAAAAAwAEAAAAVGltZQAAAAD/////+AAAABQAAAAAAAAADAAWABQAEwAMAAQADAAAAFAAAAAAAAAAFAAAAAAAAAMEAAoAGAAMAAgABAAKAAAAFAAAAIgAAAACAAAAAAAAAAAAAAAHAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAEAAAAAAAAAAQAAAAAAAAAAAAAAAAAAAAEAAAAAAAAAAQAAAAAAAAACAAAAAAAAAAAAAAAAAAAAAgAAAAAAAAAAwAAAAAAAAAMAAAAAAAAAAgAAAAAAAAAAAAAAADAAAAAgAAAAAAAAAAAAAAAAAAAAIAAAAAAAAAAAAAAAAAAAACAAAAAAAAAAAAAAAAAAAAwIOCDbSvNBZA/iZitq80FgAAAAAAADNAAAAAAAAANEAAAAAAEAAAACAAAAAAAAAAT2xwOVhIbHE3NjNjY3NmYWhDdGp5Z2tJSHdBTjl2czQQAAAADAAUABIADAAIAAQADAAAABAAAAAsAAAAPAAAAAAABAABAAAAqAIAAAAAAAAAAQAAAAAAAFAAAAAAAAAAAAAAAAAAAAAAAAAAAAAKAAwAAAAIAAQACgAAAAgAAACYAAAAAwAAAEwAAAAoAAAABAAAAPj9//8IAAAADAAAAAAAAAAAAAAABQAAAHJlZklkAAAAGP7//wgAAAAMAAAAAAAAAAAAAAAEAAAAbmFtZQAAAAA4/v//CAAAADAAAAAkAAAAeyJjdXN0b20iOnsicmVzdWx0VHlwZSI6ImV4ZW1wbGFyIn19AAAAAAQAAABtZXRhAAAAAAMAAABgAQAAZAAAAAQAAAC+/v//FAAAADwAAABAAAAAAAAABTwAAAABAAAABAAAAKz+//8IAAAAEAAAAAcAAAB0cmFjZUlEAAQAAABuYW1lAAAAAAAAAAAEAAQABAAAAAcAAAB0cmFjZUlEABr///8UAAAAyAAAAMgAAAAAAAADyAAAAAIAAAAsAAAABAAAAAz///8IAAAAEAAAAAUAAABWYWx1ZQAAAAQAAABuYW1lAAAAADD///8IAAAAdAAAAGgAAAB7Il9fbmFtZV9fIjoidGVzdF9leGVtcGxhcl9tZXRyaWNfdG
90YWwiLCJpbnN0YW5jZSI6ImxvY2FsaG9zdDo4MDkwIiwiam9iIjoicHJvbWV0aGV1cyIsInNlcnZpY2UiOiJmb28ifQAAAAAGAAAAbGFiZWxzAAAAAAAAiv///wAAAgAFAAAAVmFsdWUAEgAYABQAAAATAAwAAAAIAAQAEgAAABQAAABEAAAATAAAAAAAAApMAAAAAQAAAAwAAAAIAAwACAAEAAgAAAAIAAAAEAAAAAQAAABUaW1lAAAAAAQAAABuYW1lAAAAAAAAAAAAAAYACAAGAAYAAAAAAAMABAAAAFRpbWUAAAAAyAIAAEFSUk9XMQ==

View File

@@ -0,0 +1,47 @@
{
"status": "success",
"data": [
{
"seriesLabels": {
"__name__": "test_exemplar_metric_total",
"instance": "localhost:8090",
"job": "prometheus",
"service": "bar"
},
"exemplars": [
{
"labels": {
"traceID": "EpTxMJ40fUus7aGY",
"a": "not in next"
},
"value": "6",
"timestamp": 1600096945.479
}
]
},
{
"seriesLabels": {
"__name__": "test_exemplar_metric_total",
"instance": "localhost:8090",
"job": "prometheus",
"service": "foo"
},
"exemplars": [
{
"labels": {
"traceID": "Olp9XHlq763ccsfa"
},
"value": "19",
"timestamp": 1600096955.479
},
{
"labels": {
"traceID": "hCtjygkIHwAN9vs4"
},
"value": "20",
"timestamp": 1600096965.489
}
]
}
]
}

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,93 @@
// 🌟 This was machine generated. Do not edit. 🌟
//
// Frame[0] {
// "typeVersion": [
// 0,
// 0
// ],
// "custom": {
// "resultType": "exemplar"
// }
// }
// Name:
// Dimensions: 4 Fields by 2 Rows
// +-----------------------------------+-----------------+------------------+------------------+
// | Name: Time | Name: Value | Name: traceID | Name: ztraceID |
// | Labels: | Labels: | Labels: | Labels: |
// | Type: []time.Time | Type: []float64 | Type: []string | Type: []string |
// +-----------------------------------+-----------------+------------------+------------------+
// | 2020-09-14 15:22:35.479 +0000 UTC | 19 | Olp9XHlq763ccsfa | |
// | 2020-09-14 15:22:45.489 +0000 UTC | 20 | | hCtjygkIHwAN9vs4 |
// +-----------------------------------+-----------------+------------------+------------------+
//
//
// 🌟 This was machine generated. Do not edit. 🌟
{
"status": 200,
"frames": [
{
"schema": {
"meta": {
"typeVersion": [
0,
0
],
"custom": {
"resultType": "exemplar"
}
},
"fields": [
{
"name": "Time",
"type": "time",
"typeInfo": {
"frame": "time.Time"
}
},
{
"name": "Value",
"type": "number",
"typeInfo": {
"frame": "float64"
},
"labels": {}
},
{
"name": "traceID",
"type": "string",
"typeInfo": {
"frame": "string"
}
},
{
"name": "ztraceID",
"type": "string",
"typeInfo": {
"frame": "string"
}
}
]
},
"data": {
"values": [
[
1600096955479,
1600096965489
],
[
19,
20
],
[
"Olp9XHlq763ccsfa",
""
],
[
"",
"hCtjygkIHwAN9vs4"
]
]
}
}
]
}

View File

@@ -0,0 +1,23 @@
{
"status": "success",
"data": [
{
"exemplars": [
{
"labels": {
"traceID": "Olp9XHlq763ccsfa"
},
"value": "19",
"timestamp": 1600096955.479
},
{
"labels": {
"ztraceID": "hCtjygkIHwAN9vs4"
},
"value": "20",
"timestamp": 1600096965.489
}
]
}
]
}

View File

@@ -0,0 +1,156 @@
// 🌟 This was machine generated. Do not edit. 🌟
//
// Frame[0] {
// "custom": {
// "resultType": "exemplar"
// }
// }
// Name:
// Dimensions: 4 Fields by 1 Rows
// +-----------------------------------+---------------------------------------------------------------------------------------------------+------------------+----------------+
// | Name: Time | Name: Value | Name: traceID | Name: a |
// | Labels: | Labels: __name__=test_exemplar_metric_total, instance=localhost:8090, job=prometheus, service=bar | Labels: | Labels: |
// | Type: []time.Time | Type: []float64 | Type: []string | Type: []string |
// +-----------------------------------+---------------------------------------------------------------------------------------------------+------------------+----------------+
// | 2020-09-14 15:22:25.479 +0000 UTC | 6 | EpTxMJ40fUus7aGY | not in next |
// +-----------------------------------+---------------------------------------------------------------------------------------------------+------------------+----------------+
//
//
//
// Frame[1] {
// "custom": {
// "resultType": "exemplar"
// }
// }
// Name:
// Dimensions: 3 Fields by 2 Rows
// +-----------------------------------+---------------------------------------------------------------------------------------------------+------------------+
// | Name: Time | Name: Value | Name: traceID |
// | Labels: | Labels: __name__=test_exemplar_metric_total, instance=localhost:8090, job=prometheus, service=foo | Labels: |
// | Type: []time.Time | Type: []float64 | Type: []string |
// +-----------------------------------+---------------------------------------------------------------------------------------------------+------------------+
// | 2020-09-14 15:22:35.479 +0000 UTC | 19 | Olp9XHlq763ccsfa |
// | 2020-09-14 15:22:45.489 +0000 UTC | 20 | hCtjygkIHwAN9vs4 |
// +-----------------------------------+---------------------------------------------------------------------------------------------------+------------------+
//
//
// 🌟 This was machine generated. Do not edit. 🌟
{
"frames": [
{
"schema": {
"meta": {
"custom": {
"resultType": "exemplar"
}
},
"fields": [
{
"name": "Time",
"type": "time",
"typeInfo": {
"frame": "time.Time"
}
},
{
"name": "Value",
"type": "number",
"typeInfo": {
"frame": "float64"
},
"labels": {
"__name__": "test_exemplar_metric_total",
"instance": "localhost:8090",
"job": "prometheus",
"service": "bar"
}
},
{
"name": "traceID",
"type": "string",
"typeInfo": {
"frame": "string"
}
},
{
"name": "a",
"type": "string",
"typeInfo": {
"frame": "string"
}
}
]
},
"data": {
"values": [
[
1600096945479
],
[
6
],
[
"EpTxMJ40fUus7aGY"
],
[
"not in next"
]
]
}
},
{
"schema": {
"meta": {
"custom": {
"resultType": "exemplar"
}
},
"fields": [
{
"name": "Time",
"type": "time",
"typeInfo": {
"frame": "time.Time"
}
},
{
"name": "Value",
"type": "number",
"typeInfo": {
"frame": "float64"
},
"labels": {
"__name__": "test_exemplar_metric_total",
"instance": "localhost:8090",
"job": "prometheus",
"service": "foo"
}
},
{
"name": "traceID",
"type": "string",
"typeInfo": {
"frame": "string"
}
}
]
},
"data": {
"values": [
[
1600096955479,
1600096965489
],
[
19,
20
],
[
"Olp9XHlq763ccsfa",
"hCtjygkIHwAN9vs4"
]
]
}
}
]
}

View File

@@ -0,0 +1,47 @@
{
"status": "success",
"data": [
{
"seriesLabels": {
"__name__": "test_exemplar_metric_total",
"instance": "localhost:8090",
"job": "prometheus",
"service": "bar"
},
"exemplars": [
{
"labels": {
"traceID": "EpTxMJ40fUus7aGY",
"a": "not in next"
},
"value": "6",
"timestamp": 1600096945.479
}
]
},
{
"seriesLabels": {
"__name__": "test_exemplar_metric_total",
"instance": "localhost:8090",
"job": "prometheus",
"service": "foo"
},
"exemplars": [
{
"labels": {
"traceID": "Olp9XHlq763ccsfa"
},
"value": "19",
"timestamp": 1600096955.479
},
{
"labels": {
"traceID": "hCtjygkIHwAN9vs4"
},
"value": "20",
"timestamp": 1600096965.489
}
]
}
]
}

View File

@@ -0,0 +1,69 @@
// 🌟 This was machine generated. Do not edit. 🌟
//
// Frame[0]
// Name:
// Dimensions: 1 Fields by 21 Rows
// +----------------+
// | Name: Value |
// | Labels: |
// | Type: []string |
// +----------------+
// | __name__ |
// | call |
// | code |
// | config |
// | dialer_name |
// | endpoint |
// | event |
// | goversion |
// | handler |
// | ... |
// +----------------+
//
//
// 🌟 This was machine generated. Do not edit. 🌟
{
"status": 200,
"frames": [
{
"schema": {
"fields": [
{
"name": "Value",
"type": "string",
"typeInfo": {
"frame": "string"
}
}
]
},
"data": {
"values": [
[
"__name__",
"call",
"code",
"config",
"dialer_name",
"endpoint",
"event",
"goversion",
"handler",
"instance",
"interval",
"job",
"le",
"listener_name",
"name",
"quantile",
"reason",
"role",
"scrape_job",
"slice",
"version"
]
]
}
}
]
}

View File

@@ -0,0 +1,26 @@
{
"status": "success",
"data": [
"__name__",
"call",
"code",
"config",
"dialer_name",
"endpoint",
"event",
"goversion",
"handler",
"instance",
"interval",
"job",
"le",
"listener_name",
"name",
"quantile",
"reason",
"role",
"scrape_job",
"slice",
"version"
]
}

View File

@@ -0,0 +1,153 @@
// 🌟 This was machine generated. Do not edit. 🌟
//
// Frame[0] {
// "type": "timeseries-multi",
// "typeVersion": [
// 0,
// 0
// ],
// "custom": {
// "resultType": "matrix"
// }
// }
// Name:
// Dimensions: 2 Fields by 3 Rows
// +-----------------------------------+--------------------------------------------------------------+
// | Name: Time | Name: Value |
// | Labels: | Labels: __name__=up, instance=localhost:9090, job=prometheus |
// | Type: []time.Time | Type: []float64 |
// +-----------------------------------+--------------------------------------------------------------+
// | 2015-07-01 20:10:30.781 +0000 UTC | 1 |
// | 2015-07-01 20:10:45.781 +0000 UTC | 1 |
// | 2015-07-01 20:11:00.781 +0000 UTC | 1 |
// +-----------------------------------+--------------------------------------------------------------+
//
//
//
// Frame[1] {
// "type": "timeseries-multi",
// "typeVersion": [
// 0,
// 0
// ],
// "custom": {
// "resultType": "matrix"
// }
// }
// Name:
// Dimensions: 2 Fields by 3 Rows
// +-----------------------------------+--------------------------------------------------------+
// | Name: Time | Name: Value |
// | Labels: | Labels: __name__=up, instance=localhost:9091, job=node |
// | Type: []time.Time | Type: []float64 |
// +-----------------------------------+--------------------------------------------------------+
// | 2015-07-01 20:10:30.781 +0000 UTC | 0 |
// | 2015-07-01 20:10:45.781 +0000 UTC | 0 |
// | 2015-07-01 20:11:00.781 +0000 UTC | 1 |
// +-----------------------------------+--------------------------------------------------------+
//
//
// 🌟 This was machine generated. Do not edit. 🌟
{
"status": 200,
"frames": [
{
"schema": {
"meta": {
"type": "timeseries-multi",
"typeVersion": [
0,
0
],
"custom": {
"resultType": "matrix"
}
},
"fields": [
{
"name": "Time",
"type": "time",
"typeInfo": {
"frame": "time.Time"
}
},
{
"name": "Value",
"type": "number",
"typeInfo": {
"frame": "float64"
},
"labels": {
"__name__": "up",
"instance": "localhost:9090",
"job": "prometheus"
}
}
]
},
"data": {
"values": [
[
1435781430781,
1435781445781,
1435781460781
],
[
1,
1,
1
]
]
}
},
{
"schema": {
"meta": {
"type": "timeseries-multi",
"typeVersion": [
0,
0
],
"custom": {
"resultType": "matrix"
}
},
"fields": [
{
"name": "Time",
"type": "time",
"typeInfo": {
"frame": "time.Time"
}
},
{
"name": "Value",
"type": "number",
"typeInfo": {
"frame": "float64"
},
"labels": {
"__name__": "up",
"instance": "localhost:9091",
"job": "node"
}
}
]
},
"data": {
"values": [
[
1435781430781,
1435781445781,
1435781460781
],
[
0,
0,
1
]
]
}
}
]
}

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,93 @@
// 🌟 This was machine generated. Do not edit. 🌟
//
// Frame[0] {
// "type": "timeseries-multi",
// "typeVersion": [
// 0,
// 0
// ],
// "custom": {
// "resultType": "matrix"
// }
// }
// Name:
// Dimensions: 2 Fields by 3 Rows
// +-------------------------------+-----------------------------------------------------+
// | Name: Time | Name: Value |
// | Labels: | Labels: handler=/api/v1/query_range, job=prometheus |
// | Type: []time.Time | Type: []float64 |
// +-------------------------------+-----------------------------------------------------+
// | 2022-01-11 08:25:30 +0000 UTC | +Inf |
// | 2022-01-11 08:25:31 +0000 UTC | NaN |
// | 2022-01-11 08:25:32 +0000 UTC | -Inf |
// +-------------------------------+-----------------------------------------------------+
//
//
// 🌟 This was machine generated. Do not edit. 🌟
{
"status": 200,
"frames": [
{
"schema": {
"meta": {
"type": "timeseries-multi",
"typeVersion": [
0,
0
],
"custom": {
"resultType": "matrix"
}
},
"fields": [
{
"name": "Time",
"type": "time",
"typeInfo": {
"frame": "time.Time"
}
},
{
"name": "Value",
"type": "number",
"typeInfo": {
"frame": "float64"
},
"labels": {
"handler": "/api/v1/query_range",
"job": "prometheus"
}
}
]
},
"data": {
"values": [
[
1641889530000,
1641889531000,
1641889532000
],
[
null,
null,
null
]
],
"entities": [
null,
{
"NaN": [
1
],
"Inf": [
0
],
"NegInf": [
2
]
}
]
}
}
]
}

View File

@@ -0,0 +1,16 @@
{
"status": "success",
"data": {
"resultType": "matrix",
"result": [
{
"metric": { "handler": "/api/v1/query_range", "job": "prometheus" },
"values": [
[1641889530, "+Inf"],
[1641889531, "NaN"],
[1641889532, "-Inf"]
]
}
]
}
}

View File

@@ -0,0 +1,33 @@
{
"status" : "success",
"data" : {
"resultType" : "matrix",
"result" : [
{
"metric" : {
"__name__" : "up",
"job" : "prometheus",
"instance" : "localhost:9090"
},
"values" : [
[ 1435781430.781, "1" ],
[ 1435781445.781, "1" ],
[ 1435781460.781, "1" ]
]
},
{
"metric" : {
"__name__" : "up",
"job" : "node",
"instance" : "localhost:9091"
},
"values" : [
[ 1435781430.781, "0" ],
[ 1435781445.781, "0" ],
[ 1435781460.781, "1" ]
]
}
]
}
}

View File

@@ -0,0 +1,70 @@
// 🌟 This was machine generated. Do not edit. 🌟
//
// Frame[0] {
// "type": "numeric-multi",
// "typeVersion": [
// 0,
// 0
// ],
// "custom": {
// "resultType": "scalar"
// }
// }
// Name:
// Dimensions: 2 Fields by 1 Rows
// +-----------------------------------+-----------------+
// | Name: Time | Name: Value |
// | Labels: | Labels: |
// | Type: []time.Time | Type: []float64 |
// +-----------------------------------+-----------------+
// | 2022-05-04 16:02:19.104 +0000 UTC | 2.482e-05 |
// +-----------------------------------+-----------------+
//
//
// 🌟 This was machine generated. Do not edit. 🌟
{
"status": 200,
"frames": [
{
"schema": {
"meta": {
"type": "numeric-multi",
"typeVersion": [
0,
0
],
"custom": {
"resultType": "scalar"
}
},
"fields": [
{
"name": "Time",
"type": "time",
"typeInfo": {
"frame": "time.Time"
}
},
{
"name": "Value",
"type": "number",
"typeInfo": {
"frame": "float64"
},
"labels": {}
}
]
},
"data": {
"values": [
[
1651680139104
],
[
0.00002482
]
]
}
}
]
}

View File

@@ -0,0 +1,10 @@
{
"status": "success",
"data": {
"resultType": "scalar",
"result": [
1651680139.104,
"0.00002482"
]
}
}

View File

@@ -0,0 +1,68 @@
// 🌟 This was machine generated. Do not edit. 🌟
//
// Frame[0]
// Name:
// Dimensions: 3 Fields by 3 Rows
// +----------------------------+----------------+----------------+
// | Name: __name__ | Name: job | Name: instance |
// | Labels: | Labels: | Labels: |
// | Type: []string | Type: []string | Type: []string |
// +----------------------------+----------------+----------------+
// | up | prometheus | localhost:9090 |
// | up | node | localhost:9091 |
// | process_start_time_seconds | prometheus | localhost:9090 |
// +----------------------------+----------------+----------------+
//
//
// 🌟 This was machine generated. Do not edit. 🌟
{
"status": 200,
"frames": [
{
"schema": {
"fields": [
{
"name": "__name__",
"type": "string",
"typeInfo": {
"frame": "string"
}
},
{
"name": "job",
"type": "string",
"typeInfo": {
"frame": "string"
}
},
{
"name": "instance",
"type": "string",
"typeInfo": {
"frame": "string"
}
}
]
},
"data": {
"values": [
[
"up",
"up",
"process_start_time_seconds"
],
[
"prometheus",
"node",
"prometheus"
],
[
"localhost:9090",
"localhost:9091",
"localhost:9090"
]
]
}
}
]
}

View File

@@ -0,0 +1,21 @@
{
"status" : "success",
"data" : [
{
"__name__" : "up",
"job" : "prometheus",
"instance" : "localhost:9090"
},
{
"__name__" : "up",
"job" : "node",
"instance" : "localhost:9091"
},
{
"__name__" : "process_start_time_seconds",
"job" : "prometheus",
"instance" : "localhost:9090"
}
]
}

View File

@@ -0,0 +1,70 @@
// 🌟 This was machine generated. Do not edit. 🌟
//
// Frame[0] {
// "type": "timeseries-multi",
// "typeVersion": [
// 0,
// 0
// ],
// "custom": {
// "resultType": "string"
// }
// }
// Name:
// Dimensions: 2 Fields by 1 Rows
// +-----------------------------------+----------------+
// | Name: Time | Name: Value |
// | Labels: | Labels: |
// | Type: []time.Time | Type: []string |
// +-----------------------------------+----------------+
// | 2022-05-04 16:02:19.104 +0000 UTC | example |
// +-----------------------------------+----------------+
//
//
// 🌟 This was machine generated. Do not edit. 🌟
{
"status": 200,
"frames": [
{
"schema": {
"meta": {
"type": "timeseries-multi",
"typeVersion": [
0,
0
],
"custom": {
"resultType": "string"
}
},
"fields": [
{
"name": "Time",
"type": "time",
"typeInfo": {
"frame": "time.Time"
}
},
{
"name": "Value",
"type": "string",
"typeInfo": {
"frame": "string"
},
"labels": {}
}
]
},
"data": {
"values": [
[
1651680139104
],
[
"example"
]
]
}
}
]
}

View File

@@ -0,0 +1,10 @@
{
"status": "success",
"data": {
"resultType": "string",
"result": [
1651680139.104,
"example"
]
}
}

View File

@@ -0,0 +1,363 @@
// 🌟 This was machine generated. Do not edit. 🌟
//
// Frame[0] {
// "type": "timeseries-multi",
// "typeVersion": [
// 0,
// 0
// ],
// "custom": {
// "resultType": "vector"
// }
// }
// Name:
// Dimensions: 2 Fields by 1 Rows
// +-----------------------------------+--------------------------------------------------------------+
// | Name: Time | Name: Value |
// | Labels: | Labels: __name__=up, instance=localhost:9090, job=prometheus |
// | Type: []time.Time | Type: []float64 |
// +-----------------------------------+--------------------------------------------------------------+
// | 2015-07-01 20:10:51.781 +0000 UTC | 1 |
// +-----------------------------------+--------------------------------------------------------------+
//
//
//
// Frame[1] {
// "type": "timeseries-multi",
// "typeVersion": [
// 0,
// 0
// ],
// "custom": {
// "resultType": "vector"
// }
// }
// Name:
// Dimensions: 2 Fields by 1 Rows
// +-----------------------------------+--------------------------------------------------------+
// | Name: Time | Name: Value |
// | Labels: | Labels: __name__=up, instance=localhost:9100, job=node |
// | Type: []time.Time | Type: []float64 |
// +-----------------------------------+--------------------------------------------------------+
// | 2015-07-01 20:10:51.781 +0000 UTC | 0 |
// +-----------------------------------+--------------------------------------------------------+
//
//
//
// Frame[2] {
// "type": "timeseries-multi",
// "typeVersion": [
// 0,
// 0
// ],
// "custom": {
// "resultType": "vector"
// }
// }
// Name:
// Dimensions: 2 Fields by 1 Rows
// +-------------------------------+------------------------------------+
// | Name: Time | Name: Value |
// | Labels: | Labels: level=error, location=moon |
// | Type: []time.Time | Type: []float64 |
// +-------------------------------+------------------------------------+
// | 2022-02-16 16:41:39 +0000 UTC | +Inf |
// +-------------------------------+------------------------------------+
//
//
//
// Frame[3] {
// "type": "timeseries-multi",
// "typeVersion": [
// 0,
// 0
// ],
// "custom": {
// "resultType": "vector"
// }
// }
// Name:
// Dimensions: 2 Fields by 1 Rows
// +-------------------------------+-----------------------------------+
// | Name: Time | Name: Value |
// | Labels: | Labels: level=info, location=moon |
// | Type: []time.Time | Type: []float64 |
// +-------------------------------+-----------------------------------+
// | 2022-02-16 16:41:39 +0000 UTC | -Inf |
// +-------------------------------+-----------------------------------+
//
//
//
// Frame[4] {
// "type": "timeseries-multi",
// "typeVersion": [
// 0,
// 0
// ],
// "custom": {
// "resultType": "vector"
// }
// }
// Name:
// Dimensions: 2 Fields by 1 Rows
// +-------------------------------+------------------------------------+
// | Name: Time | Name: Value |
// | Labels: | Labels: level=debug, location=moon |
// | Type: []time.Time | Type: []float64 |
// +-------------------------------+------------------------------------+
// | 2022-02-16 16:41:39 +0000 UTC | NaN |
// +-------------------------------+------------------------------------+
//
//
// 🌟 This was machine generated. Do not edit. 🌟
{
"status": 200,
"frames": [
{
"schema": {
"meta": {
"type": "timeseries-multi",
"typeVersion": [
0,
0
],
"custom": {
"resultType": "vector"
}
},
"fields": [
{
"name": "Time",
"type": "time",
"typeInfo": {
"frame": "time.Time"
}
},
{
"name": "Value",
"type": "number",
"typeInfo": {
"frame": "float64"
},
"labels": {
"__name__": "up",
"instance": "localhost:9090",
"job": "prometheus"
}
}
]
},
"data": {
"values": [
[
1435781451781
],
[
1
]
]
}
},
{
"schema": {
"meta": {
"type": "timeseries-multi",
"typeVersion": [
0,
0
],
"custom": {
"resultType": "vector"
}
},
"fields": [
{
"name": "Time",
"type": "time",
"typeInfo": {
"frame": "time.Time"
}
},
{
"name": "Value",
"type": "number",
"typeInfo": {
"frame": "float64"
},
"labels": {
"__name__": "up",
"instance": "localhost:9100",
"job": "node"
}
}
]
},
"data": {
"values": [
[
1435781451781
],
[
0
]
]
}
},
{
"schema": {
"meta": {
"type": "timeseries-multi",
"typeVersion": [
0,
0
],
"custom": {
"resultType": "vector"
}
},
"fields": [
{
"name": "Time",
"type": "time",
"typeInfo": {
"frame": "time.Time"
}
},
{
"name": "Value",
"type": "number",
"typeInfo": {
"frame": "float64"
},
"labels": {
"level": "error",
"location": "moon"
}
}
]
},
"data": {
"values": [
[
1645029699000
],
[
null
]
],
"entities": [
null,
{
"Inf": [
0
]
}
]
}
},
{
"schema": {
"meta": {
"type": "timeseries-multi",
"typeVersion": [
0,
0
],
"custom": {
"resultType": "vector"
}
},
"fields": [
{
"name": "Time",
"type": "time",
"typeInfo": {
"frame": "time.Time"
}
},
{
"name": "Value",
"type": "number",
"typeInfo": {
"frame": "float64"
},
"labels": {
"level": "info",
"location": "moon"
}
}
]
},
"data": {
"values": [
[
1645029699000
],
[
null
]
],
"entities": [
null,
{
"NegInf": [
0
]
}
]
}
},
{
"schema": {
"meta": {
"type": "timeseries-multi",
"typeVersion": [
0,
0
],
"custom": {
"resultType": "vector"
}
},
"fields": [
{
"name": "Time",
"type": "time",
"typeInfo": {
"frame": "time.Time"
}
},
{
"name": "Value",
"type": "number",
"typeInfo": {
"frame": "float64"
},
"labels": {
"level": "debug",
"location": "moon"
}
}
]
},
"data": {
"values": [
[
1645029699000
],
[
null
]
],
"entities": [
null,
{
"NaN": [
0
]
}
]
}
}
]
}

View File

@@ -0,0 +1,768 @@
// 🌟 This was machine generated. Do not edit. 🌟
//
// Frame[0] {
// "type": "heatmap-cells",
// "typeVersion": [
// 0,
// 0
// ]
// }
// Name:
// Dimensions: 5 Fields by 134 Rows
// +-----------------------------------+------------------------+------------------------+---------------------+---------------+
// | Name: xMax | Name: yMin | Name: yMax | Name: count | Name: yLayout |
// | Labels: | Labels: | Labels: | Labels: | Labels: |
// | Type: []time.Time | Type: []float64 | Type: []float64 | Type: []float64 | Type: []int8 |
// +-----------------------------------+------------------------+------------------------+---------------------+---------------+
// | 2022-04-14 20:21:08.042 +0000 UTC | 4.536465129862675e-06 | 4.947050303081549e-06 | 0.13333333333333333 | 0 |
// | 2022-04-14 20:21:08.042 +0000 UTC | 4.947050303081549e-06 | 5.394796609394436e-06 | 2.0982456140350876 | 0 |
// | 2022-04-14 20:21:08.042 +0000 UTC | 5.394796609394436e-06 | 5.883067418700946e-06 | 4.224561403508771 | 0 |
// | 2022-04-14 20:21:08.042 +0000 UTC | 5.883067418700946e-06 | 6.415530511884418e-06 | 4.101754385964911 | 0 |
// | 2022-04-14 20:21:08.042 +0000 UTC | 6.415530511884418e-06 | 6.9961856323598564e-06 | 3.5438596491228074 | 0 |
// | 2022-04-14 20:21:08.042 +0000 UTC | 6.9961856323598564e-06 | 7.62939453125e-06 | 3.922807017543859 | 0 |
// | 2022-04-14 20:21:08.042 +0000 UTC | 7.62939453125e-06 | 8.319913731882154e-06 | 3.8877192982456137 | 0 |
// | 2022-04-14 20:21:08.042 +0000 UTC | 8.319913731882154e-06 | 9.07293025972535e-06 | 3.480701754385965 | 0 |
// | 2022-04-14 20:21:08.042 +0000 UTC | 9.07293025972535e-06 | 9.894100606163098e-06 | 3.392982456140351 | 0 |
// | ... | ... | ... | ... | ... |
// +-----------------------------------+------------------------+------------------------+---------------------+---------------+
//
//
// 🌟 This was machine generated. Do not edit. 🌟
{
"status": 200,
"frames": [
{
"schema": {
"meta": {
"type": "heatmap-cells",
"typeVersion": [
0,
0
]
},
"fields": [
{
"name": "xMax",
"type": "time",
"typeInfo": {
"frame": "time.Time"
}
},
{
"name": "yMin",
"type": "number",
"typeInfo": {
"frame": "float64"
},
"labels": {}
},
{
"name": "yMax",
"type": "number",
"typeInfo": {
"frame": "float64"
}
},
{
"name": "count",
"type": "number",
"typeInfo": {
"frame": "float64"
}
},
{
"name": "yLayout",
"type": "number",
"typeInfo": {
"frame": "int8"
}
}
]
},
"data": {
"values": [
[
1649967668042,
1649967668042,
1649967668042,
1649967668042,
1649967668042,
1649967668042,
1649967668042,
1649967668042,
1649967668042,
1649967668042,
1649967668042,
1649967668042,
1649967668042,
1649967668042,
1649967668042,
1649967668042,
1649967668042,
1649967668042,
1649967668042,
1649967668042,
1649967668042,
1649967668042,
1649967668042,
1649967668042,
1649967668042,
1649967668042,
1649967668042,
1649967668042,
1649967668042,
1649967668042,
1649967668042,
1649967668042,
1649967668042,
1649967668042,
1649967668042,
1649967668042,
1649967668042,
1649967668042,
1649967668042,
1649967668042,
1649967668042,
1649967668042,
1649967668042,
1649967668042,
1649967668042,
1649967668042,
1649967668042,
1649967668042,
1649967668042,
1649967668042,
1649967668042,
1649967668042,
1649967668042,
1649967668042,
1649967668042,
1649967668042,
1649967668042,
1649967668042,
1649967668042,
1649967668042,
1649967668042,
1649967668042,
1649967668042,
1649967668042,
1649967668042,
1649967668042,
1649967668042,
1649967668042,
1649967668042,
1649967668042,
1649967668042,
1649967668042,
1649967668042,
1649967668042,
1649967668042,
1649967668042,
1649967668042,
1649967668042,
1649967668042,
1649967668042,
1649967668042,
1649967668042,
1649967668042,
1649967668042,
1649967668042,
1649967668042,
1649967668042,
1649967668042,
1649967668042,
1649967668042,
1649967668042,
1649967668042,
1649967668042,
1649967668042,
1649967668042,
1649967668042,
1649967668042,
1649967668042,
1649967668042,
1649967668042,
1649967668042,
1649967668042,
1649967668042,
1649967668042,
1649967668042,
1649967668042,
1649967668042,
1649967668042,
1649967668042,
1649967668042,
1649967668042,
1649967668042,
1649967668042,
1649967668042,
1649967668042,
1649967668042,
1649967668042,
1649967668042,
1649967668042,
1649967668042,
1649967668042,
1649967668042,
1649967668042,
1649967668042,
1649967668042,
1649967668042,
1649967668042,
1649967668042,
1649967668042,
1649967668042,
1649967668042,
1649967668042,
1649967668042,
1649967668042
],
[
0.000004536465129862675,
0.000004947050303081549,
0.000005394796609394436,
0.000005883067418700946,
0.000006415530511884418,
0.0000069961856323598564,
0.00000762939453125,
0.000008319913731882154,
0.00000907293025972535,
0.000009894100606163098,
0.000010789593218788871,
0.000011766134837401892,
0.000012831061023768835,
0.000013992371264719713,
0.0000152587890625,
0.000016639827463764308,
0.0000181458605194507,
0.000019788201212326197,
0.000021579186437577742,
0.000023532269674803783,
0.00002566212204753767,
0.000027984742529439426,
0.000030517578125,
0.000033279654927528616,
0.0000362917210389014,
0.000039576402424652394,
0.000043158372875155485,
0.00004706453934960757,
0.00005132424409507534,
0.00005596948505887885,
0.00006103515625,
0.00006655930985505723,
0.00009412907869921513,
0.00010264848819015068,
0.0004477558804710308,
0.00048828125,
0.0005324744788404579,
0.0005806675366224224,
0.0006332224387944383,
0.0006905339660024878,
0.0007530326295937211,
0.0008211879055212055,
0.0008955117609420616,
0.0009765625,
0.0010649489576809157,
0.0011613350732448448,
0.0012664448775888766,
0.0013810679320049755,
0.0015060652591874421,
0.001642375811042411,
0.0017910235218841233,
0.001953125,
0.0021298979153618314,
0.0023226701464896895,
0.002532889755177753,
0.002762135864009951,
0.0030121305183748843,
0.003284751622084822,
0.0035820470437682465,
0.00390625,
0.004259795830723663,
0.004645340292979379,
0.005065779510355506,
0.005524271728019902,
0.0060242610367497685,
0.006569503244169644,
0.007164094087536493,
0.0078125,
0.008519591661447326,
0.009290680585958758,
0.010131559020711013,
0.011048543456039804,
0.012048522073499537,
0.013139006488339287,
0.014328188175072986,
0.015625,
0.01703918332289465,
0.018581361171917516,
0.020263118041422026,
0.022097086912079608,
0.024097044146999074,
0.026278012976678575,
0.028656376350145972,
0.03125,
0.0340783666457893,
0.03716272234383503,
0.04052623608284405,
0.044194173824159216,
0.04819408829399815,
0.05255602595335715,
0.057312752700291944,
0.0625,
0.0681567332915786,
0.07432544468767006,
0.0810524721656881,
0.08838834764831843,
0.0963881765879963,
0.1051120519067143,
0.11462550540058389,
0.125,
0.1363134665831572,
0.14865088937534013,
0.1621049443313762,
0.17677669529663687,
0.1927763531759926,
0.2102241038134286,
0.22925101080116778,
0.25,
0.2726269331663144,
0.29730177875068026,
0.3242098886627524,
0.35355339059327373,
0.3855527063519852,
0.4204482076268572,
0.45850202160233555,
0.5,
0.5452538663326288,
0.5946035575013605,
0.6484197773255048,
0.7071067811865475,
0.7711054127039704,
1,
1.0905077326652577,
1.189207115002721,
1.2968395546510096,
1.414213562373095,
1.5422108254079407,
1.6817928305074288,
1.8340080864093422,
2,
2.1810154653305154,
2.378414230005442,
2.5936791093020193,
2.82842712474619
],
[
0.000004947050303081549,
0.000005394796609394436,
0.000005883067418700946,
0.000006415530511884418,
0.0000069961856323598564,
0.00000762939453125,
0.000008319913731882154,
0.00000907293025972535,
0.000009894100606163098,
0.000010789593218788871,
0.000011766134837401892,
0.000012831061023768835,
0.000013992371264719713,
0.0000152587890625,
0.000016639827463764308,
0.0000181458605194507,
0.000019788201212326197,
0.000021579186437577742,
0.000023532269674803783,
0.00002566212204753767,
0.000027984742529439426,
0.000030517578125,
0.000033279654927528616,
0.0000362917210389014,
0.000039576402424652394,
0.000043158372875155485,
0.00004706453934960757,
0.00005132424409507534,
0.00005596948505887885,
0.00006103515625,
0.00006655930985505723,
0.0000725834420778028,
0.00010264848819015068,
0.0001119389701177577,
0.00048828125,
0.0005324744788404579,
0.0005806675366224224,
0.0006332224387944383,
0.0006905339660024878,
0.0007530326295937211,
0.0008211879055212055,
0.0008955117609420616,
0.0009765625,
0.0010649489576809157,
0.0011613350732448448,
0.0012664448775888766,
0.0013810679320049755,
0.0015060652591874421,
0.001642375811042411,
0.0017910235218841233,
0.001953125,
0.0021298979153618314,
0.0023226701464896895,
0.002532889755177753,
0.002762135864009951,
0.0030121305183748843,
0.003284751622084822,
0.0035820470437682465,
0.00390625,
0.004259795830723663,
0.004645340292979379,
0.005065779510355506,
0.005524271728019902,
0.0060242610367497685,
0.006569503244169644,
0.007164094087536493,
0.0078125,
0.008519591661447326,
0.009290680585958758,
0.010131559020711013,
0.011048543456039804,
0.012048522073499537,
0.013139006488339287,
0.014328188175072986,
0.015625,
0.01703918332289465,
0.018581361171917516,
0.020263118041422026,
0.022097086912079608,
0.024097044146999074,
0.026278012976678575,
0.028656376350145972,
0.03125,
0.0340783666457893,
0.03716272234383503,
0.04052623608284405,
0.044194173824159216,
0.04819408829399815,
0.05255602595335715,
0.057312752700291944,
0.0625,
0.0681567332915786,
0.07432544468767006,
0.0810524721656881,
0.08838834764831843,
0.0963881765879963,
0.1051120519067143,
0.11462550540058389,
0.125,
0.1363134665831572,
0.14865088937534013,
0.1621049443313762,
0.17677669529663687,
0.1927763531759926,
0.2102241038134286,
0.22925101080116778,
0.25,
0.2726269331663144,
0.29730177875068026,
0.3242098886627524,
0.35355339059327373,
0.3855527063519852,
0.4204482076268572,
0.45850202160233555,
0.5,
0.5452538663326288,
0.5946035575013605,
0.6484197773255048,
0.7071067811865475,
0.7711054127039704,
0.8408964152537144,
1.0905077326652577,
1.189207115002721,
1.2968395546510096,
1.414213562373095,
1.5422108254079407,
1.6817928305074288,
1.8340080864093422,
2,
2.1810154653305154,
2.378414230005442,
2.5936791093020193,
2.82842712474619,
3.0844216508158815
],
[
0.13333333333333333,
2.0982456140350876,
4.224561403508771,
4.101754385964911,
3.5438596491228074,
3.922807017543859,
3.8877192982456137,
3.480701754385965,
3.392982456140351,
2.971929824561403,
2.028070175438596,
1.7087719298245612,
1.1614035087719294,
0.6210526315789473,
0.42105263157894735,
0.3192982456140351,
0.21052631578947364,
0.16842105263157892,
0.1333333333333333,
0.09122807017543859,
0.08771929824561403,
0.08421052631578947,
0.0631578947368421,
0.05964912280701754,
0.014035087719298244,
0.024561403508771926,
0.02456140350877193,
0.007017543859649122,
0,
0.010526315789473682,
0.003508771929824561,
0.003508771929824561,
0.003508771929824561,
0.003508771929824561,
0.003508771929824561,
0.007017543859649122,
0.007017543859649122,
0.03859649122807017,
0.07368421052631578,
0.04912280701754385,
0.09122807017543859,
0.03859649122807017,
0.02456140350877193,
0.017543859649122806,
0.031578947368421054,
0.06315789473684211,
0.10526315789473682,
0.11929824561403507,
0.10877192982456138,
0.11929824561403507,
0.19649122807017538,
0.24561403508771926,
0.19999999999999998,
0.18245614035087715,
0.2771929824561403,
0.5228070175438596,
0.663157894736842,
0.4070175438596491,
0.4421052631578947,
1.0070175438596491,
1.4210526315789476,
1.3859649122807016,
1.0035087719298246,
0.8771929824561402,
8.273684210526316,
32.50877192982456,
57.18245614035087,
53.82105263157894,
39.508771929824555,
22.824561403508767,
11.554385964912282,
6.50877192982456,
4.529824561403507,
3.5298245614035086,
2.7438596491228067,
2.284210526315789,
1.9192982456140348,
2.028070175438596,
1.9403508771929823,
1.4701754385964911,
1.1964912280701754,
0.9228070175438596,
0.8421052631578947,
2.8421052631578942,
2.508771929824561,
1.0842105263157893,
0.3473684210526316,
0.2526315789473684,
0.1964912280701754,
0.09473684210526315,
0.07017543859649121,
0.10526315789473684,
0.07368421052631578,
0.07368421052631577,
0.06666666666666665,
0.04912280701754385,
0.05263157894736841,
0.38947368421052625,
2.9157894736842103,
0.5052631578947367,
0.11228070175438594,
0.05263157894736842,
0.04210526315789473,
0.02807017543859649,
0.010526315789473682,
0.010526315789473682,
0.003508771929824561,
0.010526315789473682,
0.003508771929824561,
0.007017543859649122,
0.007017543859649122,
0.024561403508771926,
0.017543859649122806,
0.02456140350877193,
0.04561403508771929,
0.014035087719298244,
0.021052631578947364,
0.007017543859649122,
0.003508771929824561,
0.003508771929824561,
0.007017543859649122,
0.003508771929824561,
0.010526315789473682,
0.010526315789473682,
0.021052631578947368,
0.003508771929824561,
0.003508771929824561,
0,
0.003508771929824561,
0,
0,
0,
0.003508771929824561,
0.003508771929824561
],
[
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
]
]
}
}
]
}

View File

@@ -0,0 +1,824 @@
{
"status": "success",
"data": {
"resultType": "vector",
"result": [
{
"metric": {},
"histogram": [
1649967668.042,
{
"count": "316.47719298245624",
"sum": "3.3474505904112015",
"buckets": [
[
0,
"0.000004536465129862675",
"0.000004947050303081549",
"0.13333333333333333"
],
[
0,
"0.000004947050303081549",
"0.000005394796609394436",
"2.0982456140350876"
],
[
0,
"0.000005394796609394436",
"0.000005883067418700946",
"4.224561403508771"
],
[
0,
"0.000005883067418700946",
"0.000006415530511884418",
"4.101754385964911"
],
[
0,
"0.000006415530511884418",
"0.0000069961856323598564",
"3.5438596491228074"
],
[
0,
"0.0000069961856323598564",
"0.00000762939453125",
"3.922807017543859"
],
[
0,
"0.00000762939453125",
"0.000008319913731882154",
"3.8877192982456137"
],
[
0,
"0.000008319913731882154",
"0.00000907293025972535",
"3.480701754385965"
],
[
0,
"0.00000907293025972535",
"0.000009894100606163098",
"3.392982456140351"
],
[
0,
"0.000009894100606163098",
"0.000010789593218788871",
"2.971929824561403"
],
[
0,
"0.000010789593218788871",
"0.000011766134837401892",
"2.028070175438596"
],
[
0,
"0.000011766134837401892",
"0.000012831061023768835",
"1.7087719298245612"
],
[
0,
"0.000012831061023768835",
"0.000013992371264719713",
"1.1614035087719294"
],
[
0,
"0.000013992371264719713",
"0.0000152587890625",
"0.6210526315789473"
],
[
0,
"0.0000152587890625",
"0.000016639827463764308",
"0.42105263157894735"
],
[
0,
"0.000016639827463764308",
"0.0000181458605194507",
"0.3192982456140351"
],
[
0,
"0.0000181458605194507",
"0.000019788201212326197",
"0.21052631578947364"
],
[
0,
"0.000019788201212326197",
"0.000021579186437577742",
"0.16842105263157892"
],
[
0,
"0.000021579186437577742",
"0.000023532269674803783",
"0.1333333333333333"
],
[
0,
"0.000023532269674803783",
"0.00002566212204753767",
"0.09122807017543859"
],
[
0,
"0.00002566212204753767",
"0.000027984742529439426",
"0.08771929824561403"
],
[
0,
"0.000027984742529439426",
"0.000030517578125",
"0.08421052631578947"
],
[
0,
"0.000030517578125",
"0.000033279654927528616",
"0.0631578947368421"
],
[
0,
"0.000033279654927528616",
"0.0000362917210389014",
"0.05964912280701754"
],
[
0,
"0.0000362917210389014",
"0.000039576402424652394",
"0.014035087719298244"
],
[
0,
"0.000039576402424652394",
"0.000043158372875155485",
"0.024561403508771926"
],
[
0,
"0.000043158372875155485",
"0.00004706453934960757",
"0.02456140350877193"
],
[
0,
"0.00004706453934960757",
"0.00005132424409507534",
"0.007017543859649122"
],
[
0,
"0.00005132424409507534",
"0.00005596948505887885",
"0"
],
[
0,
"0.00005596948505887885",
"0.00006103515625",
"0.010526315789473682"
],
[
0,
"0.00006103515625",
"0.00006655930985505723",
"0.003508771929824561"
],
[
0,
"0.00006655930985505723",
"0.0000725834420778028",
"0.003508771929824561"
],
[
0,
"0.00009412907869921513",
"0.00010264848819015068",
"0.003508771929824561"
],
[
0,
"0.00010264848819015068",
"0.0001119389701177577",
"0.003508771929824561"
],
[
0,
"0.0004477558804710308",
"0.00048828125",
"0.003508771929824561"
],
[
0,
"0.00048828125",
"0.0005324744788404579",
"0.007017543859649122"
],
[
0,
"0.0005324744788404579",
"0.0005806675366224224",
"0.007017543859649122"
],
[
0,
"0.0005806675366224224",
"0.0006332224387944383",
"0.03859649122807017"
],
[
0,
"0.0006332224387944383",
"0.0006905339660024878",
"0.07368421052631578"
],
[
0,
"0.0006905339660024878",
"0.0007530326295937211",
"0.04912280701754385"
],
[
0,
"0.0007530326295937211",
"0.0008211879055212055",
"0.09122807017543859"
],
[
0,
"0.0008211879055212055",
"0.0008955117609420616",
"0.03859649122807017"
],
[
0,
"0.0008955117609420616",
"0.0009765625",
"0.02456140350877193"
],
[
0,
"0.0009765625",
"0.0010649489576809157",
"0.017543859649122806"
],
[
0,
"0.0010649489576809157",
"0.0011613350732448448",
"0.031578947368421054"
],
[
0,
"0.0011613350732448448",
"0.0012664448775888766",
"0.06315789473684211"
],
[
0,
"0.0012664448775888766",
"0.0013810679320049755",
"0.10526315789473682"
],
[
0,
"0.0013810679320049755",
"0.0015060652591874421",
"0.11929824561403507"
],
[
0,
"0.0015060652591874421",
"0.001642375811042411",
"0.10877192982456138"
],
[
0,
"0.001642375811042411",
"0.0017910235218841233",
"0.11929824561403507"
],
[
0,
"0.0017910235218841233",
"0.001953125",
"0.19649122807017538"
],
[
0,
"0.001953125",
"0.0021298979153618314",
"0.24561403508771926"
],
[
0,
"0.0021298979153618314",
"0.0023226701464896895",
"0.19999999999999998"
],
[
0,
"0.0023226701464896895",
"0.002532889755177753",
"0.18245614035087715"
],
[
0,
"0.002532889755177753",
"0.002762135864009951",
"0.2771929824561403"
],
[
0,
"0.002762135864009951",
"0.0030121305183748843",
"0.5228070175438596"
],
[
0,
"0.0030121305183748843",
"0.003284751622084822",
"0.663157894736842"
],
[
0,
"0.003284751622084822",
"0.0035820470437682465",
"0.4070175438596491"
],
[
0,
"0.0035820470437682465",
"0.00390625",
"0.4421052631578947"
],
[
0,
"0.00390625",
"0.004259795830723663",
"1.0070175438596491"
],
[
0,
"0.004259795830723663",
"0.004645340292979379",
"1.4210526315789476"
],
[
0,
"0.004645340292979379",
"0.005065779510355506",
"1.3859649122807016"
],
[
0,
"0.005065779510355506",
"0.005524271728019902",
"1.0035087719298246"
],
[
0,
"0.005524271728019902",
"0.0060242610367497685",
"0.8771929824561402"
],
[
0,
"0.0060242610367497685",
"0.006569503244169644",
"8.273684210526316"
],
[
0,
"0.006569503244169644",
"0.007164094087536493",
"32.50877192982456"
],
[
0,
"0.007164094087536493",
"0.0078125",
"57.18245614035087"
],
[
0,
"0.0078125",
"0.008519591661447326",
"53.82105263157894"
],
[
0,
"0.008519591661447326",
"0.009290680585958758",
"39.508771929824555"
],
[
0,
"0.009290680585958758",
"0.010131559020711013",
"22.824561403508767"
],
[
0,
"0.010131559020711013",
"0.011048543456039804",
"11.554385964912282"
],
[
0,
"0.011048543456039804",
"0.012048522073499537",
"6.50877192982456"
],
[
0,
"0.012048522073499537",
"0.013139006488339287",
"4.529824561403507"
],
[
0,
"0.013139006488339287",
"0.014328188175072986",
"3.5298245614035086"
],
[
0,
"0.014328188175072986",
"0.015625",
"2.7438596491228067"
],
[
0,
"0.015625",
"0.01703918332289465",
"2.284210526315789"
],
[
0,
"0.01703918332289465",
"0.018581361171917516",
"1.9192982456140348"
],
[
0,
"0.018581361171917516",
"0.020263118041422026",
"2.028070175438596"
],
[
0,
"0.020263118041422026",
"0.022097086912079608",
"1.9403508771929823"
],
[
0,
"0.022097086912079608",
"0.024097044146999074",
"1.4701754385964911"
],
[
0,
"0.024097044146999074",
"0.026278012976678575",
"1.1964912280701754"
],
[
0,
"0.026278012976678575",
"0.028656376350145972",
"0.9228070175438596"
],
[
0,
"0.028656376350145972",
"0.03125",
"0.8421052631578947"
],
[
0,
"0.03125",
"0.0340783666457893",
"2.8421052631578942"
],
[
0,
"0.0340783666457893",
"0.03716272234383503",
"2.508771929824561"
],
[
0,
"0.03716272234383503",
"0.04052623608284405",
"1.0842105263157893"
],
[
0,
"0.04052623608284405",
"0.044194173824159216",
"0.3473684210526316"
],
[
0,
"0.044194173824159216",
"0.04819408829399815",
"0.2526315789473684"
],
[
0,
"0.04819408829399815",
"0.05255602595335715",
"0.1964912280701754"
],
[
0,
"0.05255602595335715",
"0.057312752700291944",
"0.09473684210526315"
],
[
0,
"0.057312752700291944",
"0.0625",
"0.07017543859649121"
],
[
0,
"0.0625",
"0.0681567332915786",
"0.10526315789473684"
],
[
0,
"0.0681567332915786",
"0.07432544468767006",
"0.07368421052631578"
],
[
0,
"0.07432544468767006",
"0.0810524721656881",
"0.07368421052631577"
],
[
0,
"0.0810524721656881",
"0.08838834764831843",
"0.06666666666666665"
],
[
0,
"0.08838834764831843",
"0.0963881765879963",
"0.04912280701754385"
],
[
0,
"0.0963881765879963",
"0.1051120519067143",
"0.05263157894736841"
],
[
0,
"0.1051120519067143",
"0.11462550540058389",
"0.38947368421052625"
],
[
0,
"0.11462550540058389",
"0.125",
"2.9157894736842103"
],
[
0,
"0.125",
"0.1363134665831572",
"0.5052631578947367"
],
[
0,
"0.1363134665831572",
"0.14865088937534013",
"0.11228070175438594"
],
[
0,
"0.14865088937534013",
"0.1621049443313762",
"0.05263157894736842"
],
[
0,
"0.1621049443313762",
"0.17677669529663687",
"0.04210526315789473"
],
[
0,
"0.17677669529663687",
"0.1927763531759926",
"0.02807017543859649"
],
[
0,
"0.1927763531759926",
"0.2102241038134286",
"0.010526315789473682"
],
[
0,
"0.2102241038134286",
"0.22925101080116778",
"0.010526315789473682"
],
[
0,
"0.22925101080116778",
"0.25",
"0.003508771929824561"
],
[
0,
"0.25",
"0.2726269331663144",
"0.010526315789473682"
],
[
0,
"0.2726269331663144",
"0.29730177875068026",
"0.003508771929824561"
],
[
0,
"0.29730177875068026",
"0.3242098886627524",
"0.007017543859649122"
],
[
0,
"0.3242098886627524",
"0.35355339059327373",
"0.007017543859649122"
],
[
0,
"0.35355339059327373",
"0.3855527063519852",
"0.024561403508771926"
],
[
0,
"0.3855527063519852",
"0.4204482076268572",
"0.017543859649122806"
],
[
0,
"0.4204482076268572",
"0.45850202160233555",
"0.02456140350877193"
],
[
0,
"0.45850202160233555",
"0.5",
"0.04561403508771929"
],
[
0,
"0.5",
"0.5452538663326288",
"0.014035087719298244"
],
[
0,
"0.5452538663326288",
"0.5946035575013605",
"0.021052631578947364"
],
[
0,
"0.5946035575013605",
"0.6484197773255048",
"0.007017543859649122"
],
[
0,
"0.6484197773255048",
"0.7071067811865475",
"0.003508771929824561"
],
[
0,
"0.7071067811865475",
"0.7711054127039704",
"0.003508771929824561"
],
[
0,
"0.7711054127039704",
"0.8408964152537144",
"0.007017543859649122"
],
[
0,
"1",
"1.0905077326652577",
"0.003508771929824561"
],
[
0,
"1.0905077326652577",
"1.189207115002721",
"0.010526315789473682"
],
[
0,
"1.189207115002721",
"1.2968395546510096",
"0.010526315789473682"
],
[
0,
"1.2968395546510096",
"1.414213562373095",
"0.021052631578947368"
],
[
0,
"1.414213562373095",
"1.5422108254079407",
"0.003508771929824561"
],
[
0,
"1.5422108254079407",
"1.6817928305074288",
"0.003508771929824561"
],
[
0,
"1.6817928305074288",
"1.8340080864093422",
"0"
],
[
0,
"1.8340080864093422",
"2",
"0.003508771929824561"
],
[
0,
"2",
"2.1810154653305154",
"0"
],
[
0,
"2.1810154653305154",
"2.378414230005442",
"0"
],
[
0,
"2.378414230005442",
"2.5936791093020193",
"0"
],
[
0,
"2.5936791093020193",
"2.82842712474619",
"0.003508771929824561"
],
[
0,
"2.82842712474619",
"3.0844216508158815",
"0.003508771929824561"
]
]
}
]
}
]
}
}

View File

@@ -0,0 +1,36 @@
{
"status" : "success",
"data" : {
"resultType" : "vector",
"result" : [
{
"metric" : {
"__name__" : "up",
"job" : "prometheus",
"instance" : "localhost:9090"
},
"value": [ 1435781451.781, "1" ]
},
{
"metric" : {
"__name__" : "up",
"job" : "node",
"instance" : "localhost:9100"
},
"value" : [ 1435781451.781, "0" ]
},
{
"metric": { "level": "error", "location": "moon"},
"value": [1645029699, "+Inf"]
},
{
"metric": { "level": "info", "location": "moon" },
"value": [1645029699, "-Inf"]
},
{
"metric": { "level": "debug", "location": "moon" },
"value": [1645029699, "NaN"]
}
]
}
}

View File

@@ -0,0 +1,463 @@
// 🌟 This was machine generated. Do not edit. 🌟
//
// Frame[0] {
// "type": "timeseries-multi",
// "typeVersion": [
// 0,
// 0
// ],
// "custom": {
// "resultType": "vector"
// },
// "notices": [
// {
// "severity": "warning",
// "text": "warning 1"
// },
// {
// "severity": "warning",
// "text": "warning 2"
// }
// ]
// }
// Name:
// Dimensions: 2 Fields by 1 Rows
// +-----------------------------------+--------------------------------------------------------------+
// | Name: Time | Name: Value |
// | Labels: | Labels: __name__=up, instance=localhost:9090, job=prometheus |
// | Type: []time.Time | Type: []float64 |
// +-----------------------------------+--------------------------------------------------------------+
// | 2015-07-01 20:10:51.781 +0000 UTC | 1 |
// +-----------------------------------+--------------------------------------------------------------+
//
//
//
// Frame[1] {
// "type": "timeseries-multi",
// "typeVersion": [
// 0,
// 0
// ],
// "custom": {
// "resultType": "vector"
// },
// "notices": [
// {
// "severity": "warning",
// "text": "warning 1"
// },
// {
// "severity": "warning",
// "text": "warning 2"
// }
// ]
// }
// Name:
// Dimensions: 2 Fields by 1 Rows
// +-----------------------------------+--------------------------------------------------------+
// | Name: Time | Name: Value |
// | Labels: | Labels: __name__=up, instance=localhost:9100, job=node |
// | Type: []time.Time | Type: []float64 |
// +-----------------------------------+--------------------------------------------------------+
// | 2015-07-01 20:10:51.781 +0000 UTC | 0 |
// +-----------------------------------+--------------------------------------------------------+
//
//
//
// Frame[2] {
// "type": "timeseries-multi",
// "typeVersion": [
// 0,
// 0
// ],
// "custom": {
// "resultType": "vector"
// },
// "notices": [
// {
// "severity": "warning",
// "text": "warning 1"
// },
// {
// "severity": "warning",
// "text": "warning 2"
// }
// ]
// }
// Name:
// Dimensions: 2 Fields by 1 Rows
// +-------------------------------+------------------------------------+
// | Name: Time | Name: Value |
// | Labels: | Labels: level=error, location=moon |
// | Type: []time.Time | Type: []float64 |
// +-------------------------------+------------------------------------+
// | 2022-02-16 16:41:39 +0000 UTC | +Inf |
// +-------------------------------+------------------------------------+
//
//
//
// Frame[3] {
// "type": "timeseries-multi",
// "typeVersion": [
// 0,
// 0
// ],
// "custom": {
// "resultType": "vector"
// },
// "notices": [
// {
// "severity": "warning",
// "text": "warning 1"
// },
// {
// "severity": "warning",
// "text": "warning 2"
// }
// ]
// }
// Name:
// Dimensions: 2 Fields by 1 Rows
// +-------------------------------+-----------------------------------+
// | Name: Time | Name: Value |
// | Labels: | Labels: level=info, location=moon |
// | Type: []time.Time | Type: []float64 |
// +-------------------------------+-----------------------------------+
// | 2022-02-16 16:41:39 +0000 UTC | -Inf |
// +-------------------------------+-----------------------------------+
//
//
//
// Frame[4] {
// "type": "timeseries-multi",
// "typeVersion": [
// 0,
// 0
// ],
// "custom": {
// "resultType": "vector"
// },
// "notices": [
// {
// "severity": "warning",
// "text": "warning 1"
// },
// {
// "severity": "warning",
// "text": "warning 2"
// }
// ]
// }
// Name:
// Dimensions: 2 Fields by 1 Rows
// +-------------------------------+------------------------------------+
// | Name: Time | Name: Value |
// | Labels: | Labels: level=debug, location=moon |
// | Type: []time.Time | Type: []float64 |
// +-------------------------------+------------------------------------+
// | 2022-02-16 16:41:39 +0000 UTC | NaN |
// +-------------------------------+------------------------------------+
//
//
// 🌟 This was machine generated. Do not edit. 🌟
{
"status": 200,
"frames": [
{
"schema": {
"meta": {
"type": "timeseries-multi",
"typeVersion": [
0,
0
],
"custom": {
"resultType": "vector"
},
"notices": [
{
"severity": "warning",
"text": "warning 1"
},
{
"severity": "warning",
"text": "warning 2"
}
]
},
"fields": [
{
"name": "Time",
"type": "time",
"typeInfo": {
"frame": "time.Time"
}
},
{
"name": "Value",
"type": "number",
"typeInfo": {
"frame": "float64"
},
"labels": {
"__name__": "up",
"instance": "localhost:9090",
"job": "prometheus"
}
}
]
},
"data": {
"values": [
[
1435781451781
],
[
1
]
]
}
},
{
"schema": {
"meta": {
"type": "timeseries-multi",
"typeVersion": [
0,
0
],
"custom": {
"resultType": "vector"
},
"notices": [
{
"severity": "warning",
"text": "warning 1"
},
{
"severity": "warning",
"text": "warning 2"
}
]
},
"fields": [
{
"name": "Time",
"type": "time",
"typeInfo": {
"frame": "time.Time"
}
},
{
"name": "Value",
"type": "number",
"typeInfo": {
"frame": "float64"
},
"labels": {
"__name__": "up",
"instance": "localhost:9100",
"job": "node"
}
}
]
},
"data": {
"values": [
[
1435781451781
],
[
0
]
]
}
},
{
"schema": {
"meta": {
"type": "timeseries-multi",
"typeVersion": [
0,
0
],
"custom": {
"resultType": "vector"
},
"notices": [
{
"severity": "warning",
"text": "warning 1"
},
{
"severity": "warning",
"text": "warning 2"
}
]
},
"fields": [
{
"name": "Time",
"type": "time",
"typeInfo": {
"frame": "time.Time"
}
},
{
"name": "Value",
"type": "number",
"typeInfo": {
"frame": "float64"
},
"labels": {
"level": "error",
"location": "moon"
}
}
]
},
"data": {
"values": [
[
1645029699000
],
[
null
]
],
"entities": [
null,
{
"Inf": [
0
]
}
]
}
},
{
"schema": {
"meta": {
"type": "timeseries-multi",
"typeVersion": [
0,
0
],
"custom": {
"resultType": "vector"
},
"notices": [
{
"severity": "warning",
"text": "warning 1"
},
{
"severity": "warning",
"text": "warning 2"
}
]
},
"fields": [
{
"name": "Time",
"type": "time",
"typeInfo": {
"frame": "time.Time"
}
},
{
"name": "Value",
"type": "number",
"typeInfo": {
"frame": "float64"
},
"labels": {
"level": "info",
"location": "moon"
}
}
]
},
"data": {
"values": [
[
1645029699000
],
[
null
]
],
"entities": [
null,
{
"NegInf": [
0
]
}
]
}
},
{
"schema": {
"meta": {
"type": "timeseries-multi",
"typeVersion": [
0,
0
],
"custom": {
"resultType": "vector"
},
"notices": [
{
"severity": "warning",
"text": "warning 1"
},
{
"severity": "warning",
"text": "warning 2"
}
]
},
"fields": [
{
"name": "Time",
"type": "time",
"typeInfo": {
"frame": "time.Time"
}
},
{
"name": "Value",
"type": "number",
"typeInfo": {
"frame": "float64"
},
"labels": {
"level": "debug",
"location": "moon"
}
}
]
},
"data": {
"values": [
[
1645029699000
],
[
null
]
],
"entities": [
null,
{
"NaN": [
0
]
}
]
}
}
]
}

View File

@@ -0,0 +1,37 @@
{
"status" : "success",
"data" : {
"resultType" : "vector",
"result" : [
{
"metric" : {
"__name__" : "up",
"job" : "prometheus",
"instance" : "localhost:9090"
},
"value": [ 1435781451.781, "1" ]
},
{
"metric" : {
"__name__" : "up",
"job" : "node",
"instance" : "localhost:9100"
},
"value" : [ 1435781451.781, "0" ]
},
{
"metric": { "level": "error", "location": "moon"},
"value": [1645029699, "+Inf"]
},
{
"metric": { "level": "info", "location": "moon" },
"value": [1645029699, "-Inf"]
},
{
"metric": { "level": "debug", "location": "moon" },
"value": [1645029699, "NaN"]
}
]
},
"warnings" : ["warning 1", "warning 2"]
}

109
pkg/promlib/go.mod Normal file
View File

@@ -0,0 +1,109 @@
module github.com/grafana/grafana/pkg/promlib
go 1.21.0
require (
github.com/grafana/grafana-plugin-sdk-go v0.214.0
github.com/json-iterator/go v1.1.12
github.com/patrickmn/go-cache v2.1.0+incompatible
github.com/prometheus/client_golang v1.18.0
github.com/prometheus/common v0.46.0
github.com/prometheus/prometheus v1.8.2-0.20221021121301-51a44e6657c3
github.com/stretchr/testify v1.8.4
go.opentelemetry.io/otel v1.24.0
go.opentelemetry.io/otel/trace v1.24.0
golang.org/x/exp v0.0.0-20231206192017-f3f8817b8deb
)
require (
github.com/BurntSushi/toml v1.3.2 // indirect
github.com/alecthomas/units v0.0.0-20231202071711-9a357b53e9c9 // indirect
github.com/apache/arrow/go/v15 v15.0.0 // indirect
github.com/aws/aws-sdk-go v1.50.8 // indirect
github.com/beorn7/perks v1.0.1 // indirect
github.com/cenkalti/backoff/v4 v4.2.1 // indirect
github.com/cespare/xxhash/v2 v2.2.0 // indirect
github.com/cheekybits/genny v1.0.0 // indirect
github.com/chromedp/cdproto v0.0.0-20230802225258-3cf4e6d46a89 // indirect
github.com/cpuguy83/go-md2man/v2 v2.0.3 // indirect
github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc // indirect
github.com/dennwc/varint v1.0.0 // indirect
github.com/elazarl/goproxy v0.0.0-20230731152917-f99041a5c027 // indirect
github.com/fatih/color v1.15.0 // indirect
github.com/fsnotify/fsnotify v1.7.0 // indirect
github.com/getkin/kin-openapi v0.120.0 // indirect
github.com/go-kit/log v0.2.1 // indirect
github.com/go-logfmt/logfmt v0.6.0 // indirect
github.com/go-logr/logr v1.4.1 // indirect
github.com/go-logr/stdr v1.2.2 // indirect
github.com/go-openapi/jsonpointer v0.20.2 // indirect
github.com/go-openapi/swag v0.22.9 // indirect
github.com/goccy/go-json v0.10.2 // indirect
github.com/gogo/protobuf v1.3.2 // indirect
github.com/golang/protobuf v1.5.3 // indirect
github.com/google/flatbuffers v23.5.26+incompatible // indirect
github.com/google/go-cmp v0.6.0 // indirect
github.com/google/uuid v1.6.0 // indirect
github.com/gorilla/mux v1.8.0 // indirect
github.com/grafana/regexp v0.0.0-20221123153739-15dc172cd2db // indirect
github.com/grpc-ecosystem/go-grpc-middleware v1.4.0 // indirect
github.com/grpc-ecosystem/go-grpc-prometheus v1.2.1-0.20191002090509-6af20e3a5340 // indirect
github.com/grpc-ecosystem/grpc-gateway/v2 v2.19.0 // indirect
github.com/hashicorp/go-hclog v1.6.2 // indirect
github.com/hashicorp/go-plugin v1.6.0 // indirect
github.com/hashicorp/yamux v0.1.1 // indirect
github.com/invopop/yaml v0.2.0 // indirect
github.com/josharian/intern v1.0.0 // indirect
github.com/klauspost/compress v1.17.4 // indirect
github.com/klauspost/cpuid/v2 v2.2.5 // indirect
github.com/magefile/mage v1.15.0 // indirect
github.com/mailru/easyjson v0.7.7 // indirect
github.com/mattetti/filebuffer v1.0.1 // indirect
github.com/mattn/go-colorable v0.1.13 // indirect
github.com/mattn/go-isatty v0.0.19 // indirect
github.com/mattn/go-runewidth v0.0.13 // indirect
github.com/mitchellh/go-testing-interface v1.14.1 // indirect
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect
github.com/modern-go/reflect2 v1.0.2 // indirect
github.com/mohae/deepcopy v0.0.0-20170929034955-c48cc78d4826 // indirect
github.com/oklog/run v1.1.0 // indirect
github.com/olekukonko/tablewriter v0.0.5 // indirect
github.com/perimeterx/marshmallow v1.1.5 // indirect
github.com/pierrec/lz4/v4 v4.1.18 // indirect
github.com/pkg/errors v0.9.1 // indirect
github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 // indirect
github.com/prometheus/client_model v0.5.0 // indirect
github.com/prometheus/procfs v0.12.0 // indirect
github.com/rivo/uniseg v0.3.4 // indirect
github.com/russross/blackfriday/v2 v2.1.0 // indirect
github.com/smartystreets/goconvey v1.6.4 // indirect
github.com/unknwon/bra v0.0.0-20200517080246-1e3013ecaff8 // indirect
github.com/unknwon/com v1.0.1 // indirect
github.com/unknwon/log v0.0.0-20150304194804-e617c87089d3 // indirect
github.com/urfave/cli v1.22.14 // indirect
github.com/zeebo/xxh3 v1.0.2 // indirect
go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.49.0 // indirect
go.opentelemetry.io/contrib/instrumentation/net/http/httptrace/otelhttptrace v0.49.0 // indirect
go.opentelemetry.io/contrib/propagators/jaeger v1.22.0 // indirect
go.opentelemetry.io/contrib/samplers/jaegerremote v0.18.0 // indirect
go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.24.0 // indirect
go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracegrpc v1.24.0 // indirect
go.opentelemetry.io/otel/metric v1.24.0 // indirect
go.opentelemetry.io/otel/sdk v1.24.0 // indirect
go.opentelemetry.io/proto/otlp v1.1.0 // indirect
go.uber.org/atomic v1.11.0 // indirect
go.uber.org/goleak v1.3.0 // indirect
golang.org/x/mod v0.14.0 // indirect
golang.org/x/net v0.21.0 // indirect
golang.org/x/sys v0.17.0 // indirect
golang.org/x/text v0.14.0 // indirect
golang.org/x/tools v0.17.0 // indirect
golang.org/x/xerrors v0.0.0-20231012003039-104605ab7028 // indirect
google.golang.org/genproto v0.0.0-20240123012728-ef4313101c80 // indirect
google.golang.org/genproto/googleapis/api v0.0.0-20240123012728-ef4313101c80 // indirect
google.golang.org/genproto/googleapis/rpc v0.0.0-20240123012728-ef4313101c80 // indirect
google.golang.org/grpc v1.62.1 // indirect
google.golang.org/protobuf v1.32.0 // indirect
gopkg.in/fsnotify/fsnotify.v1 v1.4.7 // indirect
gopkg.in/yaml.v3 v3.0.1 // indirect
)

129
pkg/promlib/go.sum Normal file
View File

@@ -0,0 +1,129 @@
github.com/BurntSushi/toml v1.3.2 h1:o7IhLm0Msx3BaB+n3Ag7L8EVlByGnpq14C4YWiu/gL8=
github.com/alecthomas/units v0.0.0-20231202071711-9a357b53e9c9 h1:ez/4by2iGztzR4L0zgAOR8lTQK9VlyBVVd7G4omaOQs=
github.com/apache/arrow/go/v15 v15.0.0 h1:1zZACWf85oEZY5/kd9dsQS7i+2G5zVQcbKTHgslqHNA=
github.com/aws/aws-sdk-go v1.50.8 h1:gY0WoOW+/Wz6XmYSgDH9ge3wnAevYDSQWPxxJvqAkP4=
github.com/beorn7/perks v1.0.1 h1:VlbKKnNfV8bJzeqoa4cOKqO6bYr3WgKZxO8Z16+hsOM=
github.com/bufbuild/protocompile v0.4.0 h1:LbFKd2XowZvQ/kajzguUp2DC9UEIQhIq77fZZlaQsNA=
github.com/cenkalti/backoff/v4 v4.2.1 h1:y4OZtCnogmCPw98Zjyt5a6+QwPLGkiQsYW5oUqylYbM=
github.com/cespare/xxhash/v2 v2.2.0 h1:DC2CZ1Ep5Y4k3ZQ899DldepgrayRUGE6BBZ/cd9Cj44=
github.com/cheekybits/genny v1.0.0 h1:uGGa4nei+j20rOSeDeP5Of12XVm7TGUd4dJA9RDitfE=
github.com/chromedp/cdproto v0.0.0-20230802225258-3cf4e6d46a89 h1:aPflPkRFkVwbW6dmcVqfgwp1i+UWGFH6VgR1Jim5Ygc=
github.com/cpuguy83/go-md2man/v2 v2.0.3 h1:qMCsGGgs+MAzDFyp9LpAe1Lqy/fY/qCovCm0qnXZOBM=
github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc h1:U9qPSI2PIWSS1VwoXQT9A3Wy9MM3WgvqSxFWenqJduM=
github.com/dennwc/varint v1.0.0 h1:kGNFFSSw8ToIy3obO/kKr8U9GZYUAxQEVuix4zfDWzE=
github.com/elazarl/goproxy v0.0.0-20230731152917-f99041a5c027 h1:1L0aalTpPz7YlMxETKpmQoWMBkeiuorElZIXoNmgiPE=
github.com/elazarl/goproxy/ext v0.0.0-20220115173737-adb46da277ac h1:9yrT5tmn9Zc0ytWPASlaPwQfQMQYnRf0RSDe1XvHw0Q=
github.com/fatih/color v1.15.0 h1:kOqh6YHBtK8aywxGerMG2Eq3H6Qgoqeo13Bk2Mv/nBs=
github.com/felixge/httpsnoop v1.0.4 h1:NFTV2Zj1bL4mc9sqWACXbQFVBBg2W3GPvqp8/ESS2Wg=
github.com/fsnotify/fsnotify v1.7.0 h1:8JEhPFa5W2WU7YfeZzPNqzMP6Lwt7L2715Ggo0nosvA=
github.com/getkin/kin-openapi v0.120.0 h1:MqJcNJFrMDFNc07iwE8iFC5eT2k/NPUFDIpNeiZv8Jg=
github.com/go-kit/log v0.2.1 h1:MRVx0/zhvdseW+Gza6N9rVzU/IVzaeE1SFI4raAhmBU=
github.com/go-logfmt/logfmt v0.6.0 h1:wGYYu3uicYdqXVgoYbvnkrPVXkuLM1p1ifugDMEdRi4=
github.com/go-logr/logr v1.4.1 h1:pKouT5E8xu9zeFC39JXRDukb6JFQPXM5p5I91188VAQ=
github.com/go-logr/stdr v1.2.2 h1:hSWxHoqTgW2S2qGc0LTAI563KZ5YKYRhT3MFKZMbjag=
github.com/go-openapi/jsonpointer v0.20.2 h1:mQc3nmndL8ZBzStEo3JYF8wzmeWffDH4VbXz58sAx6Q=
github.com/go-openapi/swag v0.22.9 h1:XX2DssF+mQKM2DHsbgZK74y/zj4mo9I99+89xUmuZCE=
github.com/go-test/deep v1.0.8 h1:TDsG77qcSprGbC6vTN8OuXp5g+J+b5Pcguhf7Zt61VM=
github.com/goccy/go-json v0.10.2 h1:CrxCmQqYDkv1z7lO7Wbh2HN93uovUHgrECaO5ZrCXAU=
github.com/gogo/protobuf v1.3.2 h1:Ov1cvc58UF3b5XjBnZv7+opcTcQFZebYjWzi34vdm4Q=
github.com/golang/protobuf v1.5.3 h1:KhyjKVUg7Usr/dYsdSqoFveMYd5ko72D+zANwlG1mmg=
github.com/golang/snappy v0.0.4 h1:yAGX7huGHXlcLOEtBnF4w7FQwA26wojNCwOYAEhLjQM=
github.com/google/flatbuffers v23.5.26+incompatible h1:M9dgRyhJemaM4Sw8+66GHBu8ioaQmyPLg1b8VwK5WJg=
github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI=
github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0=
github.com/gopherjs/gopherjs v0.0.0-20181103185306-d547d1d9531e h1:JKmoR8x90Iww1ks85zJ1lfDGgIiMDuIptTOhJq+zKyg=
github.com/gorilla/mux v1.8.0 h1:i40aqfkR1h2SlN9hojwV5ZA91wcXFOvkdNIeFDP5koI=
github.com/grafana/grafana-plugin-sdk-go v0.214.0 h1:09AoomxfsMdKmS4bc5tF81f7fvI9HjHckGFAmu/UQls=
github.com/grafana/regexp v0.0.0-20221123153739-15dc172cd2db h1:7aN5cccjIqCLTzedH7MZzRZt5/lsAHch6Z3L2ZGn5FA=
github.com/grpc-ecosystem/go-grpc-middleware v1.4.0 h1:UH//fgunKIs4JdUbpDl1VZCDaL56wXCB/5+wF6uHfaI=
github.com/grpc-ecosystem/go-grpc-prometheus v1.2.1-0.20191002090509-6af20e3a5340 h1:uGoIog/wiQHI9GAxXO5TJbT0wWKH3O9HhOJW1F9c3fY=
github.com/grpc-ecosystem/grpc-gateway/v2 v2.19.0 h1:Wqo399gCIufwto+VfwCSvsnfGpF/w5E9CNxSwbpD6No=
github.com/hashicorp/go-hclog v1.6.2 h1:NOtoftovWkDheyUM/8JW3QMiXyxJK3uHRK7wV04nD2I=
github.com/hashicorp/go-hclog v1.6.2/go.mod h1:W4Qnvbt70Wk/zYJryRzDRU/4r0kIg0PVHBcfoyhpF5M=
github.com/hashicorp/go-plugin v1.6.0 h1:wgd4KxHJTVGGqWBq4QPB1i5BZNEx9BR8+OFmHDmTk8A=
github.com/hashicorp/yamux v0.1.1 h1:yrQxtgseBDrq9Y652vSRDvsKCJKOUD+GzTS4Y0Y8pvE=
github.com/invopop/yaml v0.2.0 h1:7zky/qH+O0DwAyoobXUqvVBwgBFRxKoQ/3FjcVpjTMY=
github.com/jhump/protoreflect v1.15.1 h1:HUMERORf3I3ZdX05WaQ6MIpd/NJ434hTp5YiKgfCL6c=
github.com/jmespath/go-jmespath v0.4.0 h1:BEgLn5cpjn8UN1mAw4NjwDrS35OdebyEtFe+9YPoQUg=
github.com/josharian/intern v1.0.0 h1:vlS4z54oSdjm0bgjRigI+G1HpF+tI+9rE5LLzOg8HmY=
github.com/jpillora/backoff v1.0.0 h1:uvFg412JmmHBHw7iwprIxkPMI+sGQ4kzOWsMeHnm2EA=
github.com/json-iterator/go v1.1.12 h1:PV8peI4a0ysnczrg+LtxykD8LfKY9ML6u2jnxaEnrnM=
github.com/jtolds/gls v4.20.0+incompatible h1:xdiiI2gbIgH/gLH7ADydsJ1uDOEzR8yvV7C0MuV77Wo=
github.com/klauspost/compress v1.17.4 h1:Ej5ixsIri7BrIjBkRZLTo6ghwrEtHFk7ijlczPW4fZ4=
github.com/klauspost/cpuid/v2 v2.2.5 h1:0E5MSMDEoAulmXNFquVs//DdoomxaoTY1kUhbc/qbZg=
github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE=
github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY=
github.com/magefile/mage v1.15.0 h1:BvGheCMAsG3bWUDbZ8AyXXpCNwU9u5CB6sM+HNb9HYg=
github.com/mailru/easyjson v0.7.7 h1:UGYAvKxe3sBsEDzO8ZeWOSlIQfWFlxbzLZe7hwFURr0=
github.com/mattetti/filebuffer v1.0.1 h1:gG7pyfnSIZCxdoKq+cPa8T0hhYtD9NxCdI4D7PTjRLM=
github.com/mattn/go-colorable v0.1.13 h1:fFA4WZxdEF4tXPZVKMLwD8oUnCTTo08duU7wxecdEvA=
github.com/mattn/go-isatty v0.0.19 h1:JITubQf0MOLdlGRuRq+jtsDlekdYPia9ZFsB8h/APPA=
github.com/mattn/go-runewidth v0.0.13 h1:lTGmDsbAYt5DmK6OnoV7EuIF1wEIFAcxld6ypU4OSgU=
github.com/mitchellh/go-testing-interface v1.14.1 h1:jrgshOhYAUVNMAJiKbEu7EqAwgJJ2JqpQmpLJOu07cU=
github.com/mitchellh/reflectwalk v1.0.2 h1:G2LzWKi524PWgd3mLHV8Y5k7s6XUvT0Gef6zxSIeXaQ=
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd h1:TRLaZ9cD/w8PVh93nsPXa1VrQ6jlwL5oN8l14QlcNfg=
github.com/modern-go/reflect2 v1.0.2 h1:xBagoLtFs94CBntxluKeaWgTMpvLxC4ur3nMaC9Gz0M=
github.com/mohae/deepcopy v0.0.0-20170929034955-c48cc78d4826 h1:RWengNIwukTxcDr9M+97sNutRR1RKhG96O6jWumTTnw=
github.com/mwitkow/go-conntrack v0.0.0-20190716064945-2f068394615f h1:KUppIJq7/+SVif2QVs3tOP0zanoHgBEVAwHxUSIzRqU=
github.com/oklog/run v1.1.0 h1:GEenZ1cK0+q0+wsJew9qUg/DyD8k3JzYsZAi5gYi2mA=
github.com/oklog/ulid v1.3.1 h1:EGfNDEx6MqHz8B3uNV6QAib1UR2Lm97sHi3ocA6ESJ4=
github.com/olekukonko/tablewriter v0.0.5 h1:P2Ga83D34wi1o9J6Wh1mRuqd4mF/x/lgBS7N7AbDhec=
github.com/patrickmn/go-cache v2.1.0+incompatible h1:HRMgzkcYKYpi3C8ajMPV8OFXaaRUnok+kx1WdO15EQc=
github.com/perimeterx/marshmallow v1.1.5 h1:a2LALqQ1BlHM8PZblsDdidgv1mWi1DgC2UmX50IvK2s=
github.com/pierrec/lz4/v4 v4.1.18 h1:xaKrnTkyoqfh1YItXl56+6KJNVYWlEEPuAQW9xsplYQ=
github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4=
github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 h1:Jamvg5psRIccs7FGNTlIRMkT8wgtp5eCXdBlqhYGL6U=
github.com/prometheus/client_golang v1.18.0 h1:HzFfmkOzH5Q8L8G+kSJKUx5dtG87sewO+FoDDqP5Tbk=
github.com/prometheus/client_model v0.5.0 h1:VQw1hfvPvk3Uv6Qf29VrPF32JB6rtbgI6cYPYQjL0Qw=
github.com/prometheus/common v0.46.0 h1:doXzt5ybi1HBKpsZOL0sSkaNHJJqkyfEWZGGqqScV0Y=
github.com/prometheus/common/sigv4 v0.1.0 h1:qoVebwtwwEhS85Czm2dSROY5fTo2PAPEVdDeppTwGX4=
github.com/prometheus/procfs v0.12.0 h1:jluTpSng7V9hY0O2R9DzzJHYb2xULk9VTR1V1R/k6Bo=
github.com/prometheus/prometheus v1.8.2-0.20221021121301-51a44e6657c3 h1:etRZv4bJf9YAuyPWbyFufjkijfeoPSmyA5xNcd4DoyI=
github.com/prometheus/prometheus v1.8.2-0.20221021121301-51a44e6657c3/go.mod h1:plwr4+63Q1xL8oIdBDeU854um7Cct0Av8dhP44lutMw=
github.com/rivo/uniseg v0.3.4 h1:3Z3Eu6FGHZWSfNKJTOUiPatWwfc7DzJRU04jFUqJODw=
github.com/rogpeppe/go-internal v1.11.0 h1:cWPaGQEPrBb5/AsnsZesgZZ9yb1OQ+GOISoDNXVBh4M=
github.com/russross/blackfriday/v2 v2.1.0 h1:JIOH55/0cWyOuilr9/qlrm0BSXldqnqwMsf35Ld67mk=
github.com/smartystreets/assertions v0.0.0-20190116191733-b6c0e53d7304 h1:Jpy1PXuP99tXNrhbq2BaPz9B+jNAvH1JPQQpG/9GCXY=
github.com/smartystreets/goconvey v1.6.4 h1:fv0U8FUIMPNf1L9lnHLvLhgicrIVChEkdzIKYqbNC9s=
github.com/stretchr/testify v1.8.4 h1:CcVxjf3Q8PM0mHUKJCdn+eZZtm5yQwehR5yeSVQQcUk=
github.com/ugorji/go/codec v1.2.7 h1:YPXUKf7fYbp/y8xloBqZOw2qaVggbfwMlI8WM3wZUJ0=
github.com/unknwon/bra v0.0.0-20200517080246-1e3013ecaff8 h1:aVGB3YnaS/JNfOW3tiHIlmNmTDg618va+eT0mVomgyI=
github.com/unknwon/com v1.0.1 h1:3d1LTxD+Lnf3soQiD4Cp/0BRB+Rsa/+RTvz8GMMzIXs=
github.com/unknwon/log v0.0.0-20150304194804-e617c87089d3 h1:4EYQaWAatQokdji3zqZloVIW/Ke1RQjYw2zHULyrHJg=
github.com/urfave/cli v1.22.14 h1:ebbhrRiGK2i4naQJr+1Xj92HXZCrK7MsyTS/ob3HnAk=
github.com/zeebo/assert v1.3.0 h1:g7C04CbJuIDKNPFHmsk4hwZDO5O+kntRxzaUoNXj+IQ=
github.com/zeebo/xxh3 v1.0.2 h1:xZmwmqxHZA8AI603jOQ0tMqmBr9lPeFwGg6d+xy9DC0=
go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.49.0 h1:4Pp6oUg3+e/6M4C0A/3kJ2VYa++dsWVTtGgLVj5xtHg=
go.opentelemetry.io/contrib/instrumentation/net/http/httptrace/otelhttptrace v0.49.0 h1:RtcvQ4iw3w9NBB5yRwgA4sSa82rfId7n4atVpvKx3bY=
go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.49.0 h1:jq9TW8u3so/bN+JPT166wjOI6/vQPF6Xe7nMNIltagk=
go.opentelemetry.io/contrib/propagators/jaeger v1.22.0 h1:bAHX+zN/inu+Rbqk51REmC8oXLl+Dw6pp9ldQf/onaY=
go.opentelemetry.io/contrib/samplers/jaegerremote v0.18.0 h1:Q9PrD94WoMolBx44ef5UWWvufpVSME0MiSymXZfedso=
go.opentelemetry.io/otel v1.24.0 h1:0LAOdjNmQeSTzGBzduGe/rU4tZhMwL5rWgtp9Ku5Jfo=
go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.24.0 h1:t6wl9SPayj+c7lEIFgm4ooDBZVb01IhLB4InpomhRw8=
go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracegrpc v1.24.0 h1:Mw5xcxMwlqoJd97vwPxA8isEaIoxsta9/Q51+TTJLGE=
go.opentelemetry.io/otel/metric v1.24.0 h1:6EhoGWWK28x1fbpA4tYTOWBkPefTDQnb8WSGXlc88kI=
go.opentelemetry.io/otel/sdk v1.24.0 h1:YMPPDNymmQN3ZgczicBY3B6sf9n62Dlj9pWD3ucgoDw=
go.opentelemetry.io/otel/trace v1.24.0 h1:CsKnnL4dUAr/0llH9FKuc698G04IrpWV0MQA/Y1YELI=
go.opentelemetry.io/proto/otlp v1.1.0 h1:2Di21piLrCqJ3U3eXGCTPHE9R8Nh+0uglSnOyxikMeI=
go.uber.org/atomic v1.11.0 h1:ZvwS0R+56ePWxUNi+Atn9dWONBPp/AUETXlHW0DxSjE=
go.uber.org/goleak v1.3.0 h1:2K3zAYmnTNqV73imy9J1T3WC+gmCePx2hEGkimedGto=
golang.org/x/exp v0.0.0-20231206192017-f3f8817b8deb h1:c0vyKkb6yr3KR7jEfJaOSv4lG7xPkbN6r52aJz1d8a8=
golang.org/x/mod v0.14.0 h1:dGoOF9QVLYng8IHTm7BAyWqCqSheQ5pYWGhzW00YJr0=
golang.org/x/net v0.21.0 h1:AQyQV4dYCvJ7vGmJyKki9+PBdyvhkSd8EIx/qb0AYv4=
golang.org/x/oauth2 v0.16.0 h1:aDkGMBSYxElaoP81NpoUoz2oo2R2wHdZpGToUxfyQrQ=
golang.org/x/sync v0.6.0 h1:5BMeUDZ7vkXGfEr1x9B4bRcTH4lpkTkpdh0T/J+qjbQ=
golang.org/x/sys v0.17.0 h1:25cE3gD+tdBA7lp7QfhuV+rJiE9YXTcS3VG1SqssI/Y=
golang.org/x/text v0.14.0 h1:ScX5w1eTa3QqT8oi6+ziP7dTV1S2+ALU0bI+0zXKWiQ=
golang.org/x/tools v0.17.0 h1:FvmRgNOcs3kOa+T20R1uhfP9F6HgG2mfxDv1vrx1Htc=
golang.org/x/xerrors v0.0.0-20231012003039-104605ab7028 h1:+cNy6SZtPcJQH3LJVLOSmiC7MMxXNOb3PU/VUEz+EhU=
gonum.org/v1/gonum v0.12.0 h1:xKuo6hzt+gMav00meVPUlXwSdoEJP46BR+wdxQEFK2o=
google.golang.org/appengine v1.6.8 h1:IhEN5q69dyKagZPYMSdIjS2HqprW324FRQZJcGqPAsM=
google.golang.org/genproto v0.0.0-20240123012728-ef4313101c80 h1:KAeGQVN3M9nD0/bQXnr/ClcEMJ968gUXJQ9pwfSynuQ=
google.golang.org/genproto/googleapis/api v0.0.0-20240123012728-ef4313101c80 h1:Lj5rbfG876hIAYFjqiJnPHfhXbv+nzTWfm04Fg/XSVU=
google.golang.org/genproto/googleapis/rpc v0.0.0-20240123012728-ef4313101c80 h1:AjyfHzEPEFp/NpvfN5g+KDla3EMojjhRVZc1i7cj+oM=
google.golang.org/grpc v1.62.1 h1:B4n+nfKzOICUXMgyrNd19h/I9oH0L1pizfk1d4zSgTk=
google.golang.org/protobuf v1.32.0 h1:pPC6BG5ex8PDFnkbrGU3EixyhKcQ2aDuBS36lqK/C7I=
gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk=
gopkg.in/fsnotify/fsnotify.v1 v1.4.7 h1:XNNYLJHt73EyYiCZi6+xjupS9CpvmiDgjPTAjrBlQbo=
gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY=
gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=

108
pkg/promlib/healthcheck.go Normal file
View File

@@ -0,0 +1,108 @@
package promlib
import (
"context"
"encoding/json"
"errors"
"fmt"
"time"
"github.com/grafana/grafana-plugin-sdk-go/backend"
"github.com/grafana/grafana/pkg/promlib/models"
)
const (
refID = "__healthcheck__"
)
var logger = backend.NewLoggerWith("logger", "tsdb.prometheus")
// CheckHealth verifies the datasource by running a fixed instant query
// against the Prometheus API and, on success, attaches heuristics
// (application kind, feature flags) to the result as JSON details.
func (s *Service) CheckHealth(ctx context.Context, req *backend.CheckHealthRequest) (*backend.CheckHealthResult,
	error) {
	log := logger.FromContext(ctx)

	// Check that the datasource exists before doing anything else.
	inst, err := s.getInstance(ctx, req.PluginContext)
	if err != nil {
		return getHealthCheckMessage("error getting datasource info", err)
	}
	if inst == nil {
		return getHealthCheckMessage("", errors.New("invalid datasource info received"))
	}

	result, err := healthcheck(ctx, req, inst)
	if err != nil {
		log.Warn("Error performing prometheus healthcheck", "err", err.Error())
		return nil, err
	}

	// Heuristics are best-effort: a failure here only omits the detail
	// payload, it never changes the health status itself.
	h, err := getHeuristics(ctx, inst)
	if err != nil {
		log.Warn("Failed to get prometheus heuristics", "err", err.Error())
		return result, nil
	}
	details, err := json.Marshal(h)
	if err != nil {
		log.Warn("Failed to marshal heuristics", "err", err)
		return result, nil
	}
	result.JSONDetails = details
	return result, nil
}
// healthcheck runs a minimal instant query ("1+1") through the regular
// query path and converts the outcome into a CheckHealthResult.
func healthcheck(ctx context.Context, req *backend.CheckHealthRequest, i *instance) (*backend.CheckHealthResult, error) {
	qm := models.QueryModel{
		UtcOffsetSec: 0,
		CommonQueryProperties: models.CommonQueryProperties{
			RefId: refID,
		},
		PrometheusQueryProperties: models.PrometheusQueryProperties{
			Expr:    "1+1",
			Instant: true,
		},
	}
	// Do not silently ignore a marshalling failure; it would otherwise send
	// an empty query body and produce a confusing downstream error.
	b, err := json.Marshal(&qm)
	if err != nil {
		return getHealthCheckMessage("error marshaling healthcheck query", err)
	}
	query := backend.DataQuery{
		RefID: refID,
		// Arbitrary fixed range; the query is instant, only validity matters.
		TimeRange: backend.TimeRange{
			From: time.Unix(1, 0).UTC(),
			To:   time.Unix(4, 0).UTC(),
		},
		JSON: b,
	}
	resp, err := i.queryData.Execute(ctx, &backend.QueryDataRequest{
		PluginContext: req.PluginContext,
		Queries:       []backend.DataQuery{query},
	})
	if err != nil {
		return getHealthCheckMessage("There was an error returned querying the Prometheus API.", err)
	}
	// Pass the response error through as-is instead of re-wrapping its text
	// with errors.New, which would strip any wrapped error chain.
	if respErr := resp.Responses[refID].Error; respErr != nil {
		return getHealthCheckMessage("There was an error returned querying the Prometheus API.", respErr)
	}
	return getHealthCheckMessage("Successfully queried the Prometheus API.", nil)
}
// getHealthCheckMessage builds a CheckHealthResult. A nil err yields an OK
// result carrying message verbatim; a non-nil err yields an error result
// whose message combines the error text and message.
func getHealthCheckMessage(message string, err error) (*backend.CheckHealthResult, error) {
	if err != nil {
		return &backend.CheckHealthResult{
			Status:  backend.HealthStatusError,
			Message: fmt.Sprintf("%s - %s", err.Error(), message),
		}, nil
	}
	return &backend.CheckHealthResult{
		Status:  backend.HealthStatusOk,
		Message: message,
	}, nil
}

View File

@@ -0,0 +1,140 @@
package promlib
import (
"context"
"io"
"net/http"
"strings"
"testing"
"time"
"github.com/grafana/grafana-plugin-sdk-go/backend"
"github.com/grafana/grafana-plugin-sdk-go/backend/datasource"
sdkhttpclient "github.com/grafana/grafana-plugin-sdk-go/backend/httpclient"
"github.com/stretchr/testify/assert"
)
// healthCheckProvider wraps the SDK http client provider so tests can capture
// the round tripper that ends up on the created client.
type healthCheckProvider[T http.RoundTripper] struct {
	sdkhttpclient.Provider
	RoundTripper *T
}

// healthCheckSuccessRoundTripper serves a canned successful scalar response.
type healthCheckSuccessRoundTripper struct {
}

// healthCheckFailRoundTripper serves a canned HTTP 400 response with no body.
type healthCheckFailRoundTripper struct {
}
// RoundTrip answers every request with a 200 response whose body is a canned
// Prometheus scalar query result (the shape produced by the "1+1" healthcheck).
func (rt *healthCheckSuccessRoundTripper) RoundTrip(req *http.Request) (*http.Response, error) {
	body := io.NopCloser(strings.NewReader(`{
		"status": "success",
		"data": {
			"resultType": "scalar",
			"result": [
				1692969348.331,
				"2"
			]
		}
	}`))
	return &http.Response{
		Status:        "200",
		StatusCode:    200,
		Header:        nil,
		Body:          body,
		ContentLength: 0,
		Request:       req,
	}, nil
}
// RoundTrip answers every request with a bare 400 response and a nil body.
func (rt *healthCheckFailRoundTripper) RoundTrip(req *http.Request) (*http.Response, error) {
	resp := &http.Response{
		Status:     "400",
		StatusCode: 400,
		Request:    req,
	}
	return resp, nil
}
// New creates an http.Client whose transport is a freshly allocated T and
// records that round tripper on the provider for later inspection.
func (provider *healthCheckProvider[T]) New(opts ...sdkhttpclient.Options) (*http.Client, error) {
	provider.RoundTripper = new(T)
	return &http.Client{Transport: *provider.RoundTripper}, nil
}
// GetTransport returns the zero value of T as the transport; a stub is
// sufficient for these tests.
func (provider *healthCheckProvider[T]) GetTransport(opts ...sdkhttpclient.Options) (http.RoundTripper, error) {
	return *new(T), nil
}
// getMockProvider builds an SDK http client provider whose middleware always
// substitutes the mock round tripper T for the real transport.
func getMockProvider[T http.RoundTripper]() *sdkhttpclient.Provider {
	p := &healthCheckProvider[T]{RoundTripper: new(T)}
	mid := sdkhttpclient.NamedMiddlewareFunc("mock", sdkhttpclient.MiddlewareFunc(
		func(o sdkhttpclient.Options, next http.RoundTripper) http.RoundTripper {
			return *p.RoundTripper
		},
	))
	return sdkhttpclient.NewProvider(sdkhttpclient.ProviderOptions{
		Middlewares: []sdkhttpclient.Middleware{mid},
	})
}
func Test_healthcheck(t *testing.T) {
t.Run("should do a successful health check", func(t *testing.T) {
httpProvider := getMockProvider[*healthCheckSuccessRoundTripper]()
s := &Service{
im: datasource.NewInstanceManager(newInstanceSettings(httpProvider, backend.NewLoggerWith("logger", "test"), mockExtendClientOpts)),
}
req := &backend.CheckHealthRequest{
PluginContext: getPluginContext(),
Headers: nil,
}
res, err := s.CheckHealth(context.Background(), req)
assert.NoError(t, err)
assert.Equal(t, backend.HealthStatusOk, res.Status)
})
t.Run("should return an error for an unsuccessful health check", func(t *testing.T) {
httpProvider := getMockProvider[*healthCheckFailRoundTripper]()
s := &Service{
im: datasource.NewInstanceManager(newInstanceSettings(httpProvider, backend.NewLoggerWith("logger", "test"), mockExtendClientOpts)),
}
req := &backend.CheckHealthRequest{
PluginContext: getPluginContext(),
Headers: nil,
}
res, err := s.CheckHealth(context.Background(), req)
assert.NoError(t, err)
assert.Equal(t, backend.HealthStatusError, res.Status)
})
}
// getPluginContext returns a minimal plugin context pointing at the test
// Prometheus datasource instance; all other fields stay at their zero value.
func getPluginContext() backend.PluginContext {
	return backend.PluginContext{
		PluginID:                   "prometheus",
		DataSourceInstanceSettings: getPromInstanceSettings(),
	}
}
// getPromInstanceSettings returns instance settings for a basic-auth
// Prometheus test datasource with empty JSON data.
func getPromInstanceSettings() *backend.DataSourceInstanceSettings {
	settings := &backend.DataSourceInstanceSettings{
		Type:                    "prometheus",
		Name:                    "test-prometheus",
		URL:                     "http://promurl:9090",
		BasicAuthEnabled:        true,
		BasicAuthUser:           "admin",
		JSONData:                []byte("{}"),
		DecryptedSecureJSONData: map[string]string{},
		Updated:                 time.Time{},
	}
	return settings
}

112
pkg/promlib/heuristics.go Normal file
View File

@@ -0,0 +1,112 @@
package promlib
import (
"context"
"encoding/json"
"errors"
"fmt"
"net/http"
"github.com/grafana/grafana-plugin-sdk-go/backend"
)
const (
	// KindPrometheus and KindMimir identify which application is backing the
	// datasource, as detected from the buildinfo endpoint.
	KindPrometheus = "Prometheus"
	KindMimir      = "Mimir"
)

var (
	// ErrNoBuildInfo is returned when the buildinfo endpoint does not exist
	// (HTTP 404 from /api/v1/status/buildinfo).
	ErrNoBuildInfo = errors.New("no build info")
)

// BuildInfoRequest carries the plugin context identifying the datasource
// instance whose build info should be fetched.
type BuildInfoRequest struct {
	PluginContext backend.PluginContext
}

// BuildInfoResponse mirrors the JSON payload of /api/v1/status/buildinfo.
type BuildInfoResponse struct {
	Status string                `json:"status"`
	Data   BuildInfoResponseData `json:"data"`
}

// BuildInfoResponseData is the "data" object of the buildinfo response.
type BuildInfoResponseData struct {
	Version   string            `json:"version"`
	Revision  string            `json:"revision"`
	Branch    string            `json:"branch"`
	Features  map[string]string `json:"features"`
	BuildUser string            `json:"buildUser"`
	BuildDate string            `json:"buildDate"`
	GoVersion string            `json:"goVersion"`
}
// GetBuildInfo fetches build information for the datasource instance
// referenced by the request's plugin context.
func (s *Service) GetBuildInfo(ctx context.Context, req BuildInfoRequest) (*BuildInfoResponse, error) {
	i, err := s.getInstance(ctx, req.PluginContext)
	if err != nil {
		return nil, err
	}
	return getBuildInfo(ctx, i)
}
// getBuildInfo queries /api/v1/status/buildinfo through the instance's
// resource handler and decodes the JSON payload. A 404 maps to
// ErrNoBuildInfo; any other non-200 status is an error.
func getBuildInfo(ctx context.Context, i *instance) (*BuildInfoResponse, error) {
	resp, err := i.resource.Execute(ctx, &backend.CallResourceRequest{
		Path: "api/v1/status/buildinfo",
	})
	if err != nil {
		return nil, err
	}
	switch resp.Status {
	case http.StatusNotFound:
		return nil, ErrNoBuildInfo
	case http.StatusOK:
		// fall through to decode the body
	default:
		return nil, fmt.Errorf("unexpected response %d", resp.Status)
	}
	var res BuildInfoResponse
	if err := json.Unmarshal(resp.Body, &res); err != nil {
		return nil, fmt.Errorf("failed to unmarshal JSON: %w", err)
	}
	return &res, nil
}
// HeuristicsRequest carries the plugin context identifying the datasource
// instance to inspect.
type HeuristicsRequest struct {
	PluginContext backend.PluginContext
}

// Heuristics describes what kind of application backs the datasource and
// which features are assumed available.
type Heuristics struct {
	Application string   `json:"application"`
	Features    Features `json:"features"`
}

// Features lists capability flags derived from the detected application.
type Features struct {
	RulerApiEnabled bool `json:"rulerApiEnabled"`
}
// GetHeuristics derives application heuristics for the datasource instance
// referenced by the request's plugin context.
func (s *Service) GetHeuristics(ctx context.Context, req HeuristicsRequest) (*Heuristics, error) {
	i, err := s.getInstance(ctx, req.PluginContext)
	if err != nil {
		return nil, err
	}
	return getHeuristics(ctx, i)
}
// getHeuristics classifies the backing application via the buildinfo
// endpoint: a non-empty features map is treated as Mimir (with the ruler API
// assumed enabled); an empty one as plain Prometheus.
func getHeuristics(ctx context.Context, i *instance) (*Heuristics, error) {
	buildInfo, err := getBuildInfo(ctx, i)
	if err != nil {
		// Wrap and return only: the caller (CheckHealth) already logs this
		// failure, so logging here as well would double-report it.
		return nil, fmt.Errorf("failed to get buildinfo: %w", err)
	}
	heuristics := Heuristics{
		Application: KindPrometheus,
		Features: Features{
			RulerApiEnabled: false,
		},
	}
	// If there are features then this is a Mimir datasource.
	if len(buildInfo.Data.Features) > 0 {
		heuristics.Application = KindMimir
		heuristics.Features.RulerApiEnabled = true
	}
	return &heuristics, nil
}

View File

@@ -0,0 +1,88 @@
package promlib
import (
"context"
"io"
"net/http"
"strconv"
"strings"
"testing"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/grafana/grafana-plugin-sdk-go/backend"
"github.com/grafana/grafana-plugin-sdk-go/backend/datasource"
sdkhttpclient "github.com/grafana/grafana-plugin-sdk-go/backend/httpclient"
)
// heuristicsSuccessRoundTripper replays a fixed body and status code for
// every request, standing in for the Prometheus HTTP API in tests.
type heuristicsSuccessRoundTripper struct {
	res    io.ReadCloser
	status int
}

// RoundTrip answers every request with the configured status and body.
func (rt *heuristicsSuccessRoundTripper) RoundTrip(req *http.Request) (*http.Response, error) {
	resp := &http.Response{
		Status:     strconv.Itoa(rt.status),
		StatusCode: rt.status,
		Body:       rt.res,
		Request:    req,
	}
	return resp, nil
}
// newHeuristicsSDKProvider wraps hrt in an SDK http client provider whose
// middleware always substitutes it for the real transport.
func newHeuristicsSDKProvider(hrt heuristicsSuccessRoundTripper) *sdkhttpclient.Provider {
	mid := sdkhttpclient.NamedMiddlewareFunc("mock", sdkhttpclient.MiddlewareFunc(
		func(o sdkhttpclient.Options, next http.RoundTripper) http.RoundTripper {
			return &hrt
		},
	))
	return sdkhttpclient.NewProvider(sdkhttpclient.ProviderOptions{
		Middlewares: []sdkhttpclient.Middleware{mid},
	})
}
// mockExtendClientOpts is a no-op stand-in for the client-options extension
// hook passed to newInstanceSettings in tests.
func mockExtendClientOpts(ctx context.Context, settings backend.DataSourceInstanceSettings, clientOpts *sdkhttpclient.Options) error {
	return nil
}
func Test_GetHeuristics(t *testing.T) {
t.Run("should return Prometheus", func(t *testing.T) {
rt := heuristicsSuccessRoundTripper{
res: io.NopCloser(strings.NewReader("{\"status\":\"success\",\"data\":{\"version\":\"1.0\"}}")),
status: http.StatusOK,
}
httpProvider := newHeuristicsSDKProvider(rt)
s := &Service{
im: datasource.NewInstanceManager(newInstanceSettings(httpProvider, backend.NewLoggerWith("logger", "test"), mockExtendClientOpts)),
}
req := HeuristicsRequest{
PluginContext: getPluginContext(),
}
res, err := s.GetHeuristics(context.Background(), req)
assert.NoError(t, err)
require.NotNil(t, res)
assert.Equal(t, KindPrometheus, res.Application)
assert.Equal(t, Features{RulerApiEnabled: false}, res.Features)
})
t.Run("should return Mimir", func(t *testing.T) {
rt := heuristicsSuccessRoundTripper{
res: io.NopCloser(strings.NewReader("{\"status\":\"success\",\"data\":{\"features\":{\"foo\":\"bar\"},\"version\":\"1.0\"}}")),
status: http.StatusOK,
}
httpProvider := newHeuristicsSDKProvider(rt)
s := &Service{
im: datasource.NewInstanceManager(newInstanceSettings(httpProvider, backend.NewLoggerWith("logger", "test"), mockExtendClientOpts)),
}
req := HeuristicsRequest{
PluginContext: getPluginContext(),
}
res, err := s.GetHeuristics(context.Background(), req)
assert.NoError(t, err)
require.NotNil(t, res)
assert.Equal(t, KindMimir, res.Application)
assert.Equal(t, Features{RulerApiEnabled: true}, res.Features)
})
}

View File

@@ -0,0 +1,87 @@
package instrumentation
import (
"github.com/grafana/grafana-plugin-sdk-go/backend"
"github.com/prometheus/client_golang/prometheus"
"github.com/prometheus/client_golang/prometheus/promauto"
)
var (
	// pluginRequestCounter counts prometheus backend plugin requests, broken
	// down by endpoint, outcome status, and attributed error source.
	pluginRequestCounter = promauto.NewCounterVec(prometheus.CounterOpts{
		Namespace: "grafana",
		Name:      "prometheus_plugin_backend_request_count",
		Help:      "The total amount of prometheus backend plugin requests",
	}, []string{"endpoint", "status", "errorSource"})
)

const (
	// Values for the "status" label.
	StatusOK    = "ok"
	StatusError = "error"

	// Values for the "endpoint" label.
	EndpointCallResource = "callResource"
	EndpointQueryData    = "queryData"

	// Values for the "errorSource" label; getErrorSource ranks their
	// severity as plugin > database > external > none.
	PluginSource   = "plugin"
	ExternalSource = "external"
	DatabaseSource = "database"
	NoneSource     = "none"
)
// UpdateQueryDataMetrics records one queryData request in the backend
// request counter, labelled with the overall status and the most severe
// error source found in the response set.
func UpdateQueryDataMetrics(err error, resp *backend.QueryDataResponse) {
	status := StatusOK
	if err != nil {
		status = StatusError
	}
	pluginRequestCounter.WithLabelValues(EndpointQueryData, status, getErrorSource(err, resp)).Inc()
}
// getErrorSource picks the most severe error source across the responses.
// The priority order is: pluginSource > databaseSource > externalSource >
// noneSource. A top-level err is always attributed to the plugin.
func getErrorSource(err error, resp *backend.QueryDataResponse) string {
	if err != nil {
		return PluginSource
	}
	worst := NoneSource
	for _, res := range resp.Responses {
		switch getErrorSourceForResponse(res) {
		case PluginSource:
			// Highest severity; nothing can outrank it, so stop early.
			return PluginSource
		case DatabaseSource:
			worst = DatabaseSource
		case ExternalSource:
			if worst == NoneSource {
				worst = ExternalSource
			}
		}
	}
	return worst
}
// getErrorSourceForResponse attributes a single response: an explicit error
// or a generic 4xx is the plugin's, a 5xx is the database's, and auth-style
// 4xx codes are external.
func getErrorSourceForResponse(res backend.DataResponse) string {
	switch {
	case res.Error != nil:
		return PluginSource
	case res.Status >= 500:
		return DatabaseSource
	case res.Status >= 400:
		// These codes are related to authentication and authorization.
		switch res.Status {
		case 401, 402, 403, 407:
			return ExternalSource
		}
		return PluginSource
	default:
		return NoneSource
	}
}

View File

@@ -0,0 +1,117 @@
package instrumentation
import (
"fmt"
"testing"
"github.com/grafana/grafana-plugin-sdk-go/backend"
)
// checkErrorSource fails the test when the computed errorSource does not
// match the expected one.
func checkErrorSource(t *testing.T, expected, actual string) {
	t.Helper()
	if expected != actual {
		t.Errorf("expected errorSource to be %v, but got %v", expected, actual)
	}
}
// TestGetErrorSourceForResponse covers the mapping from a single
// backend.DataResponse to an errorSource label. (Subtest names previously
// said "satus" and "> 500"; the code and assertions treat 500 itself as a
// database error, i.e. >= 500.)
func TestGetErrorSourceForResponse(t *testing.T) {
	t.Run("A response that returns an error should return pluginSource", func(t *testing.T) {
		errorSource := getErrorSourceForResponse(backend.DataResponse{Error: fmt.Errorf("error")})
		checkErrorSource(t, PluginSource, errorSource)
	})
	t.Run("A response with an http status code >= 500 should return databaseSource", func(t *testing.T) {
		errorSource := getErrorSourceForResponse(backend.DataResponse{Error: nil, Status: 500})
		checkErrorSource(t, DatabaseSource, errorSource)
		errorSource = getErrorSourceForResponse(backend.DataResponse{Error: nil, Status: 503})
		checkErrorSource(t, DatabaseSource, errorSource)
		errorSource = getErrorSourceForResponse(backend.DataResponse{Error: nil, Status: 507})
		checkErrorSource(t, DatabaseSource, errorSource)
	})
	t.Run("A response with an http status related to auth (401, 402, 403, 407), should return externalSource", func(t *testing.T) {
		errorSource := getErrorSourceForResponse(backend.DataResponse{Error: nil, Status: 401})
		checkErrorSource(t, ExternalSource, errorSource)
		errorSource = getErrorSourceForResponse(backend.DataResponse{Error: nil, Status: 402})
		checkErrorSource(t, ExternalSource, errorSource)
		errorSource = getErrorSourceForResponse(backend.DataResponse{Error: nil, Status: 403})
		checkErrorSource(t, ExternalSource, errorSource)
		errorSource = getErrorSourceForResponse(backend.DataResponse{Error: nil, Status: 407})
		checkErrorSource(t, ExternalSource, errorSource)
	})
	t.Run("A response with an http status of 4xx but not related to auth (401, 402, 403, 407), should return pluginSource", func(t *testing.T) {
		errorSource := getErrorSourceForResponse(backend.DataResponse{Error: nil, Status: 400})
		checkErrorSource(t, PluginSource, errorSource)
		errorSource = getErrorSourceForResponse(backend.DataResponse{Error: nil, Status: 404})
		checkErrorSource(t, PluginSource, errorSource)
		errorSource = getErrorSourceForResponse(backend.DataResponse{Error: nil, Status: 405})
		checkErrorSource(t, PluginSource, errorSource)
	})
	t.Run("A response without error and with an http status of 2xx, should return noneSource", func(t *testing.T) {
		errorSource := getErrorSourceForResponse(backend.DataResponse{Error: nil, Status: 200})
		checkErrorSource(t, NoneSource, errorSource)
		errorSource = getErrorSourceForResponse(backend.DataResponse{Error: nil, Status: 201})
		checkErrorSource(t, NoneSource, errorSource)
	})
}
// TestGetErrorSource verifies that the aggregated errorSource for a whole
// QueryData call is the most severe source across all responses, and that a
// top-level error short-circuits to pluginSource.
func TestGetErrorSource(t *testing.T) {
	t.Run("If status of backend.QueryDataResponse is statusError, then errorSource is pluginSource ", func(t *testing.T) {
		errorSource := getErrorSource(fmt.Errorf("a random error"), nil)
		checkErrorSource(t, PluginSource, errorSource)
	})
	t.Run("If status of backend.QueryDataResponse is statusOK, then errorSource is the most severe response's errorSource: pluginSource > databaseSource > externalSource > noneSource", func(t *testing.T) {
		// plugin (A errors) beats everything else.
		errorSource := getErrorSource(nil, &backend.QueryDataResponse{
			Responses: map[string]backend.DataResponse{
				"A": {Error: fmt.Errorf("error")},
				"B": {Error: nil, Status: 200},
			},
		})
		checkErrorSource(t, PluginSource, errorSource)
		// 400 maps to plugin, which again wins over database/external/none.
		errorSource = getErrorSource(nil, &backend.QueryDataResponse{
			Responses: map[string]backend.DataResponse{
				"A": {Error: nil, Status: 400},
				"B": {Error: nil, Status: 500},
				"C": {Error: nil, Status: 401},
				"D": {Error: nil, Status: 200},
			},
		})
		checkErrorSource(t, PluginSource, errorSource)
		// Without a plugin-level failure, database (500) outranks external (401).
		errorSource = getErrorSource(nil, &backend.QueryDataResponse{
			Responses: map[string]backend.DataResponse{
				"B": {Error: nil, Status: 500},
				"C": {Error: nil, Status: 401},
				"D": {Error: nil, Status: 200},
			},
		})
		checkErrorSource(t, DatabaseSource, errorSource)
		// external (401) outranks none (200).
		errorSource = getErrorSource(nil, &backend.QueryDataResponse{
			Responses: map[string]backend.DataResponse{
				"C": {Error: nil, Status: 401},
				"D": {Error: nil, Status: 200},
			},
		})
		checkErrorSource(t, ExternalSource, errorSource)
		// All-success yields none.
		errorSource = getErrorSource(nil, &backend.QueryDataResponse{
			Responses: map[string]backend.DataResponse{
				"D": {Error: nil, Status: 200},
			},
		})
		checkErrorSource(t, NoneSource, errorSource)
	})
}

View File

@@ -0,0 +1,74 @@
// Package intervalv2 partially copied from https://github.com/grafana/grafana/blob/main/pkg/tsdb/intervalv2/intervalv2.go
package intervalv2
import (
"time"
"github.com/grafana/grafana-plugin-sdk-go/backend"
"github.com/grafana/grafana-plugin-sdk-go/backend/gtime"
)
var (
	// DefaultRes is the fallback resolution (target number of data points)
	// used when a query does not specify MaxDataPoints.
	DefaultRes int64 = 1500
	// defaultMinInterval is the lower bound applied when no MinInterval
	// option is supplied.
	defaultMinInterval = time.Millisecond * 1
)

// Interval couples a human-readable interval string with its duration value.
type Interval struct {
	Text  string
	Value time.Duration
}

// intervalCalculator is the default Calculator implementation.
type intervalCalculator struct {
	minInterval time.Duration
}

// Calculator computes query step intervals from a time range and resolution.
type Calculator interface {
	Calculate(timerange backend.TimeRange, minInterval time.Duration, maxDataPoints int64) Interval
	CalculateSafeInterval(timerange backend.TimeRange, resolution int64) Interval
}

// CalculatorOptions configures NewCalculator.
type CalculatorOptions struct {
	MinInterval time.Duration
}
// NewCalculator builds an interval calculator.
//
// The minimum interval defaults to defaultMinInterval and is overridden by
// any option that sets a positive MinInterval. (Previously the default was
// only applied when at least one option was passed, so NewCalculator() left
// minInterval at zero.)
func NewCalculator(opts ...CalculatorOptions) *intervalCalculator {
	calc := &intervalCalculator{minInterval: defaultMinInterval}
	for _, o := range opts {
		if o.MinInterval > 0 {
			calc.minInterval = o.MinInterval
		}
	}
	return calc
}
// Calculate returns the step for the given time range: the range divided by
// the resolution (maxDataPoints, or DefaultRes when zero), rounded to a
// "pretty" interval, and never smaller than minInterval.
func (ic *intervalCalculator) Calculate(timerange backend.TimeRange, minInterval time.Duration, maxDataPoints int64) Interval {
	res := maxDataPoints
	if res == 0 {
		res = DefaultRes
	}
	span := timerange.To.UnixNano() - timerange.From.UnixNano()
	interval := time.Duration(span / res)
	if interval < minInterval {
		// Clamp to the requested minimum; the minimum is reported un-rounded.
		return Interval{Text: gtime.FormatInterval(minInterval), Value: minInterval}
	}
	rounded := gtime.RoundInterval(interval)
	return Interval{Text: gtime.FormatInterval(rounded), Value: rounded}
}
// CalculateSafeInterval returns the "pretty"-rounded interval that keeps the
// number of points for the range at or below the given safe resolution.
func (ic *intervalCalculator) CalculateSafeInterval(timerange backend.TimeRange, safeRes int64) Interval {
	span := timerange.To.UnixNano() - timerange.From.UnixNano()
	rounded := gtime.RoundInterval(time.Duration(span / safeRes))
	return Interval{Text: gtime.FormatInterval(rounded), Value: rounded}
}

View File

@@ -0,0 +1,63 @@
package intervalv2
import (
"testing"
"time"
"github.com/grafana/grafana-plugin-sdk-go/backend"
"github.com/stretchr/testify/assert"
)
// TestIntervalCalculator_Calculate checks the rounded interval produced for a
// range of window sizes and resolutions, with a fixed 1ms minimum interval.
func TestIntervalCalculator_Calculate(t *testing.T) {
	calculator := NewCalculator(CalculatorOptions{})
	timeNow := time.Now()
	testCases := []struct {
		name       string
		timeRange  backend.TimeRange
		resolution int64 // 0 means "use DefaultRes"
		expected   string
	}{
		{"from 5m to now and default resolution", backend.TimeRange{From: timeNow, To: timeNow.Add(5 * time.Minute)}, 0, "200ms"},
		{"from 5m to now and 500 resolution", backend.TimeRange{From: timeNow, To: timeNow.Add(5 * time.Minute)}, 500, "500ms"},
		{"from 15m to now and default resolution", backend.TimeRange{From: timeNow, To: timeNow.Add(15 * time.Minute)}, 0, "500ms"},
		{"from 15m to now and 100 resolution", backend.TimeRange{From: timeNow, To: timeNow.Add(15 * time.Minute)}, 100, "10s"},
		{"from 30m to now and default resolution", backend.TimeRange{From: timeNow, To: timeNow.Add(30 * time.Minute)}, 0, "1s"},
		{"from 30m to now and 3000 resolution", backend.TimeRange{From: timeNow, To: timeNow.Add(30 * time.Minute)}, 3000, "500ms"},
		{"from 1h to now and default resolution", backend.TimeRange{From: timeNow, To: timeNow.Add(time.Hour)}, 0, "2s"},
		{"from 1h to now and 1000 resolution", backend.TimeRange{From: timeNow, To: timeNow.Add(time.Hour)}, 1000, "5s"},
	}
	for _, tc := range testCases {
		t.Run(tc.name, func(t *testing.T) {
			interval := calculator.Calculate(tc.timeRange, time.Millisecond*1, tc.resolution)
			assert.Equal(t, tc.expected, interval.Text)
		})
	}
}
// TestIntervalCalculator_CalculateSafeInterval checks the interval produced
// for several window sizes at the standard safe resolution of 11000 points.
func TestIntervalCalculator_CalculateSafeInterval(t *testing.T) {
	calculator := NewCalculator(CalculatorOptions{})
	timeNow := time.Now()
	testCases := []struct {
		name           string
		timeRange      backend.TimeRange
		safeResolution int64
		expected       string
	}{
		{"from 5m to now", backend.TimeRange{From: timeNow, To: timeNow.Add(5 * time.Minute)}, 11000, "20ms"},
		{"from 15m to now", backend.TimeRange{From: timeNow, To: timeNow.Add(15 * time.Minute)}, 11000, "100ms"},
		{"from 30m to now", backend.TimeRange{From: timeNow, To: timeNow.Add(30 * time.Minute)}, 11000, "200ms"},
		{"from 24h to now", backend.TimeRange{From: timeNow, To: timeNow.Add(1440 * time.Minute)}, 11000, "10s"},
	}
	for _, tc := range testCases {
		t.Run(tc.name, func(t *testing.T) {
			interval := calculator.CalculateSafeInterval(tc.timeRange, tc.safeResolution)
			assert.Equal(t, tc.expected, interval.Text)
		})
	}
}

154
pkg/promlib/library.go Normal file
View File

@@ -0,0 +1,154 @@
package promlib
import (
"context"
"errors"
"fmt"
"strings"
"time"
"github.com/grafana/grafana-plugin-sdk-go/backend"
"github.com/grafana/grafana-plugin-sdk-go/backend/datasource"
sdkhttpclient "github.com/grafana/grafana-plugin-sdk-go/backend/httpclient"
"github.com/grafana/grafana-plugin-sdk-go/backend/instancemgmt"
"github.com/grafana/grafana-plugin-sdk-go/backend/log"
"github.com/patrickmn/go-cache"
apiv1 "github.com/prometheus/client_golang/api/prometheus/v1"
"github.com/grafana/grafana/pkg/promlib/client"
"github.com/grafana/grafana/pkg/promlib/instrumentation"
"github.com/grafana/grafana/pkg/promlib/querydata"
"github.com/grafana/grafana/pkg/promlib/resource"
)
// Service implements the Prometheus backend entry points (query data and
// resource calls) on top of per-data-source instances.
type Service struct {
	im     instancemgmt.InstanceManager
	logger log.Logger
}

// instance bundles the handlers and caches kept for one data source instance.
type instance struct {
	queryData    *querydata.QueryData // query execution with custom response parsing
	resource     *resource.Resource   // resource (proxy) call handling
	versionCache *cache.Cache         // short-lived cache for the detected Prometheus version
}

// ExtendOptions lets the embedding application customize the HTTP transport
// options before the client for a data source instance is created.
type ExtendOptions func(ctx context.Context, settings backend.DataSourceInstanceSettings, clientOpts *sdkhttpclient.Options) error
// NewService constructs the promlib Service. A nil httpClientProvider is
// replaced with the SDK's default provider.
func NewService(httpClientProvider *sdkhttpclient.Provider, plog log.Logger, extendOptions ExtendOptions) *Service {
	provider := httpClientProvider
	if provider == nil {
		provider = sdkhttpclient.NewProvider()
	}
	return &Service{
		im:     datasource.NewInstanceManager(newInstanceSettings(provider, plog, extendOptions)),
		logger: plog,
	}
}
// newInstanceSettings returns the instance factory used by the instance
// manager. For each data source it builds the HTTP client (applying any
// caller-supplied transport extension), plus the query-data and resource
// handlers and a short-lived version cache.
func newInstanceSettings(httpClientProvider *sdkhttpclient.Provider, log log.Logger, extendOptions ExtendOptions) datasource.InstanceFactoryFunc {
	return func(ctx context.Context, settings backend.DataSourceInstanceSettings) (instancemgmt.Instance, error) {
		// Creates the http roundTripper options.
		opts, err := client.CreateTransportOptions(ctx, settings, log)
		if err != nil {
			return nil, fmt.Errorf("error creating transport options: %v", err)
		}
		// extendOptions is optional; guard against a nil callback so the
		// library can be used without transport customization (previously a
		// nil callback caused a panic here).
		if extendOptions != nil {
			if err = extendOptions(ctx, settings, opts); err != nil {
				return nil, fmt.Errorf("error extending transport options: %v", err)
			}
		}
		httpClient, err := httpClientProvider.New(*opts)
		if err != nil {
			return nil, fmt.Errorf("error creating http client: %v", err)
		}
		// New version using custom client and better response parsing.
		qd, err := querydata.New(httpClient, settings, log)
		if err != nil {
			return nil, err
		}
		// Resource call management using the same custom client as querydata.
		r, err := resource.New(httpClient, settings, log)
		if err != nil {
			return nil, err
		}
		return instance{
			queryData:    qd,
			resource:     r,
			versionCache: cache.New(time.Minute*1, time.Minute*5),
		}, nil
	}
}
// QueryData executes all queries in the request against the per-data-source
// instance and records request metrics for every outcome, including the
// empty-request and instance-lookup failure paths.
func (s *Service) QueryData(ctx context.Context, req *backend.QueryDataRequest) (*backend.QueryDataResponse, error) {
	if len(req.Queries) == 0 {
		emptyErr := fmt.Errorf("query contains no queries")
		instrumentation.UpdateQueryDataMetrics(emptyErr, nil)
		return &backend.QueryDataResponse{}, emptyErr
	}
	inst, err := s.getInstance(ctx, req.PluginContext)
	if err != nil {
		instrumentation.UpdateQueryDataMetrics(err, nil)
		return nil, err
	}
	resp, execErr := inst.queryData.Execute(ctx, req)
	instrumentation.UpdateQueryDataMetrics(execErr, resp)
	return resp, execErr
}
// CallResource proxies a resource request to the data source. The special
// "version-detect" path is answered from a short-lived per-instance cache of
// the detected Prometheus version.
func (s *Service) CallResource(ctx context.Context, req *backend.CallResourceRequest, sender backend.CallResourceResponseSender) error {
	inst, err := s.getInstance(ctx, req.PluginContext)
	if err != nil {
		return err
	}
	// Ordinary resource calls are executed and forwarded directly.
	if !strings.EqualFold(req.Path, "version-detect") {
		resp, execErr := inst.resource.Execute(ctx, req)
		if execErr != nil {
			return execErr
		}
		return sender.Send(resp)
	}
	// version-detect: serve from cache when possible, otherwise detect and cache.
	if cached, ok := inst.versionCache.Get("version"); ok {
		return sender.Send(cached.(*backend.CallResourceResponse))
	}
	vResp, err := inst.resource.DetectVersion(ctx, req)
	if err != nil {
		return err
	}
	inst.versionCache.Set("version", vResp, cache.DefaultExpiration)
	return sender.Send(vResp)
}
// getInstance fetches (creating lazily if needed) the cached instance for the
// given plugin context.
func (s *Service) getInstance(ctx context.Context, pluginCtx backend.PluginContext) (*instance, error) {
	v, err := s.im.Get(ctx, pluginCtx)
	if err != nil {
		return nil, err
	}
	inst := v.(instance)
	return &inst, nil
}
// IsAPIError reports whether err is, or wraps, a Prometheus apiv1.Error.
func IsAPIError(err error) bool {
	var apiErr *apiv1.Error
	return errors.As(err, &apiErr)
}
// ConvertAPIError flattens a (possibly wrapped) Prometheus apiv1.Error into a
// plain error carrying its message and detail; any other error is returned
// unchanged.
func ConvertAPIError(err error) error {
	var apiErr *apiv1.Error
	if !errors.As(err, &apiErr) {
		return err
	}
	return fmt.Errorf("%s: %s", apiErr.Msg, apiErr.Detail)
}

118
pkg/promlib/library_test.go Normal file
View File

@@ -0,0 +1,118 @@
package promlib
import (
"context"
"io"
"net/http"
"testing"
"time"
"github.com/grafana/grafana-plugin-sdk-go/backend"
sdkhttpclient "github.com/grafana/grafana-plugin-sdk-go/backend/httpclient"
"github.com/stretchr/testify/require"
)
// fakeSender is a no-op CallResourceResponseSender.
type fakeSender struct{}

func (sender *fakeSender) Send(resp *backend.CallResourceResponse) error {
	return nil
}

// fakeRoundtripper records the outgoing request and answers with an empty 200
// response so tests can inspect what would have gone over the wire.
type fakeRoundtripper struct {
	Req *http.Request
}

func (rt *fakeRoundtripper) RoundTrip(req *http.Request) (*http.Response, error) {
	rt.Req = req
	return &http.Response{
		Status:        "200",
		StatusCode:    200,
		Header:        nil,
		Body:          nil,
		ContentLength: 0,
	}, nil
}

// fakeHTTPClientProvider hands out clients whose transport is a freshly
// created fakeRoundtripper, keeping a reference for later inspection.
type fakeHTTPClientProvider struct {
	sdkhttpclient.Provider
	Roundtripper *fakeRoundtripper
}

func (provider *fakeHTTPClientProvider) New(opts ...sdkhttpclient.Options) (*http.Client, error) {
	client := &http.Client{}
	provider.Roundtripper = &fakeRoundtripper{}
	client.Transport = provider.Roundtripper
	return client, nil
}

func (provider *fakeHTTPClientProvider) GetTransport(opts ...sdkhttpclient.Options) (http.RoundTripper, error) {
	return &fakeRoundtripper{}, nil
}

// getMockPromTestSDKProvider wraps the fake provider in a real SDK provider
// by installing a middleware that swaps in the fake round tripper.
func getMockPromTestSDKProvider(f *fakeHTTPClientProvider) *sdkhttpclient.Provider {
	anotherFN := func(o sdkhttpclient.Options, next http.RoundTripper) http.RoundTripper {
		_, _ = f.New()
		return f.Roundtripper
	}
	fn := sdkhttpclient.MiddlewareFunc(anotherFN)
	mid := sdkhttpclient.NamedMiddlewareFunc("mock", fn)
	return sdkhttpclient.NewProvider(sdkhttpclient.ProviderOptions{Middlewares: []sdkhttpclient.Middleware{mid}})
}

// mockExtendTransportOptions is an ExtendOptions callback that makes no changes.
func mockExtendTransportOptions(ctx context.Context, settings backend.DataSourceInstanceSettings, clientOpts *sdkhttpclient.Options) error {
	return nil
}
// TestService exercises Service.CallResource end-to-end with a mocked HTTP
// transport and verifies the outgoing request's headers, method, body and URL.
func TestService(t *testing.T) {
	t.Run("Service", func(t *testing.T) {
		t.Run("CallResource", func(t *testing.T) {
			t.Run("creates correct request", func(t *testing.T) {
				f := &fakeHTTPClientProvider{}
				httpProvider := getMockPromTestSDKProvider(f)
				service := NewService(httpProvider, backend.NewLoggerWith("logger", "test"), mockExtendTransportOptions)
				req := &backend.CallResourceRequest{
					PluginContext: backend.PluginContext{
						OrgID:               0,
						PluginID:            "prometheus",
						User:                nil,
						AppInstanceSettings: nil,
						DataSourceInstanceSettings: &backend.DataSourceInstanceSettings{
							ID:               0,
							UID:              "",
							Type:             "prometheus",
							Name:             "test-prom",
							URL:              "http://localhost:9090",
							User:             "",
							Database:         "",
							BasicAuthEnabled: true,
							BasicAuthUser:    "admin",
							Updated:          time.Time{},
							JSONData:         []byte("{}"),
						},
					},
					Path:   "/api/v1/series",
					Method: http.MethodPost,
					URL:    "/api/v1/series",
					Body:   []byte("match%5B%5D: ALERTS\nstart: 1655271408\nend: 1655293008"),
				}
				sender := &fakeSender{}
				err := service.CallResource(context.Background(), req, sender)
				require.NoError(t, err)
				// The request captured by the fake round tripper reflects what
				// the resource handler would actually send upstream.
				require.Equal(
					t,
					http.Header{
						"Content-Type":    {"application/x-www-form-urlencoded"},
						"Idempotency-Key": []string(nil),
					},
					f.Roundtripper.Req.Header)
				require.Equal(t, http.MethodPost, f.Roundtripper.Req.Method)
				body, err := io.ReadAll(f.Roundtripper.Req.Body)
				require.NoError(t, err)
				require.Equal(t, []byte("match%5B%5D: ALERTS\nstart: 1655271408\nend: 1655293008"), body)
				require.Equal(t, "http://localhost:9090/api/v1/series", f.Roundtripper.Req.URL.String())
			})
		})
	})
}

View File

@@ -0,0 +1,57 @@
package middleware
import (
"net/http"
"net/url"
sdkhttpclient "github.com/grafana/grafana-plugin-sdk-go/backend/httpclient"
"github.com/grafana/grafana-plugin-sdk-go/backend/log"
)
const (
	// customQueryParametersMiddlewareName is the registered middleware name.
	customQueryParametersMiddlewareName = "prom-custom-query-parameters"
	// customQueryParametersKey is the key (inside the grafanaDataKey map)
	// holding the url-encoded parameters to append to each request.
	customQueryParametersKey = "customQueryParameters"
	// grafanaDataKey is the CustomOptions entry carrying Grafana-provided data.
	grafanaDataKey = "grafanaData"
)
// CustomQueryParameters returns a middleware that appends the data-source
// configured query parameters (the "customQueryParameters" entry inside the
// "grafanaData" custom option) to every outgoing request URL. When the option
// is absent, not a string, empty or unparsable, the middleware is a no-op.
func CustomQueryParameters(logger log.Logger) sdkhttpclient.Middleware {
	return sdkhttpclient.NamedMiddlewareFunc(customQueryParametersMiddlewareName, func(opts sdkhttpclient.Options, next http.RoundTripper) http.RoundTripper {
		raw, found := opts.CustomOptions[grafanaDataKey]
		if !found {
			return next
		}
		grafanaData, ok := raw.(map[string]any)
		if !ok {
			return next
		}
		paramsVal, found := grafanaData[customQueryParametersKey]
		if !found {
			return next
		}
		params, ok := paramsVal.(string)
		if !ok || params == "" {
			return next
		}
		parsed, err := url.ParseQuery(params)
		if err != nil {
			// Bad configuration is logged once at build time and skipped.
			logger.Error("Failed to parse custom query parameters, skipping middleware", "error", err)
			return next
		}
		return sdkhttpclient.RoundTripperFunc(func(req *http.Request) (*http.Response, error) {
			query := req.URL.Query()
			for key, keyValues := range parsed {
				for _, value := range keyValues {
					query.Add(key, value)
				}
			}
			req.URL.RawQuery = query.Encode()
			return next.RoundTrip(req)
		})
	})
}

View File

@@ -0,0 +1,170 @@
package middleware
import (
"net/http"
"net/url"
"strings"
"testing"
"github.com/grafana/grafana-plugin-sdk-go/backend"
"github.com/grafana/grafana-plugin-sdk-go/backend/httpclient"
"github.com/stretchr/testify/require"
)
// TestCustomQueryParametersMiddleware verifies that the middleware appends the
// configured custom query parameters to outgoing request URLs, and is a no-op
// when the configuration is absent, mistyped, empty or unparsable. The six
// subtests previously duplicated the construction/round-trip boilerplate;
// that is now factored into the newRT and doRequest helpers.
func TestCustomQueryParametersMiddleware(t *testing.T) {
	require.Equal(t, "customQueryParameters", customQueryParametersKey)
	finalRoundTripper := httpclient.RoundTripperFunc(func(req *http.Request) (*http.Response, error) {
		return &http.Response{StatusCode: http.StatusOK}, nil
	})
	// newRT builds the middleware with the given options, asserts its
	// registered name and returns the wrapped round tripper.
	newRT := func(t *testing.T, opts httpclient.Options) http.RoundTripper {
		t.Helper()
		mw := CustomQueryParameters(backend.NewLoggerWith("logger", "test"))
		rt := mw.CreateMiddleware(opts, finalRoundTripper)
		require.NotNil(t, rt)
		middlewareName, ok := mw.(httpclient.MiddlewareName)
		require.True(t, ok)
		require.Equal(t, customQueryParametersMiddlewareName, middlewareName.MiddlewareName())
		return rt
	}
	// doRequest performs a GET through rt, closes any body, and returns the
	// request so its (possibly rewritten) URL can be inspected.
	doRequest := func(t *testing.T, rt http.RoundTripper, rawURL string) *http.Request {
		t.Helper()
		req, err := http.NewRequest(http.MethodGet, rawURL, nil)
		require.NoError(t, err)
		res, err := rt.RoundTrip(req)
		require.NoError(t, err)
		require.NotNil(t, res)
		if res.Body != nil {
			require.NoError(t, res.Body.Close())
		}
		return req
	}
	t.Run("Without custom query parameters set should not apply middleware", func(t *testing.T) {
		rt := newRT(t, httpclient.Options{})
		req := doRequest(t, rt, "http://test.com/query?hello=name")
		require.Equal(t, "http://test.com/query?hello=name", req.URL.String())
	})
	t.Run("Without custom query parameters set as string should not apply middleware", func(t *testing.T) {
		rt := newRT(t, httpclient.Options{
			CustomOptions: map[string]any{
				customQueryParametersKey: 64,
			},
		})
		req := doRequest(t, rt, "http://test.com/query?hello=name")
		require.Equal(t, "http://test.com/query?hello=name", req.URL.String())
	})
	t.Run("With custom query parameters set as empty string should not apply middleware", func(t *testing.T) {
		rt := newRT(t, httpclient.Options{
			CustomOptions: map[string]any{
				customQueryParametersKey: "",
			},
		})
		req := doRequest(t, rt, "http://test.com/query?hello=name")
		require.Equal(t, "http://test.com/query?hello=name", req.URL.String())
	})
	t.Run("With custom query parameters set as invalid query string should not apply middleware", func(t *testing.T) {
		rt := newRT(t, httpclient.Options{
			CustomOptions: map[string]any{
				customQueryParametersKey: "custom=%%abc&test=abc",
			},
		})
		req := doRequest(t, rt, "http://test.com/query?hello=name")
		require.Equal(t, "http://test.com/query?hello=name", req.URL.String())
	})
	t.Run("With custom query parameters set should apply middleware for request URL containing query parameters ", func(t *testing.T) {
		rt := newRT(t, httpclient.Options{
			CustomOptions: map[string]any{
				grafanaDataKey: map[string]any{
					customQueryParametersKey: "custom=par/am&second=f oo",
				},
			},
		})
		req := doRequest(t, rt, "http://test.com/query?hello=name")
		require.True(t, strings.HasPrefix(req.URL.String(), "http://test.com/query?"))
		q := req.URL.Query()
		require.Len(t, q, 3)
		require.Equal(t, "name", url.QueryEscape(q.Get("hello")))
		require.Equal(t, "par%2Fam", url.QueryEscape(q.Get("custom")))
		require.Equal(t, "f+oo", url.QueryEscape(q.Get("second")))
	})
	t.Run("With custom query parameters set should apply middleware for request URL not containing query parameters", func(t *testing.T) {
		rt := newRT(t, httpclient.Options{
			CustomOptions: map[string]any{
				grafanaDataKey: map[string]any{
					customQueryParametersKey: "custom=par/am&second=f oo",
				},
			},
		})
		req := doRequest(t, rt, "http://test.com/query")
		require.Equal(t, "http://test.com/query?custom=par%2Fam&second=f+oo", req.URL.String())
	})
}

View File

@@ -0,0 +1,30 @@
package middleware
import (
"net/http"
sdkhttpclient "github.com/grafana/grafana-plugin-sdk-go/backend/httpclient"
"github.com/grafana/grafana-plugin-sdk-go/backend/log"
)
// ForceHttpGet returns a middleware that answers POST requests with a
// synthetic 405 response. The Prometheus client library does not let us set
// the HTTP method directly: it tries POST first and falls back to GET when it
// sees method-not-allowed, so returning 405 here coerces it onto GET.
func ForceHttpGet(logger log.Logger) sdkhttpclient.Middleware {
	return sdkhttpclient.NamedMiddlewareFunc("force-http-get", func(opts sdkhttpclient.Options, next http.RoundTripper) http.RoundTripper {
		return sdkhttpclient.RoundTripperFunc(func(req *http.Request) (*http.Response, error) {
			if req.Method != http.MethodPost {
				return next.RoundTrip(req)
			}
			// Artificial method-not-allowed: triggers the library's GET retry.
			return &http.Response{StatusCode: http.StatusMethodNotAllowed}, nil
		})
	})
}

View File

@@ -0,0 +1,44 @@
package middleware
import (
"net/http"
"testing"
"github.com/grafana/grafana-plugin-sdk-go/backend"
"github.com/grafana/grafana-plugin-sdk-go/backend/httpclient"
"github.com/stretchr/testify/require"
)
// TestEnsureHttpMethodMiddleware verifies the ForceHttpGet middleware's
// registered name and that it rejects POST requests with a 405 response.
func TestEnsureHttpMethodMiddleware(t *testing.T) {
	t.Run("Name should be correct", func(t *testing.T) {
		finalRoundTripper := httpclient.RoundTripperFunc(func(req *http.Request) (*http.Response, error) {
			return &http.Response{StatusCode: http.StatusOK}, nil
		})
		mw := ForceHttpGet(backend.NewLoggerWith("logger", "test"))
		rt := mw.CreateMiddleware(httpclient.Options{}, finalRoundTripper)
		require.NotNil(t, rt)
		middlewareName, ok := mw.(httpclient.MiddlewareName)
		require.True(t, ok)
		require.Equal(t, "force-http-get", middlewareName.MiddlewareName())
	})
	t.Run("Should force GET method", func(t *testing.T) {
		finalRoundTripper := httpclient.RoundTripperFunc(func(req *http.Request) (*http.Response, error) {
			return &http.Response{StatusCode: http.StatusOK}, nil
		})
		mw := ForceHttpGet(backend.NewLoggerWith("logger", "test"))
		rt := mw.CreateMiddleware(httpclient.Options{}, finalRoundTripper)
		require.NotNil(t, rt)
		req, err := http.NewRequest(http.MethodPost, "http://example.com", nil)
		require.NoError(t, err)
		res, err := rt.RoundTrip(req)
		require.NoError(t, err)
		require.NotNil(t, res)
		// The POST never reaches the final round tripper; the middleware
		// answers with 405 itself.
		require.Equal(t, res.StatusCode, http.StatusMethodNotAllowed)
		if res.Body != nil {
			require.NoError(t, res.Body.Close())
		}
	})
}

361
pkg/promlib/models/query.go Normal file
View File

@@ -0,0 +1,361 @@
package models
import (
"encoding/json"
"fmt"
"math"
"strconv"
"strings"
"time"
"github.com/grafana/grafana-plugin-sdk-go/backend"
"github.com/grafana/grafana-plugin-sdk-go/backend/gtime"
"github.com/prometheus/prometheus/model/labels"
"github.com/prometheus/prometheus/promql/parser"
"github.com/grafana/grafana/pkg/promlib/intervalv2"
)
// PromQueryFormat defines model for PromQueryFormat.
// +enum
type PromQueryFormat string

const (
	PromQueryFormatTimeSeries PromQueryFormat = "time_series"
	PromQueryFormatTable      PromQueryFormat = "table"
	PromQueryFormatHeatmap    PromQueryFormat = "heatmap"
)

// QueryEditorMode defines model for QueryEditorMode.
// +enum
type QueryEditorMode string

const (
	QueryEditorModeBuilder QueryEditorMode = "builder"
	QueryEditorModeCode    QueryEditorMode = "code"
)

// PrometheusQueryProperties defines the specific properties used for prometheus
type PrometheusQueryProperties struct {
	// The response format
	Format PromQueryFormat `json:"format,omitempty"`

	// The actual expression/query that will be evaluated by Prometheus
	Expr string `json:"expr"`

	// Returns a Range vector, comprised of a set of time series containing a range of data points over time for each time series
	Range bool `json:"range,omitempty"`

	// Returns only the latest value that Prometheus has scraped for the requested time series
	Instant bool `json:"instant,omitempty"`

	// Execute an additional query to identify interesting raw samples relevant for the given expr
	Exemplar bool `json:"exemplar,omitempty"`

	// what we should show in the editor
	EditorMode QueryEditorMode `json:"editorMode,omitempty"`

	// Used to specify how many times to divide max data points by. We use max data points under query options
	// See https://github.com/grafana/grafana/issues/48081
	// Deprecated: use interval
	IntervalFactor int64 `json:"intervalFactor,omitempty"`

	// Series name override or template. Ex. {{hostname}} will be replaced with label value for hostname
	LegendFormat string `json:"legendFormat,omitempty"`

	// Scope carries a metric-selector string whose label matchers are applied
	// to the query expression when scopes are enabled.
	// NOTE(review): the shape of this field looks provisional — confirm before
	// relying on it.
	Scope *struct {
		Matchers string `json:"matchers"`
	} `json:"scope,omitempty"`
}

// Internal interval and range variables
const (
	varInterval       = "$__interval"
	varIntervalMs     = "$__interval_ms"
	varRange          = "$__range"
	varRangeS         = "$__range_s"
	varRangeMs        = "$__range_ms"
	varRateInterval   = "$__rate_interval"
	varRateIntervalMs = "$__rate_interval_ms"
)

// Internal interval and range variables with {} syntax
// Repetitive code, we should have functionality to unify these
const (
	varIntervalAlt       = "${__interval}"
	varIntervalMsAlt     = "${__interval_ms}"
	varRangeAlt          = "${__range}"
	varRangeSAlt         = "${__range_s}"
	varRangeMsAlt        = "${__range_ms}"
	varRateIntervalAlt   = "${__rate_interval}"
	varRateIntervalMsAlt = "${__rate_interval_ms}"
)

// TimeSeriesQueryType distinguishes range, instant and exemplar queries.
type TimeSeriesQueryType string

const (
	RangeQueryType    TimeSeriesQueryType = "range"
	InstantQueryType  TimeSeriesQueryType = "instant"
	ExemplarQueryType TimeSeriesQueryType = "exemplar"
	UnknownQueryType  TimeSeriesQueryType = "unknown"
)

// safeResolution caps the number of data points a query may produce.
var safeResolution = 11000

// QueryModel includes both the common and specific values
type QueryModel struct {
	PrometheusQueryProperties `json:",inline"`
	CommonQueryProperties     `json:",inline"`

	// The following properties may be part of the request payload, however they are not saved in panel JSON
	// Timezone offset to align start & end time on backend
	UtcOffsetSec int64  `json:"utcOffsetSec,omitempty"`
	Interval     string `json:"interval,omitempty"`
}

// CommonQueryProperties is properties applied to all queries
// NOTE: this will soon be replaced with a struct from the SDK
type CommonQueryProperties struct {
	RefId      string `json:"refId,omitempty"`
	IntervalMs int64  `json:"intervalMs,omitempty"`
}

// TimeRange is a step-aligned query window.
type TimeRange struct {
	Start time.Time
	End   time.Time
	Step  time.Duration
}

// The internal query object
type Query struct {
	Expr          string
	Step          time.Duration
	LegendFormat  string
	Start         time.Time
	End           time.Time
	RefId         string
	InstantQuery  bool
	RangeQuery    bool
	ExemplarQuery bool
	UtcOffsetSec  int64
	Scope         Scope
}

// Scope holds the label matchers restricting a scoped query.
type Scope struct {
	Matchers []*labels.Matcher
}
// Parse converts a backend.DataQuery into the internal Query representation:
// it unmarshals the JSON model, resolves the final step, interpolates the
// interval/range template variables in the expression and, when scopes are
// enabled, rewrites the expression with the scope's label matchers.
func Parse(query backend.DataQuery, dsScrapeInterval string, intervalCalculator intervalv2.Calculator, fromAlert bool, enableScope bool) (*Query, error) {
	model := &QueryModel{}
	if err := json.Unmarshal(query.JSON, model); err != nil {
		return nil, err
	}

	// Final step value for prometheus
	calculatedStep, err := calculatePrometheusInterval(model.Interval, dsScrapeInterval, model.IntervalMs, model.IntervalFactor, query, intervalCalculator)
	if err != nil {
		return nil, err
	}

	// Interpolate variables in expr
	timeRange := query.TimeRange.To.Sub(query.TimeRange.From)
	expr := interpolateVariables(
		model.Expr,
		query.Interval,
		calculatedStep,
		model.Interval,
		dsScrapeInterval,
		timeRange,
	)
	var matchers []*labels.Matcher
	if enableScope && model.Scope != nil && model.Scope.Matchers != "" {
		matchers, err = parser.ParseMetricSelector(model.Scope.Matchers)
		if err != nil {
			return nil, fmt.Errorf("failed to parse metric selector %v in scope", model.Scope.Matchers)
		}
		expr, err = ApplyQueryScope(expr, matchers)
		if err != nil {
			return nil, err
		}
	}
	if !model.Instant && !model.Range {
		// In older dashboards, we were not setting range query param and !range && !instant was run as range query
		model.Range = true
	}

	// We never want to run exemplar query for alerting
	if fromAlert {
		model.Exemplar = false
	}

	// NOTE(review): the parsed scope matchers are applied to expr above, but
	// the returned Query.Scope field is left at its zero value — confirm that
	// downstream code does not expect it to be populated here.
	return &Query{
		Expr:          expr,
		Step:          calculatedStep,
		LegendFormat:  model.LegendFormat,
		Start:         query.TimeRange.From,
		End:           query.TimeRange.To,
		RefId:         query.RefID,
		InstantQuery:  model.Instant,
		RangeQuery:    model.Range,
		ExemplarQuery: model.Exemplar,
		UtcOffsetSec:  model.UtcOffsetSec,
	}, nil
}
// Type reports the query's kind, checked in priority order
// instant > range > exemplar; anything else is unknown.
func (query *Query) Type() TimeSeriesQueryType {
	switch {
	case query.InstantQuery:
		return InstantQueryType
	case query.RangeQuery:
		return RangeQueryType
	case query.ExemplarQuery:
		return ExemplarQueryType
	default:
		return UnknownQueryType
	}
}
// TimeRange returns the query window aligned to the step: start and end are
// each rounded down to a multiple of Step (offset by UtcOffsetSec).
func (query *Query) TimeRange() TimeRange {
	return TimeRange{
		Step: query.Step,
		// Align query range to step. It rounds start and end down to a multiple of step.
		Start: AlignTimeRange(query.Start, query.Step, query.UtcOffsetSec),
		End:   AlignTimeRange(query.End, query.Step, query.UtcOffsetSec),
	}
}
// calculatePrometheusInterval resolves the final step for a Prometheus query
// from the query interval, the data source scrape interval, intervalMs and
// the (deprecated) interval factor. The step is never finer than the "safe"
// interval derived from safeResolution, except that the special
// $__rate_interval / ${__rate_interval} variables yield the rate interval
// as-is.
func calculatePrometheusInterval(
	queryInterval, dsScrapeInterval string,
	intervalMs, intervalFactor int64,
	query backend.DataQuery,
	intervalCalculator intervalv2.Calculator,
) (time.Duration, error) {
	// Remember the pre-substitution value so the rate-interval variables can
	// still be recognised after the replacement below.
	originalQueryInterval := queryInterval

	// Variable intervals are replaced with the calculated interval.
	if isVariableInterval(queryInterval) {
		queryInterval = ""
	}

	minInterval, err := gtime.GetIntervalFrom(dsScrapeInterval, queryInterval, intervalMs, 15*time.Second)
	if err != nil {
		return 0, err
	}

	calculated := intervalCalculator.Calculate(query.TimeRange, minInterval, query.MaxDataPoints)
	safe := intervalCalculator.CalculateSafeInterval(query.TimeRange, int64(safeResolution))

	// Use whichever of the two intervals is coarser.
	adjusted := safe.Value
	if calculated.Value > safe.Value {
		adjusted = calculated.Value
	}

	if originalQueryInterval == varRateInterval || originalQueryInterval == varRateIntervalAlt {
		// Rate interval is final and is not affected by resolution.
		return calculateRateInterval(adjusted, dsScrapeInterval), nil
	}

	factor := intervalFactor
	if factor == 0 {
		factor = 1
	}
	return time.Duration(int64(adjusted) * factor), nil
}
// calculateRateInterval computes the value substituted for $__rate_interval.
// queryInterval is the step derived from range / maxDataPoints on the
// frontend (shown in the Query Options panel above the editor).
// requestedMinStep is the data source scrape interval (default 15s); it can
// be overridden via the "Min Step" option below the code editor.
func calculateRateInterval(
	queryInterval time.Duration,
	requestedMinStep string,
) time.Duration {
	const defaultScrape = "15s"
	if requestedMinStep == "" {
		requestedMinStep = defaultScrape
	}
	scrapeInterval, err := gtime.ParseIntervalStringToTimeDuration(requestedMinStep)
	if err != nil {
		// An unparsable min step yields a zero rate interval (best effort).
		return 0
	}
	// The rate interval is at least queryInterval + one scrape interval,
	// and never less than 4x the scrape interval.
	return time.Duration(int64(math.Max(float64(queryInterval+scrapeInterval), 4*float64(scrapeInterval))))
}
// interpolateVariables replaces Grafana's built-in interval/range variables
// in a PromQL expression.
//
// expr             PromQL query text.
// queryInterval    Requested interval; may be overridden by MinStep options.
// calculatedStep   Final step computed by calculatePrometheusInterval.
// requestedMinStep Requested minimum step (QueryModel.Interval).
// dsScrapeInterval Data source scrape interval from the config.
// timeRange        Requested time range of the query.
func interpolateVariables(
	expr string,
	queryInterval time.Duration,
	calculatedStep time.Duration,
	requestedMinStep string,
	dsScrapeInterval string,
	timeRange time.Duration,
) string {
	rangeMs := timeRange.Milliseconds()
	rangeSRounded := int64(math.Round(float64(rangeMs) / 1000.0))

	var rateInterval time.Duration
	if requestedMinStep == varRateInterval || requestedMinStep == varRateIntervalAlt {
		// Min step itself is $__rate_interval: it was already resolved into
		// the calculated step, so reuse that directly.
		rateInterval = calculatedStep
	} else {
		if requestedMinStep == varInterval || requestedMinStep == varIntervalAlt {
			requestedMinStep = calculatedStep.String()
		}
		if requestedMinStep == "" {
			requestedMinStep = dsScrapeInterval
		}
		rateInterval = calculateRateInterval(queryInterval, requestedMinStep)
	}

	stepMsStr := strconv.FormatInt(int64(calculatedStep/time.Millisecond), 10)
	stepStr := gtime.FormatInterval(calculatedStep)
	rangeMsStr := strconv.FormatInt(rangeMs, 10)
	rangeSStr := strconv.FormatInt(rangeSRounded, 10)
	rateIntervalMsStr := strconv.FormatInt(int64(rateInterval/time.Millisecond), 10)
	rateIntervalStr := rateInterval.String()

	// Replacements are applied in order. The order matters: the _ms variants
	// must be replaced before the variable they are a prefix of (for example
	// $__interval_ms before $__interval). Both the $__name and ${__name}
	// (Alt) forms are covered.
	replacements := []struct {
		variable string
		value    string
	}{
		{varIntervalMs, stepMsStr},
		{varInterval, stepStr},
		{varRangeMs, rangeMsStr},
		{varRangeS, rangeSStr},
		{varRange, rangeSStr + "s"},
		{varRateIntervalMs, rateIntervalMsStr},
		{varRateInterval, rateIntervalStr},
		{varIntervalMsAlt, stepMsStr},
		{varIntervalAlt, stepStr},
		{varRangeMsAlt, rangeMsStr},
		{varRangeSAlt, rangeSStr},
		{varRangeAlt, rangeSStr + "s"},
		{varRateIntervalMsAlt, rateIntervalMsStr},
		{varRateIntervalAlt, rateIntervalStr},
	}
	for _, r := range replacements {
		expr = strings.ReplaceAll(expr, r.variable, r.value)
	}
	return expr
}
// isVariableInterval reports whether interval is one of the built-in
// interval variables ($__interval, $__interval_ms, $__rate_interval,
// $__rate_interval_ms) in either the $__name or ${__name} form.
func isVariableInterval(interval string) bool {
	switch interval {
	case varInterval, varIntervalMs, varRateInterval, varRateIntervalMs,
		varIntervalAlt, varIntervalMsAlt, varRateIntervalAlt, varRateIntervalMsAlt:
		return true
	}
	return false
}
// AlignTimeRange aligns query range to step and handles the time offset.
// It rounds start and end down to a multiple of step.
// Prometheus caching is dependent on the range being aligned with the step.
// Rounding to the step can significantly change the start and end of the range for larger steps, i.e. a week.
// In rounding the range to a 1w step the range will always start on a Thursday.
func AlignTimeRange(t time.Time, step time.Duration, offset int64) time.Time {
offsetNano := float64(offset * 1e9)
stepNano := float64(step.Nanoseconds())
return time.Unix(0, int64(math.Floor((float64(t.UnixNano())+offsetNano)/stepNano)*stepNano-offsetNano)).UTC()
}

View File

@@ -0,0 +1,784 @@
package models_test
import (
"fmt"
"reflect"
"testing"
"time"
"github.com/grafana/grafana-plugin-sdk-go/backend"
"github.com/stretchr/testify/require"
"github.com/grafana/grafana/pkg/promlib/intervalv2"
"github.com/grafana/grafana/pkg/promlib/models"
)
// Shared fixtures for the tests in this file: a single "now" so all time
// ranges in a run are relative to the same instant, and one interval
// calculator reused across subtests.
var (
	now                = time.Now()
	intervalCalculator = intervalv2.NewCalculator()
)
// TestParse exercises models.Parse end to end: step calculation from
// intervalFactor / scrape interval, interpolation of the built-in variables
// ($__interval, $__interval_ms, $__range, $__range_s, $__range_ms,
// $__rate_interval, $__rate_interval_ms and their ${...} forms), and the
// range/instant/exemplar query-type flags.
func TestParse(t *testing.T) {
	// fromAlert=true must disable exemplar queries.
	t.Run("parsing query from unified alerting", func(t *testing.T) {
		timeRange := backend.TimeRange{
			From: now,
			To:   now.Add(12 * time.Hour),
		}
		queryJson := `{
			"expr": "go_goroutines",
			"refId": "A",
			"exemplar": true
		}`
		q := backend.DataQuery{
			JSON:      []byte(queryJson),
			TimeRange: timeRange,
			RefID:     "A",
		}
		res, err := models.Parse(q, "15s", intervalCalculator, true, false)
		require.NoError(t, err)
		require.Equal(t, false, res.ExemplarQuery)
	})
	// Step resolution for various intervalFactor / scrape-interval combos.
	t.Run("parsing query model with step", func(t *testing.T) {
		timeRange := backend.TimeRange{
			From: now,
			To:   now.Add(12 * time.Hour),
		}
		q := queryContext(`{
			"expr": "go_goroutines",
			"format": "time_series",
			"refId": "A"
		}`, timeRange, time.Duration(1)*time.Minute)
		res, err := models.Parse(q, "15s", intervalCalculator, false, false)
		require.NoError(t, err)
		require.Equal(t, time.Second*30, res.Step)
	})
	t.Run("parsing query model without step parameter", func(t *testing.T) {
		timeRange := backend.TimeRange{
			From: now,
			To:   now.Add(1 * time.Hour),
		}
		q := queryContext(`{
			"expr": "go_goroutines",
			"format": "time_series",
			"intervalFactor": 1,
			"refId": "A"
		}`, timeRange, time.Duration(1)*time.Minute)
		res, err := models.Parse(q, "15s", intervalCalculator, false, false)
		require.NoError(t, err)
		require.Equal(t, time.Second*15, res.Step)
	})
	t.Run("parsing query model with high intervalFactor", func(t *testing.T) {
		timeRange := backend.TimeRange{
			From: now,
			To:   now.Add(48 * time.Hour),
		}
		q := queryContext(`{
			"expr": "go_goroutines",
			"format": "time_series",
			"intervalFactor": 10,
			"refId": "A"
		}`, timeRange, time.Duration(1)*time.Minute)
		res, err := models.Parse(q, "15s", intervalCalculator, false, false)
		require.NoError(t, err)
		require.Equal(t, time.Minute*20, res.Step)
	})
	t.Run("parsing query model with low intervalFactor", func(t *testing.T) {
		timeRange := backend.TimeRange{
			From: now,
			To:   now.Add(48 * time.Hour),
		}
		q := queryContext(`{
			"expr": "go_goroutines",
			"format": "time_series",
			"intervalFactor": 1,
			"refId": "A"
		}`, timeRange, time.Duration(1)*time.Minute)
		res, err := models.Parse(q, "15s", intervalCalculator, false, false)
		require.NoError(t, err)
		require.Equal(t, time.Minute*2, res.Step)
	})
	t.Run("parsing query model specified scrape-interval in the data source", func(t *testing.T) {
		timeRange := backend.TimeRange{
			From: now,
			To:   now.Add(48 * time.Hour),
		}
		q := queryContext(`{
			"expr": "go_goroutines",
			"format": "time_series",
			"intervalFactor": 1,
			"refId": "A"
		}`, timeRange, time.Duration(1)*time.Minute)
		res, err := models.Parse(q, "240s", intervalCalculator, false, false)
		require.NoError(t, err)
		require.Equal(t, time.Minute*4, res.Step)
	})
	// $__interval / $__interval_ms interpolation (both variable syntaxes).
	t.Run("parsing query model with $__interval variable", func(t *testing.T) {
		timeRange := backend.TimeRange{
			From: now,
			To:   now.Add(48 * time.Hour),
		}
		q := queryContext(`{
			"expr": "rate(ALERTS{job=\"test\" [$__interval]})",
			"format": "time_series",
			"intervalFactor": 1,
			"intervalMs": 60000,
			"refId": "A"
		}`, timeRange, time.Duration(1)*time.Minute)
		res, err := models.Parse(q, "15s", intervalCalculator, false, false)
		require.NoError(t, err)
		require.Equal(t, "rate(ALERTS{job=\"test\" [2m]})", res.Expr)
		require.Equal(t, 120*time.Second, res.Step)
	})
	t.Run("parsing query model with ${__interval} variable", func(t *testing.T) {
		timeRange := backend.TimeRange{
			From: now,
			To:   now.Add(48 * time.Hour),
		}
		q := queryContext(`{
			"expr": "rate(ALERTS{job=\"test\" [${__interval}]})",
			"format": "time_series",
			"intervalFactor": 1,
			"interval": "1m",
			"intervalMs": 60000,
			"refId": "A"
		}`, timeRange, time.Duration(1)*time.Minute)
		res, err := models.Parse(q, "15s", intervalCalculator, false, false)
		require.NoError(t, err)
		require.Equal(t, "rate(ALERTS{job=\"test\" [2m]})", res.Expr)
	})
	t.Run("parsing query model with $__interval_ms variable", func(t *testing.T) {
		timeRange := backend.TimeRange{
			From: now,
			To:   now.Add(48 * time.Hour),
		}
		q := queryContext(`{
			"expr": "rate(ALERTS{job=\"test\" [$__interval_ms]})",
			"format": "time_series",
			"intervalFactor": 1,
			"intervalMs": 60000,
			"refId": "A"
		}`, timeRange, time.Duration(1)*time.Minute)
		res, err := models.Parse(q, "15s", intervalCalculator, false, false)
		require.NoError(t, err)
		require.Equal(t, "rate(ALERTS{job=\"test\" [120000]})", res.Expr)
	})
	t.Run("parsing query model with $__interval_ms and $__interval variable", func(t *testing.T) {
		timeRange := backend.TimeRange{
			From: now,
			To:   now.Add(48 * time.Hour),
		}
		q := queryContext(`{
			"expr": "rate(ALERTS{job=\"test\" [$__interval_ms]}) + rate(ALERTS{job=\"test\" [$__interval]})",
			"format": "time_series",
			"intervalFactor": 1,
			"intervalMs": 60000,
			"refId": "A"
		}`, timeRange, time.Duration(1)*time.Minute)
		res, err := models.Parse(q, "15s", intervalCalculator, false, false)
		require.NoError(t, err)
		require.Equal(t, "rate(ALERTS{job=\"test\" [120000]}) + rate(ALERTS{job=\"test\" [2m]})", res.Expr)
	})
	t.Run("parsing query model with ${__interval_ms} and ${__interval} variable", func(t *testing.T) {
		timeRange := backend.TimeRange{
			From: now,
			To:   now.Add(48 * time.Hour),
		}
		q := queryContext(`{
			"expr": "rate(ALERTS{job=\"test\" [${__interval_ms}]}) + rate(ALERTS{job=\"test\" [${__interval}]})",
			"format": "time_series",
			"intervalFactor": 1,
			"intervalMs": 60000,
			"refId": "A"
		}`, timeRange, time.Duration(1)*time.Minute)
		res, err := models.Parse(q, "15s", intervalCalculator, false, false)
		require.NoError(t, err)
		require.Equal(t, "rate(ALERTS{job=\"test\" [120000]}) + rate(ALERTS{job=\"test\" [2m]})", res.Expr)
	})
	// $__range / $__range_s / $__range_ms interpolation, including rounding
	// behavior for sub-second ranges.
	t.Run("parsing query model with $__range variable", func(t *testing.T) {
		timeRange := backend.TimeRange{
			From: now,
			To:   now.Add(48 * time.Hour),
		}
		q := queryContext(`{
			"expr": "rate(ALERTS{job=\"test\" [$__range]})",
			"format": "time_series",
			"intervalFactor": 1,
			"refId": "A"
		}`, timeRange, time.Duration(1)*time.Minute)
		res, err := models.Parse(q, "15s", intervalCalculator, false, false)
		require.NoError(t, err)
		require.Equal(t, "rate(ALERTS{job=\"test\" [172800s]})", res.Expr)
	})
	t.Run("parsing query model with $__range_s variable", func(t *testing.T) {
		timeRange := backend.TimeRange{
			From: now,
			To:   now.Add(48 * time.Hour),
		}
		q := queryContext(`{
			"expr": "rate(ALERTS{job=\"test\" [$__range_s]})",
			"format": "time_series",
			"intervalFactor": 1,
			"refId": "A"
		}`, timeRange, time.Duration(1)*time.Minute)
		res, err := models.Parse(q, "15s", intervalCalculator, false, false)
		require.NoError(t, err)
		require.Equal(t, "rate(ALERTS{job=\"test\" [172800]})", res.Expr)
	})
	t.Run("parsing query model with ${__range_s} variable", func(t *testing.T) {
		timeRange := backend.TimeRange{
			From: now,
			To:   now.Add(48 * time.Hour),
		}
		q := queryContext(`{
			"expr": "rate(ALERTS{job=\"test\" [${__range_s}s]})",
			"format": "time_series",
			"intervalFactor": 1,
			"refId": "A"
		}`, timeRange, time.Duration(1)*time.Minute)
		res, err := models.Parse(q, "15s", intervalCalculator, false, false)
		require.NoError(t, err)
		require.Equal(t, "rate(ALERTS{job=\"test\" [172800s]})", res.Expr)
	})
	t.Run("parsing query model with $__range_s variable below 0.5s", func(t *testing.T) {
		timeRange := backend.TimeRange{
			From: now,
			To:   now.Add(40 * time.Millisecond),
		}
		q := queryContext(`{
			"expr": "rate(ALERTS{job=\"test\" [$__range_s]})",
			"format": "time_series",
			"intervalFactor": 1,
			"refId": "A"
		}`, timeRange, time.Duration(1)*time.Minute)
		res, err := models.Parse(q, "15s", intervalCalculator, false, false)
		require.NoError(t, err)
		require.Equal(t, "rate(ALERTS{job=\"test\" [0]})", res.Expr)
	})
	t.Run("parsing query model with $__range_s variable between 1-0.5s", func(t *testing.T) {
		timeRange := backend.TimeRange{
			From: now,
			To:   now.Add(800 * time.Millisecond),
		}
		q := queryContext(`{
			"expr": "rate(ALERTS{job=\"test\" [$__range_s]})",
			"format": "time_series",
			"intervalFactor": 1,
			"refId": "A"
		}`, timeRange, time.Duration(1)*time.Minute)
		res, err := models.Parse(q, "15s", intervalCalculator, false, false)
		require.NoError(t, err)
		require.Equal(t, "rate(ALERTS{job=\"test\" [1]})", res.Expr)
	})
	t.Run("parsing query model with $__range_ms variable", func(t *testing.T) {
		timeRange := backend.TimeRange{
			From: now,
			To:   now.Add(48 * time.Hour),
		}
		q := queryContext(`{
			"expr": "rate(ALERTS{job=\"test\" [$__range_ms]})",
			"format": "time_series",
			"intervalFactor": 1,
			"refId": "A"
		}`, timeRange, time.Duration(1)*time.Minute)
		res, err := models.Parse(q, "15s", intervalCalculator, false, false)
		require.NoError(t, err)
		require.Equal(t, "rate(ALERTS{job=\"test\" [172800000]})", res.Expr)
	})
	t.Run("parsing query model with $__range_ms variable below 1s", func(t *testing.T) {
		timeRange := backend.TimeRange{
			From: now,
			To:   now.Add(20 * time.Millisecond),
		}
		q := queryContext(`{
			"expr": "rate(ALERTS{job=\"test\" [$__range_ms]})",
			"format": "time_series",
			"intervalFactor": 1,
			"refId": "A"
		}`, timeRange, time.Duration(1)*time.Minute)
		res, err := models.Parse(q, "15s", intervalCalculator, false, false)
		require.NoError(t, err)
		require.Equal(t, "rate(ALERTS{job=\"test\" [20]})", res.Expr)
	})
	// $__rate_interval / $__rate_interval_ms interpolation.
	t.Run("parsing query model with $__rate_interval variable", func(t *testing.T) {
		timeRange := backend.TimeRange{
			From: now,
			To:   now.Add(5 * time.Minute),
		}
		q := queryContext(`{
			"expr": "rate(ALERTS{job=\"test\" [$__rate_interval]})",
			"format": "time_series",
			"intervalFactor": 1,
			"interval": "5m",
			"refId": "A"
		}`, timeRange, time.Duration(1)*time.Minute)
		res, err := models.Parse(q, "15s", intervalCalculator, false, false)
		require.NoError(t, err)
		require.Equal(t, "rate(ALERTS{job=\"test\" [20m0s]})", res.Expr)
	})
	t.Run("parsing query model with $__rate_interval variable in expr and interval", func(t *testing.T) {
		timeRange := backend.TimeRange{
			From: now,
			To:   now.Add(5 * time.Minute),
		}
		q := queryContext(`{
			"expr": "rate(ALERTS{job=\"test\" [$__rate_interval]})",
			"format": "time_series",
			"intervalFactor": 1,
			"interval": "$__rate_interval",
			"refId": "A"
		}`, timeRange, 1*time.Minute)
		res, err := models.Parse(q, "15s", intervalCalculator, false, false)
		require.NoError(t, err)
		require.Equal(t, "rate(ALERTS{job=\"test\" [1m0s]})", res.Expr)
		require.Equal(t, 1*time.Minute, res.Step)
	})
	t.Run("parsing query model with $__rate_interval_ms variable", func(t *testing.T) {
		timeRange := backend.TimeRange{
			From: now,
			To:   now.Add(48 * time.Hour),
		}
		q := queryContext(`{
			"expr": "rate(ALERTS{job=\"test\" [$__rate_interval_ms]})",
			"format": "time_series",
			"intervalFactor": 1,
			"refId": "A"
		}`, timeRange, 2*time.Minute)
		res, err := models.Parse(q, "15s", intervalCalculator, false, false)
		require.NoError(t, err)
		require.Equal(t, "rate(ALERTS{job=\"test\" [135000]})", res.Expr)
	})
	t.Run("parsing query model with $__rate_interval_ms and $__rate_interval variable", func(t *testing.T) {
		timeRange := backend.TimeRange{
			From: now,
			To:   now.Add(48 * time.Hour),
		}
		q := queryContext(`{
			"expr": "rate(ALERTS{job=\"test\" [$__rate_interval_ms]}) + rate(ALERTS{job=\"test\" [$__rate_interval]})",
			"format": "time_series",
			"intervalFactor": 1,
			"refId": "A"
		}`, timeRange, 2*time.Minute)
		res, err := models.Parse(q, "15s", intervalCalculator, false, false)
		require.NoError(t, err)
		require.Equal(t, "rate(ALERTS{job=\"test\" [135000]}) + rate(ALERTS{job=\"test\" [2m15s]})", res.Expr)
	})
	t.Run("parsing query model with ${__rate_interval_ms} and ${__rate_interval} variable", func(t *testing.T) {
		timeRange := backend.TimeRange{
			From: now,
			To:   now.Add(48 * time.Hour),
		}
		q := queryContext(`{
			"expr": "rate(ALERTS{job=\"test\" [${__rate_interval_ms}]}) + rate(ALERTS{job=\"test\" [${__rate_interval}]})",
			"format": "time_series",
			"intervalFactor": 1,
			"refId": "A"
		}`, timeRange, 2*time.Minute)
		res, err := models.Parse(q, "15s", intervalCalculator, false, false)
		require.NoError(t, err)
		require.Equal(t, "rate(ALERTS{job=\"test\" [135000]}) + rate(ALERTS{job=\"test\" [2m15s]})", res.Expr)
	})
	// Query-type flags: range, range+instant, and the legacy default where
	// neither flag set means range.
	t.Run("parsing query model of range query", func(t *testing.T) {
		timeRange := backend.TimeRange{
			From: now,
			To:   now.Add(48 * time.Hour),
		}
		q := queryContext(`{
			"expr": "go_goroutines",
			"format": "time_series",
			"intervalFactor": 1,
			"refId": "A",
			"range": true
		}`, timeRange, time.Duration(1)*time.Minute)
		res, err := models.Parse(q, "15s", intervalCalculator, false, false)
		require.NoError(t, err)
		require.Equal(t, true, res.RangeQuery)
	})
	t.Run("parsing query model of range and instant query", func(t *testing.T) {
		timeRange := backend.TimeRange{
			From: now,
			To:   now.Add(48 * time.Hour),
		}
		q := queryContext(`{
			"expr": "go_goroutines",
			"format": "time_series",
			"intervalFactor": 1,
			"refId": "A",
			"range": true,
			"instant": true
		}`, timeRange, time.Duration(1)*time.Minute)
		res, err := models.Parse(q, "15s", intervalCalculator, false, false)
		require.NoError(t, err)
		require.Equal(t, true, res.RangeQuery)
		require.Equal(t, true, res.InstantQuery)
	})
	t.Run("parsing query model of with no query type", func(t *testing.T) {
		timeRange := backend.TimeRange{
			From: now,
			To:   now.Add(48 * time.Hour),
		}
		q := queryContext(`{
			"expr": "go_goroutines",
			"format": "time_series",
			"intervalFactor": 1,
			"refId": "A"
		}`, timeRange, time.Duration(1)*time.Minute)
		res, err := models.Parse(q, "15s", intervalCalculator, false, false)
		require.NoError(t, err)
		require.Equal(t, true, res.RangeQuery)
	})
}
// TestRateInterval pins the $__rate_interval resolution for combinations of
// min-step override, intervalMs, data source scrape interval, and time
// range — including the case where the min step itself is $__rate_interval.
func TestRateInterval(t *testing.T) {
	type args struct {
		expr             string
		interval         string // min step override ("" or a variable allowed)
		intervalMs       int64
		dsScrapeInterval string
		timeRange        *backend.TimeRange // nil -> mockQuery default (1h)
	}
	tests := []struct {
		name string
		args args
		want *models.Query
	}{
		{
			name: "intervalMs 100s, minStep override 150s and scrape interval 30s",
			args: args{
				expr:             "rate(rpc_durations_seconds_count[$__rate_interval])",
				interval:         "150s",
				intervalMs:       100000,
				dsScrapeInterval: "30s",
			},
			want: &models.Query{
				Expr: "rate(rpc_durations_seconds_count[10m0s])",
				Step: time.Second * 150,
			},
		},
		{
			name: "intervalMs 120s, minStep override 150s and ds scrape interval 30s",
			args: args{
				expr:             "rate(rpc_durations_seconds_count[$__rate_interval])",
				interval:         "150s",
				intervalMs:       120000,
				dsScrapeInterval: "30s",
			},
			want: &models.Query{
				Expr: "rate(rpc_durations_seconds_count[10m0s])",
				Step: time.Second * 150,
			},
		},
		{
			name: "intervalMs 120s, minStep auto (interval not overridden) and ds scrape interval 30s",
			args: args{
				expr:             "rate(rpc_durations_seconds_count[$__rate_interval])",
				interval:         "120s",
				intervalMs:       120000,
				dsScrapeInterval: "30s",
			},
			want: &models.Query{
				Expr: "rate(rpc_durations_seconds_count[8m0s])",
				Step: time.Second * 120,
			},
		},
		{
			name: "interval and minStep are automatically calculated and ds scrape interval 30s and time range 1 hour",
			args: args{
				expr:             "rate(rpc_durations_seconds_count[$__rate_interval])",
				interval:         "30s",
				intervalMs:       30000,
				dsScrapeInterval: "30s",
				timeRange: &backend.TimeRange{
					From: now,
					To:   now.Add(1 * time.Hour),
				},
			},
			want: &models.Query{
				Expr: "rate(rpc_durations_seconds_count[2m0s])",
				Step: time.Second * 30,
			},
		},
		{
			name: "minStep is $__rate_interval and ds scrape interval 30s and time range 1 hour",
			args: args{
				expr:             "rate(rpc_durations_seconds_count[$__rate_interval])",
				interval:         "$__rate_interval",
				intervalMs:       30000,
				dsScrapeInterval: "30s",
				timeRange: &backend.TimeRange{
					From: now,
					To:   now.Add(1 * time.Hour),
				},
			},
			want: &models.Query{
				Expr: "rate(rpc_durations_seconds_count[2m0s])",
				Step: time.Minute * 2,
			},
		},
		{
			name: "minStep is $__rate_interval and ds scrape interval 30s and time range 2 days",
			args: args{
				expr:             "rate(rpc_durations_seconds_count[$__rate_interval])",
				interval:         "$__rate_interval",
				intervalMs:       120000,
				dsScrapeInterval: "30s",
				timeRange: &backend.TimeRange{
					From: now,
					To:   now.Add(2 * 24 * time.Hour),
				},
			},
			want: &models.Query{
				Expr: "rate(rpc_durations_seconds_count[2m30s])",
				Step: time.Second * 150,
			},
		},
		{
			name: "minStep is $__rate_interval and ds scrape interval 15s and time range 2 days",
			args: args{
				expr:             "rate(rpc_durations_seconds_count[$__rate_interval])",
				interval:         "$__interval",
				intervalMs:       120000,
				dsScrapeInterval: "15s",
				timeRange: &backend.TimeRange{
					From: now,
					To:   now.Add(2 * 24 * time.Hour),
				},
			},
			want: &models.Query{
				Expr: "rate(rpc_durations_seconds_count[8m0s])",
				Step: time.Second * 120,
			},
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			q := mockQuery(tt.args.expr, tt.args.interval, tt.args.intervalMs, tt.args.timeRange)
			q.MaxDataPoints = 12384
			res, err := models.Parse(q, tt.args.dsScrapeInterval, intervalCalculator, false, false)
			require.NoError(t, err)
			require.Equal(t, tt.want.Expr, res.Expr)
			require.Equal(t, tt.want.Step, res.Step)
		})
	}
	// Two realistic dashboard payloads (full query JSON as sent by the UI).
	t.Run("minStep is auto and ds scrape interval 30s and time range 1 hour", func(t *testing.T) {
		query := backend.DataQuery{
			RefID:         "G",
			QueryType:     "",
			MaxDataPoints: 1613,
			Interval:      30 * time.Second,
			TimeRange: backend.TimeRange{
				From: now,
				To:   now.Add(1 * time.Hour),
			},
			JSON: []byte(`{
				"datasource":{"type":"prometheus","uid":"zxS5e5W4k"},
				"datasourceId":38,
				"editorMode":"code",
				"exemplar":false,
				"expr":"sum(rate(process_cpu_seconds_total[$__rate_interval]))",
				"instant":false,
				"interval":"",
				"intervalMs":30000,
				"key":"Q-f96b6729-c47a-4ea8-8f71-a79774cf9bd5-0",
				"legendFormat":"__auto",
				"maxDataPoints":1613,
				"range":true,
				"refId":"G",
				"requestId":"1G",
				"utcOffsetSec":3600
			}`),
		}
		res, err := models.Parse(query, "30s", intervalCalculator, false, false)
		require.NoError(t, err)
		require.Equal(t, "sum(rate(process_cpu_seconds_total[2m0s]))", res.Expr)
		require.Equal(t, 30*time.Second, res.Step)
	})
	t.Run("minStep is auto and ds scrape interval 15s and time range 5 minutes", func(t *testing.T) {
		query := backend.DataQuery{
			RefID:         "A",
			QueryType:     "",
			MaxDataPoints: 1055,
			Interval:      15 * time.Second,
			TimeRange: backend.TimeRange{
				From: now,
				To:   now.Add(5 * time.Minute),
			},
			JSON: []byte(`{
				"datasource": {
					"type": "prometheus",
					"uid": "2z9d6ElGk"
				},
				"editorMode": "code",
				"expr": "sum(rate(cache_requests_total[$__rate_interval]))",
				"legendFormat": "__auto",
				"range": true,
				"refId": "A",
				"exemplar": false,
				"requestId": "1A",
				"utcOffsetSec": 0,
				"interval": "",
				"datasourceId": 508,
				"intervalMs": 15000,
				"maxDataPoints": 1055
			}`),
		}
		res, err := models.Parse(query, "15s", intervalCalculator, false, false)
		require.NoError(t, err)
		require.Equal(t, "sum(rate(cache_requests_total[1m0s]))", res.Expr)
		require.Equal(t, 15*time.Second, res.Step)
	})
}
// mockQuery builds a backend.DataQuery for the table-driven tests. expr,
// interval, and intervalMs are spliced into the query JSON; timeRange
// defaults to now..now+1h when nil.
func mockQuery(expr string, interval string, intervalMs int64, timeRange *backend.TimeRange) backend.DataQuery {
	if timeRange == nil {
		timeRange = &backend.TimeRange{
			From: now,
			To:   now.Add(1 * time.Hour),
		}
	}
	return backend.DataQuery{
		Interval: time.Duration(intervalMs) * time.Millisecond,
		JSON: []byte(fmt.Sprintf(`{
			"expr": "%s",
			"format": "time_series",
			"interval": "%s",
			"intervalMs": %v,
			"intervalFactor": 1,
			"refId": "A"
		}`, expr, interval, intervalMs)),
		TimeRange: *timeRange,
		RefID:     "A",
	}
}
// queryContext wraps a raw query-model JSON string into a backend.DataQuery
// with the given time range and interval, using RefID "A".
func queryContext(json string, timeRange backend.TimeRange, queryInterval time.Duration) backend.DataQuery {
	return backend.DataQuery{
		Interval:  queryInterval,
		JSON:      []byte(json),
		TimeRange: timeRange,
		RefID:     "A",
	}
}
// TestAlignTimeRange checks that AlignTimeRange rounds times down to a
// multiple of the step, including offset handling and the Prometheus-cache
// mandated behavior that a 1w step aligns any date to a Thursday.
func TestAlignTimeRange(t *testing.T) {
	type args struct {
		t      time.Time
		step   time.Duration
		offset int64
	}
	var monday int64 = 1704672000
	var thursday int64 = 1704326400
	// Renamed from one_week_min_step: Go locals use MixedCaps, not snake_case.
	oneWeekMinStep := 604800 * time.Second
	tests := []struct {
		name string
		args args
		want time.Time
	}{
		{
			name: "second step",
			args: args{t: time.Unix(1664816826, 0), step: 10 * time.Second, offset: 0},
			want: time.Unix(1664816820, 0).UTC(),
		},
		{name: "millisecond step", args: args{t: time.Unix(1664816825, 5*int64(time.Millisecond)), step: 10 * time.Millisecond, offset: 0}, want: time.Unix(1664816825, 0).UTC()},
		{name: "second step with offset", args: args{t: time.Unix(1664816825, 5*int64(time.Millisecond)), step: 2 * time.Second, offset: -3}, want: time.Unix(1664816825, 0).UTC()},
		// we may not want this functionality in the future but if we change this we break Prometheus caching.
		{
			name: "1w step with range date of Monday that changes the range to a Thursday.",
			args: args{t: time.Unix(monday, 0), step: oneWeekMinStep, offset: 0},
			want: time.Unix(thursday, 0).UTC(),
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			if got := models.AlignTimeRange(tt.args.t, tt.args.step, tt.args.offset); !reflect.DeepEqual(got, tt.want) {
				t.Errorf("AlignTimeRange() = %v, want %v", got, tt.want)
			}
		})
	}
}

View File

@@ -0,0 +1,55 @@
package models
import (
"time"
"github.com/grafana/grafana-plugin-sdk-go/data"
)
// ResultType is the Prometheus "resultType" reported for a query response
// and carried in the frame metadata. The values are the literal strings
// used by the Prometheus HTTP API.
// +enum
type ResultType string

const (
	ResultTypeMatrix   ResultType = "matrix"
	ResultTypeExemplar ResultType = "exemplar"
	ResultTypeVector   ResultType = "vector"
	// ResultTypeUnknown is the zero value, used when the type is absent or
	// unrecognized.
	ResultTypeUnknown ResultType = ""
)
// ResultTypeFromFrame reads the Prometheus result type from the frame's
// custom metadata ("resultType" key). It returns ResultTypeUnknown when the
// metadata is missing, has an unexpected shape, or holds an unknown value.
func ResultTypeFromFrame(frame *data.Frame) ResultType {
	// Guard the whole access chain: previously a nil frame or nil frame.Meta
	// caused a panic before Custom could be checked.
	if frame == nil || frame.Meta == nil || frame.Meta.Custom == nil {
		return ResultTypeUnknown
	}
	custom, ok := frame.Meta.Custom.(map[string]string)
	if !ok {
		return ResultTypeUnknown
	}
	rt, ok := custom["resultType"]
	if !ok {
		return ResultTypeUnknown
	}
	switch resultType := ResultType(rt); resultType {
	case ResultTypeMatrix, ResultTypeExemplar, ResultTypeVector:
		return resultType
	}
	return ResultTypeUnknown
}
// String returns the raw string value of the result type.
func (r ResultType) String() string {
	return string(r)
}

// Exemplar is a single exemplar sample extracted from a query response.
type Exemplar struct {
	// SeriesLabels are the labels of the series the exemplar belongs to.
	SeriesLabels map[string]string
	// Fields holds the exemplar's data fields; RowIdx selects this
	// exemplar's row within them — presumably one column per exemplar
	// label, TODO confirm against the response parser.
	Fields    []*data.Field
	RowIdx    int
	Value     float64
	Timestamp time.Time
}

View File

@@ -0,0 +1,52 @@
package models
import (
"github.com/prometheus/prometheus/model/labels"
"github.com/prometheus/prometheus/promql/parser"
)
// ApplyQueryScope injects the given label matchers into every vector
// selector of rawExpr. A scope matcher whose name already appears on a
// selector replaces that matcher in place; the remaining scope matchers are
// appended to the selector.
func ApplyQueryScope(rawExpr string, matchers []*labels.Matcher) (string, error) {
	expr, err := parser.ParseExpr(rawExpr)
	if err != nil {
		return "", err
	}
	matcherNamesToIdx := make(map[string]int, len(matchers))
	for i, matcher := range matchers {
		if matcher == nil {
			continue
		}
		matcherNamesToIdx[matcher.Name] = i
	}
	parser.Inspect(expr, func(node parser.Node, nodes []parser.Node) error {
		vs, ok := node.(*parser.VectorSelector)
		if !ok {
			return nil
		}
		// Track which scope matchers were merged into an existing matcher.
		found := make([]bool, len(matchers))
		for _, matcher := range vs.LabelMatchers {
			// The metric-name matcher is never overridden by the scope.
			if matcher == nil || matcher.Name == labels.MetricName {
				continue
			}
			idx, ok := matcherNamesToIdx[matcher.Name]
			if !ok {
				continue
			}
			found[idx] = true
			scopeMatcher := matchers[idx]
			matcher.Name = scopeMatcher.Name
			matcher.Type = scopeMatcher.Type
			matcher.Value = scopeMatcher.Value
		}
		// Append scope matchers that didn't replace an existing one. Skip
		// nil entries: they were excluded from the index map above, so their
		// found flag is always false and they were previously appended as
		// nil matchers.
		for i, f := range found {
			if f || matchers[i] == nil {
				continue
			}
			vs.LabelMatchers = append(vs.LabelMatchers, matchers[i])
		}
		return nil
	})
	return expr.String(), nil
}

View File

@@ -0,0 +1,87 @@
package exemplar
import (
"time"
"github.com/grafana/grafana-plugin-sdk-go/data"
)
// Compile-time check that Framer implements the SDK's data.Framer interface.
var _ data.Framer = (*Framer)(nil)

// Framer accumulates response frames and, when Frames is called, appends a
// single long-format frame containing the sampled exemplars.
type Framer struct {
	frames       data.Frames  // frames collected via AddFrame
	sampler      Sampler      // decides which exemplars are kept
	labelTracker LabelTracker // supplies the label column names
	meta         *data.FrameMeta
	refID        string
}

// NewFramer returns a Framer that samples exemplars with the given sampler
// and builds label columns from the names recorded by labelTracker.
func NewFramer(sampler Sampler, labelTracker LabelTracker) *Framer {
	return &Framer{
		frames:       data.Frames{},
		sampler:      sampler,
		labelTracker: labelTracker,
	}
}
// SetMeta sets the metadata attached to the generated exemplar frame.
func (f *Framer) SetMeta(meta *data.FrameMeta) {
	f.meta = meta
}

// SetRefID sets the RefID attached to the generated exemplar frame.
func (f *Framer) SetRefID(refID string) {
	f.refID = refID
}

// AddFrame appends a response frame to the framer's collection.
func (f *Framer) AddFrame(frame *data.Frame) {
	f.frames = append(f.frames, frame)
}
// Frames returns the collected frames plus, when the sampler kept any
// exemplars, one additional long-format exemplar frame (timestamp, value,
// and one string column per tracked label name). The sampler is reset as a
// side effect, so a second call will not reproduce the exemplar frame.
func (f *Framer) Frames() (data.Frames, error) {
	exemplars := f.sampler.Sample()
	f.sampler.Reset()
	if len(exemplars) == 0 {
		return f.frames, nil
	}
	// the new exemplar frame will be a single frame in long format
	// with a timestamp, metric value, and one or more label fields
	exemplarFrame := data.NewFrame("exemplar")
	exemplarFrame.RefID = f.refID
	exemplarFrame.Meta = f.meta
	// init the fields for the new exemplar frame
	timeField := data.NewField(data.TimeSeriesTimeFieldName, nil, make([]time.Time, 0, len(exemplars)))
	valueField := data.NewField(data.TimeSeriesValueFieldName, nil, make([]float64, 0, len(exemplars)))
	exemplarFrame.Fields = append(exemplarFrame.Fields, timeField, valueField)
	labelNames := f.labelTracker.GetNames()
	// exemplarLabels is reused across rows; every key is overwritten below
	// before being appended, so no per-row reset is needed.
	exemplarLabels := make(map[string]string, len(labelNames))
	for _, labelName := range labelNames {
		exemplarFrame.Fields = append(exemplarFrame.Fields, data.NewField(labelName, nil, make([]string, 0, len(exemplars))))
	}
	// add the sampled exemplars to the new exemplar frame
	for _, b := range exemplars {
		// Fill labels map with default values (the series labels).
		for _, n := range labelNames {
			exemplarLabels[n] = b.SeriesLabels[n]
		}
		// Enter corresponding label values from exemplar fields, which take
		// precedence over the series labels for the same name.
		for _, bf := range b.Fields {
			if _, exists := exemplarLabels[bf.Name]; exists {
				exemplarLabels[bf.Name] = bf.CopyAt(b.RowIdx).(string)
			}
		}
		timeField.Append(b.Timestamp)
		valueField.Append(b.Value)
		for i, labelName := range labelNames {
			colIdx := i + 2 // +2 to skip time and value fields
			exemplarFrame.Fields[colIdx].Append(exemplarLabels[labelName])
		}
	}
	f.frames = append(f.frames, exemplarFrame)
	return f.frames, nil
}

View File

@@ -0,0 +1,52 @@
package exemplar
import (
"sort"
"github.com/grafana/grafana-plugin-sdk-go/data"
)
// Compile-time check that labelTracker implements LabelTracker.
var _ LabelTracker = (*labelTracker)(nil)

// LabelTracker records the set of label names seen across exemplars so the
// exemplar frame can be built with one column per name.
type LabelTracker interface {
	Add(labels map[string]string)
	AddFields(fields []*data.Field)
	GetNames() []string
}

// labelTracker is the default LabelTracker, backed by a set of names.
type labelTracker struct {
	labelSet map[string]struct{}
}

// NewLabelTracker returns an empty labelTracker.
func NewLabelTracker() LabelTracker {
	return &labelTracker{
		labelSet: map[string]struct{}{},
	}
}
// Add saves label names that haven't been seen before
// so that they can be used to build the label fields in the exemplar frame.
// Only the keys are recorded; values are ignored.
func (l *labelTracker) Add(labels map[string]string) {
	for k := range labels {
		l.labelSet[k] = struct{}{}
	}
}

// AddFields saves field names so that they can be used to build the label
// fields in the exemplar frame.
func (l *labelTracker) AddFields(fields []*data.Field) {
	for _, f := range fields {
		l.labelSet[f.Name] = struct{}{}
	}
}
// GetNames returns the tracked label names, sorted in ascending order.
func (l *labelTracker) GetNames() []string {
	labelNames := make([]string, 0, len(l.labelSet))
	for k := range l.labelSet {
		labelNames = append(labelNames, k)
	}
	// Map keys are unique, so stability is irrelevant: plain sort.Strings is
	// equivalent to the previous sort.SliceStable with a "<" comparator.
	sort.Strings(labelNames)
	return labelNames
}

View File

@@ -0,0 +1,46 @@
package exemplar
import (
"sort"
"time"
"github.com/grafana/grafana/pkg/promlib/models"
)
// Sampler accumulates exemplars and produces a time-ordered selection of
// them; implementations decide whether and how to thin the collection.
type Sampler interface {
	// Add records a single exemplar.
	Add(models.Exemplar)
	// SetStep provides the query step to step-aware samplers.
	SetStep(time.Duration)
	// Sample returns the selected exemplars sorted by timestamp.
	Sample() []models.Exemplar
	// Reset clears all accumulated state for reuse.
	Reset()
}
var _ Sampler = (*NoOpSampler)(nil)

// NoOpSampler keeps every exemplar it is given and performs no
// down-sampling; Sample only orders the collected exemplars by time.
type NoOpSampler struct {
	exemplars []models.Exemplar
}

// NewNoOpSampler returns a Sampler that retains all exemplars.
func NewNoOpSampler() Sampler {
	return &NoOpSampler{exemplars: []models.Exemplar{}}
}

// Add stores the exemplar.
func (s *NoOpSampler) Add(ex models.Exemplar) {
	s.exemplars = append(s.exemplars, ex)
}

// SetStep is a no-op: this sampler does not bucket by step.
func (s *NoOpSampler) SetStep(time.Duration) {}

// Sample returns every collected exemplar sorted by timestamp (stable, so
// insertion order is preserved for equal timestamps).
func (s *NoOpSampler) Sample() []models.Exemplar {
	sort.SliceStable(s.exemplars, func(i, j int) bool {
		return s.exemplars[i].Timestamp.Before(s.exemplars[j].Timestamp)
	})
	return s.exemplars
}

// Reset discards all collected exemplars.
func (s *NoOpSampler) Reset() {
	s.exemplars = []models.Exemplar{}
}

View File

@@ -0,0 +1,93 @@
package exemplar
import (
"math"
"sort"
"time"
"github.com/grafana/grafana/pkg/promlib/models"
)
// StandardDeviationSampler buckets exemplars by step-aligned timestamp and
// maintains running mean/variance aggregates (Welford) so Sample can thin
// each bucket by standard-deviation distance.
type StandardDeviationSampler struct {
	step    time.Duration                   // bucket width; set via SetStep and used by Add for alignment
	buckets map[time.Time][]models.Exemplar // exemplars grouped by aligned timestamp
	count   int                             // number of values seen (Welford)
	mean    float64                         // running mean of values (Welford)
	m2      float64                         // running sum of squared deviations (Welford)
}

// NewStandardDeviationSampler returns an empty sampler with initialized buckets.
func NewStandardDeviationSampler() Sampler {
	return &StandardDeviationSampler{
		buckets: map[time.Time][]models.Exemplar{},
	}
}

// SetStep sets the bucket width used when aligning exemplar timestamps in Add.
func (e *StandardDeviationSampler) SetStep(step time.Duration) {
	e.step = step
}
// Add folds the exemplar's value into the running aggregates and appends
// the exemplar to its step-aligned time bucket.
func (e *StandardDeviationSampler) Add(ex models.Exemplar) {
	e.updateAggregations(ex.Value)
	bucketTs := models.AlignTimeRange(ex.Timestamp, e.step, 0)
	// Appending to a missing key is fine: the nil slice grows into a new bucket.
	e.buckets[bucketTs] = append(e.buckets[bucketTs], ex)
}
// updateAggregations folds val into the running mean and variance using
// Welford's online algorithm:
// https://en.wikipedia.org/wiki/Algorithms_for_calculating_variance#Welford's_online_algorithm
func (e *StandardDeviationSampler) updateAggregations(val float64) {
	e.count++
	// deltaOld is taken against the old mean, deltaNew against the updated
	// one; their product accumulates into m2. The order is essential.
	deltaOld := val - e.mean
	e.mean += deltaOld / float64(e.count)
	deltaNew := val - e.mean
	e.m2 += deltaOld * deltaNew
}
// standardDeviation reports the sample standard deviation (Bessel-corrected,
// i.e. divided by count-1) of all values seen so far; zero until at least
// two values have been added.
// https://en.wikipedia.org/wiki/Standard_deviation
func (e *StandardDeviationSampler) standardDeviation() float64 {
	if e.count >= 2 {
		return math.Sqrt(e.m2 / float64(e.count-1))
	}
	return 0
}
// Sample returns a time-ordered subset of the collected exemplars. Within
// each bucket the highest value is always kept, plus any further value at
// least two standard deviations below the previously kept one.
func (e *StandardDeviationSampler) Sample() []models.Exemplar {
	// The aggregates do not change while sampling, so compute the standard
	// deviation once instead of twice per comparison inside the loop.
	stdDev := e.standardDeviation()
	exemplars := make([]models.Exemplar, 0, len(e.buckets))
	for _, b := range e.buckets {
		// sort by value in descending order
		sort.SliceStable(b, func(i, j int) bool {
			return b[i].Value > b[j].Value
		})
		sampled := []models.Exemplar{}
		for _, ex := range b {
			if len(sampled) == 0 {
				sampled = append(sampled, ex)
				continue
			}
			// only sample values at least 2 standard deviation distance to previously taken value
			prev := sampled[len(sampled)-1]
			if stdDev != 0.0 && prev.Value-ex.Value > stdDev*2.0 {
				sampled = append(sampled, ex)
			}
		}
		exemplars = append(exemplars, sampled...)
	}
	sort.SliceStable(exemplars, func(i, j int) bool {
		return exemplars[i].Timestamp.Before(exemplars[j].Timestamp)
	})
	return exemplars
}
// Reset returns the sampler to its initial empty state: fresh buckets and
// zeroed step and Welford aggregates.
func (e *StandardDeviationSampler) Reset() {
	*e = StandardDeviationSampler{
		buckets: map[time.Time][]models.Exemplar{},
	}
}

View File

@@ -0,0 +1,28 @@
package exemplar_test
import (
"testing"
"time"
"github.com/grafana/grafana-plugin-sdk-go/experimental"
"github.com/grafana/grafana/pkg/promlib/models"
"github.com/grafana/grafana/pkg/promlib/querydata/exemplar"
)
// TestStdDevSampler feeds per-second exemplars over a 100000s range into the
// standard deviation sampler (10-minute step) and compares the framed output
// against the stddev_sampler golden file.
func TestStdDevSampler(t *testing.T) {
	sampler := exemplar.NewStandardDeviationSampler().(*exemplar.StandardDeviationSampler)
	t.Run("standard deviation sampler", func(t *testing.T) {
		tr := models.TimeRange{
			Start: time.Unix(0, 0),
			End:   time.Unix(100000, 0),
		}
		ex := generateTestExemplars(tr)
		sampler.SetStep(600 * time.Second)
		for i := 0; i < len(ex); i++ {
			sampler.Add(ex[i])
		}
		framer := exemplar.NewFramer(sampler, exemplar.NewLabelTracker())
		experimental.CheckGoldenJSONFramer(t, "testdata", "stddev_sampler", framer, update)
	})
}

View File

@@ -0,0 +1,45 @@
package exemplar_test
import (
"testing"
"time"
"github.com/grafana/grafana-plugin-sdk-go/experimental"
"github.com/grafana/grafana/pkg/promlib/models"
"github.com/grafana/grafana/pkg/promlib/querydata/exemplar"
)
const update = true
// TestNoOpSampler feeds one exemplar per second into the no-op sampler and
// compares the framed output against the noop_sampler golden file.
func TestNoOpSampler(t *testing.T) {
	sampler := exemplar.NewNoOpSampler().(*exemplar.NoOpSampler)
	t.Run("no-op sampler", func(t *testing.T) {
		tr := models.TimeRange{
			Start: time.Unix(0, 0),
			End:   time.Unix(2000, 0),
		}
		ex := generateTestExemplars(tr)
		for i := 0; i < len(ex); i++ {
			sampler.Add(ex[i])
		}
		framer := exemplar.NewFramer(sampler, exemplar.NewLabelTracker())
		experimental.CheckGoldenJSONFramer(t, "testdata", "noop_sampler", framer, update)
	})
}
// generateTestExemplars produces one exemplar per second over
// [tr.Start, tr.End), with each value set to its own unix timestamp.
func generateTestExemplars(tr models.TimeRange) []models.Exemplar {
	exemplars := []models.Exemplar{}
	for ts := tr.Start.UTC(); ts.Before(tr.End); ts = ts.Add(time.Second).UTC() {
		exemplars = append(exemplars, models.Exemplar{
			Timestamp: ts,
			Value:     float64(ts.Unix()),
		})
	}
	return exemplars
}

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,392 @@
// 🌟 This was machine generated. Do not edit. 🌟
//
// Frame[0]
// Name: exemplar
// Dimensions: 2 Fields by 167 Rows
// +-------------------------------+-----------------+
// | Name: Time | Name: Value |
// | Labels: | Labels: |
// | Type: []time.Time | Type: []float64 |
// +-------------------------------+-----------------+
// | 1970-01-01 00:09:59 +0000 UTC | 599 |
// | 1970-01-01 00:19:59 +0000 UTC | 1199 |
// | 1970-01-01 00:29:59 +0000 UTC | 1799 |
// | 1970-01-01 00:39:59 +0000 UTC | 2399 |
// | 1970-01-01 00:49:59 +0000 UTC | 2999 |
// | 1970-01-01 00:59:59 +0000 UTC | 3599 |
// | 1970-01-01 01:09:59 +0000 UTC | 4199 |
// | 1970-01-01 01:19:59 +0000 UTC | 4799 |
// | 1970-01-01 01:29:59 +0000 UTC | 5399 |
// | ... | ... |
// +-------------------------------+-----------------+
//
//
// 🌟 This was machine generated. Do not edit. 🌟
{
"status": 200,
"frames": [
{
"schema": {
"name": "exemplar",
"fields": [
{
"name": "Time",
"type": "time",
"typeInfo": {
"frame": "time.Time"
}
},
{
"name": "Value",
"type": "number",
"typeInfo": {
"frame": "float64"
}
}
]
},
"data": {
"values": [
[
599000,
1199000,
1799000,
2399000,
2999000,
3599000,
4199000,
4799000,
5399000,
5999000,
6599000,
7199000,
7799000,
8399000,
8999000,
9599000,
10199000,
10799000,
11399000,
11999000,
12599000,
13199000,
13799000,
14399000,
14999000,
15599000,
16199000,
16799000,
17399000,
17999000,
18599000,
19199000,
19799000,
20399000,
20999000,
21599000,
22199000,
22799000,
23399000,
23999000,
24599000,
25199000,
25799000,
26399000,
26999000,
27599000,
28199000,
28799000,
29399000,
29999000,
30599000,
31199000,
31799000,
32399000,
32999000,
33599000,
34199000,
34799000,
35399000,
35999000,
36599000,
37199000,
37799000,
38399000,
38999000,
39599000,
40199000,
40799000,
41399000,
41999000,
42599000,
43199000,
43799000,
44399000,
44999000,
45599000,
46199000,
46799000,
47399000,
47999000,
48599000,
49199000,
49799000,
50399000,
50999000,
51599000,
52199000,
52799000,
53399000,
53999000,
54599000,
55199000,
55799000,
56399000,
56999000,
57599000,
58199000,
58799000,
59399000,
59999000,
60599000,
61199000,
61799000,
62399000,
62999000,
63599000,
64199000,
64799000,
65399000,
65999000,
66599000,
67199000,
67799000,
68399000,
68999000,
69599000,
70199000,
70799000,
71399000,
71999000,
72599000,
73199000,
73799000,
74399000,
74999000,
75599000,
76199000,
76799000,
77399000,
77999000,
78599000,
79199000,
79799000,
80399000,
80999000,
81599000,
82199000,
82799000,
83399000,
83999000,
84599000,
85199000,
85799000,
86399000,
86999000,
87599000,
88199000,
88799000,
89399000,
89999000,
90599000,
91199000,
91799000,
92399000,
92999000,
93599000,
94199000,
94799000,
95399000,
95999000,
96599000,
97199000,
97799000,
98399000,
98999000,
99599000,
99999000
],
[
599,
1199,
1799,
2399,
2999,
3599,
4199,
4799,
5399,
5999,
6599,
7199,
7799,
8399,
8999,
9599,
10199,
10799,
11399,
11999,
12599,
13199,
13799,
14399,
14999,
15599,
16199,
16799,
17399,
17999,
18599,
19199,
19799,
20399,
20999,
21599,
22199,
22799,
23399,
23999,
24599,
25199,
25799,
26399,
26999,
27599,
28199,
28799,
29399,
29999,
30599,
31199,
31799,
32399,
32999,
33599,
34199,
34799,
35399,
35999,
36599,
37199,
37799,
38399,
38999,
39599,
40199,
40799,
41399,
41999,
42599,
43199,
43799,
44399,
44999,
45599,
46199,
46799,
47399,
47999,
48599,
49199,
49799,
50399,
50999,
51599,
52199,
52799,
53399,
53999,
54599,
55199,
55799,
56399,
56999,
57599,
58199,
58799,
59399,
59999,
60599,
61199,
61799,
62399,
62999,
63599,
64199,
64799,
65399,
65999,
66599,
67199,
67799,
68399,
68999,
69599,
70199,
70799,
71399,
71999,
72599,
73199,
73799,
74399,
74999,
75599,
76199,
76799,
77399,
77999,
78599,
79199,
79799,
80399,
80999,
81599,
82199,
82799,
83399,
83999,
84599,
85199,
85799,
86399,
86999,
87599,
88199,
88799,
89399,
89999,
90599,
91199,
91799,
92399,
92999,
93599,
94199,
94799,
95399,
95999,
96599,
97199,
97799,
98399,
98999,
99599,
99999
]
]
}
}
]
}

View File

@@ -0,0 +1,153 @@
package querydata_test
import (
"bytes"
"context"
"encoding/json"
"fmt"
"io"
"math/rand"
"net/http"
"os"
"path/filepath"
"strings"
"testing"
"time"
"github.com/grafana/grafana-plugin-sdk-go/backend"
"github.com/stretchr/testify/require"
"github.com/grafana/grafana/pkg/promlib/models"
)
// when memory-profiling this benchmark, these commands are recommended:
// - go test -benchmem -run=^$ -bench ^BenchmarkExemplarJson$ github.com/grafana/grafana/pkg/promlib/querydata -memprofile memprofile.out -count 6 | tee old.txt
// - go tool pprof -http=localhost:6061 memprofile.out

// BenchmarkExemplarJson measures end-to-end parsing of a stored exemplar
// response through QueryData.Execute.
func BenchmarkExemplarJson(b *testing.B) {
	queryFileName := filepath.Join("../testdata", "exemplar.query.json")
	query, err := loadStoredQuery(queryFileName)
	require.NoError(b, err)
	responseFileName := filepath.Join("../testdata", "exemplar.result.json")
	// nolint:gosec
	// We can ignore the gosec G304 warning since this is a test file
	responseBytes, err := os.ReadFile(responseFileName)
	require.NoError(b, err)
	tCtx, err := setup()
	require.NoError(b, err)
	b.ResetTimer()
	for n := 0; n < b.N; n++ {
		// A fresh body reader is required each iteration: Execute consumes it.
		res := http.Response{
			StatusCode: 200,
			Body:       io.NopCloser(bytes.NewReader(responseBytes)),
		}
		tCtx.httpProvider.setResponse(&res)
		resp, err := tCtx.queryData.Execute(context.Background(), query)
		require.NoError(b, err)
		for _, r := range resp.Responses {
			require.NoError(b, r.Error)
		}
	}
}
// resp is a package-level sink for the benchmark result so the benchmarked
// call cannot be optimized away.
var resp *backend.QueryDataResponse

// when memory-profiling this benchmark, these commands are recommended:
// - go test -benchmem -run=^$ -bench ^BenchmarkRangeJson$ github.com/grafana/grafana/pkg/promlib/querydata -memprofile memprofile.out -count 6 | tee old.txt
// - go tool pprof -http=localhost:6061 memprofile.out
// - benchstat old.txt new.txt

// BenchmarkRangeJson measures parsing of a generated matrix response with
// 400 series of 300 points each.
func BenchmarkRangeJson(b *testing.B) {
	var (
		r   *backend.QueryDataResponse
		err error
	)
	body, q := createJsonTestData(1642000000, 1, 300, 400)
	tCtx, err := setup()
	require.NoError(b, err)
	b.ResetTimer()
	for n := 0; n < b.N; n++ {
		// A fresh body reader is required each iteration: Execute consumes it.
		res := http.Response{
			StatusCode: 200,
			Body:       io.NopCloser(bytes.NewReader(body)),
		}
		tCtx.httpProvider.setResponse(&res)
		r, err = tCtx.queryData.Execute(context.Background(), q)
		require.NoError(b, err)
	}
	resp = r
}
const nanRate = 0.002
// we build the JSON file from strings,
// it was easier to write it this way.
func makeJsonTestMetric(index int) string {
return fmt.Sprintf(`{"server":"main","category":"maintenance","case":"%v"}`, index)
}
// return a value between -100 and +100, sometimes NaN, in string
func makeJsonTestValue(r *rand.Rand) string {
if r.Float64() < nanRate {
return "NaN"
} else {
return fmt.Sprintf("%f", (r.Float64()*200)-100)
}
}
// makeJsonTestSeries renders a single time-series as a JSON fragment with
// timestampCount points starting at start and spaced step seconds apart;
// series 0 skips every even-indexed point.
func makeJsonTestSeries(start int64, step int64, timestampCount int, r *rand.Rand, seriesIndex int) string {
	values := make([]string, 0, timestampCount)
	for i := 0; i < timestampCount; i++ {
		// create out of order timestamps to test sorting
		if seriesIndex == 0 && i%2 == 0 {
			continue
		}
		values = append(values, fmt.Sprintf(`[%d,"%v"]`, start+(int64(i)*step), makeJsonTestValue(r)))
	}
	return fmt.Sprintf(`{"metric":%v,"values":[%v]}`, makeJsonTestMetric(seriesIndex), strings.Join(values, ","))
}
// createJsonTestData renders a synthetic Prometheus matrix response with
// seriesCount series of timestampCount points each, plus a matching range
// query request covering the same time span.
func createJsonTestData(start int64, step int64, timestampCount int, seriesCount int) ([]byte, *backend.QueryDataRequest) {
	// we use random numbers as values, but they have to be the same numbers
	// every time we call this, so we create a random source.
	r := rand.New(rand.NewSource(42))
	var allSeries []string
	for i := 0; i < seriesCount; i++ {
		allSeries = append(allSeries, makeJsonTestSeries(start, step, timestampCount, r, i))
	}
	// Named payload (was "bytes") to avoid shadowing the imported bytes package.
	payload := []byte(fmt.Sprintf(`{"status":"success","data":{"resultType":"matrix","result":[%v]}}`, strings.Join(allSeries, ",")))
	qm := models.QueryModel{
		PrometheusQueryProperties: models.PrometheusQueryProperties{
			Range: true,
			Expr:  "test",
		},
	}
	data, err := json.Marshal(&qm)
	if err != nil {
		panic(err)
	}
	req := backend.QueryDataRequest{
		Queries: []backend.DataQuery{
			{
				RefID: "A",
				TimeRange: backend.TimeRange{
					From: time.Unix(start, 0),
					To:   time.Unix(start+((int64(timestampCount)-1)*step), 0),
				},
				Interval: time.Second * time.Duration(step),
				JSON:     data,
			},
		},
	}
	return payload, &req
}

View File

@@ -0,0 +1,153 @@
package querydata_test
import (
"bytes"
"context"
"encoding/json"
"fmt"
"io"
"net/http"
"os"
"path/filepath"
"testing"
"time"
"github.com/grafana/grafana-plugin-sdk-go/backend"
"github.com/grafana/grafana-plugin-sdk-go/experimental"
"github.com/stretchr/testify/require"
"github.com/grafana/grafana/pkg/promlib/models"
)
var update = true
// TestRangeResponses checks matrix (range) response parsing against golden files.
func TestRangeResponses(t *testing.T) {
	cases := []struct {
		name     string
		filepath string
	}{
		{name: "parse a simple matrix response", filepath: "range_simple"},
		{name: "parse a simple matrix response with value missing steps", filepath: "range_missing"},
		{name: "parse a matrix response with Infinity", filepath: "range_infinity"},
		{name: "parse a matrix response with NaN", filepath: "range_nan"},
		{name: "parse a response with legendFormat __auto", filepath: "range_auto"},
	}
	for _, tc := range cases {
		queryFile := filepath.Join("../testdata", tc.filepath+".query.json")
		responseFile := filepath.Join("../testdata", tc.filepath+".result.json")
		goldenFile := tc.filepath + ".result.golden"
		t.Run(tc.name, goldenScenario(tc.name, queryFile, responseFile, goldenFile))
	}
}
// TestExemplarResponses checks exemplar response parsing against golden files.
func TestExemplarResponses(t *testing.T) {
	cases := []struct {
		name     string
		filepath string
	}{
		{name: "parse an exemplar response", filepath: "exemplar"},
	}
	for _, tc := range cases {
		queryFile := filepath.Join("../testdata", tc.filepath+".query.json")
		responseFile := filepath.Join("../testdata", tc.filepath+".result.json")
		goldenFile := tc.filepath + ".result.golden"
		t.Run(tc.name, goldenScenario(tc.name, queryFile, responseFile, goldenFile))
	}
}
// goldenScenario builds a subtest that loads a stored query and a canned
// response, executes them, and checks the single "A" result against the
// named golden file.
func goldenScenario(name, queryFileName, responseFileName, goldenFileName string) func(t *testing.T) {
	return func(t *testing.T) {
		query, err := loadStoredQuery(queryFileName)
		require.NoError(t, err)
		//nolint:gosec
		responseBytes, err := os.ReadFile(responseFileName)
		require.NoError(t, err)
		result, err := runQuery(responseBytes, query)
		require.NoError(t, err)
		require.Len(t, result.Responses, 1)
		dr, ok := result.Responses["A"]
		require.True(t, ok)
		experimental.CheckGoldenJSONResponse(t, "../testdata", goldenFileName, &dr, update)
	}
}
// we store the prometheus query data in a json file, here is some minimal code
// to be able to read it back. unfortunately we cannot use the models.Query
// struct here, because it has `time.time` and `time.duration` fields that
// cannot be unmarshalled from JSON automatically.
type storedPrometheusQuery struct {
	RefId         string // response key, e.g. "A"
	RangeQuery    bool   // run as a range query
	ExemplarQuery bool   // run as an exemplar query
	Start         int64  // unix seconds
	End           int64  // unix seconds
	Step          int64  // seconds
	Expr          string // PromQL expression
	LegendFormat  string
}
// loadStoredQuery reads a storedPrometheusQuery JSON file and converts it
// into a single-query QueryDataRequest.
func loadStoredQuery(fileName string) (*backend.QueryDataRequest, error) {
	// Named raw (was "bytes") to avoid shadowing the imported bytes package.
	//nolint:gosec
	raw, err := os.ReadFile(fileName)
	if err != nil {
		return nil, err
	}
	var sq storedPrometheusQuery
	if err := json.Unmarshal(raw, &sq); err != nil {
		return nil, err
	}
	qm := models.QueryModel{
		PrometheusQueryProperties: models.PrometheusQueryProperties{
			Range:        sq.RangeQuery,
			Exemplar:     sq.ExemplarQuery,
			Expr:         sq.Expr,
			LegendFormat: sq.LegendFormat,
		},
		CommonQueryProperties: models.CommonQueryProperties{
			IntervalMs: sq.Step * 1000,
		},
		Interval: fmt.Sprintf("%ds", sq.Step),
	}
	data, err := json.Marshal(&qm)
	if err != nil {
		return nil, err
	}
	return &backend.QueryDataRequest{
		Queries: []backend.DataQuery{
			{
				TimeRange: backend.TimeRange{
					From: time.Unix(sq.Start, 0),
					To:   time.Unix(sq.End, 0),
				},
				RefID:    sq.RefId,
				Interval: time.Second * time.Duration(sq.Step),
				JSON:     json.RawMessage(data),
			},
		},
	}, nil
}
// runQuery wires the canned HTTP response body into a fresh test context
// and executes the request against it.
func runQuery(response []byte, q *backend.QueryDataRequest) (*backend.QueryDataResponse, error) {
	tCtx, err := setup()
	if err != nil {
		return nil, err
	}
	tCtx.httpProvider.setResponse(&http.Response{
		StatusCode: 200,
		Body:       io.NopCloser(bytes.NewReader(response)),
	})
	return tCtx.queryData.Execute(context.Background(), q)
}

View File

@@ -0,0 +1,228 @@
package querydata
import (
"context"
"fmt"
"net/http"
"regexp"
"time"
"github.com/grafana/grafana-plugin-sdk-go/backend"
"github.com/grafana/grafana-plugin-sdk-go/data"
"github.com/grafana/grafana-plugin-sdk-go/data/utils/maputil"
"go.opentelemetry.io/otel/attribute"
"go.opentelemetry.io/otel/trace"
"github.com/grafana/grafana-plugin-sdk-go/backend/log"
"github.com/grafana/grafana-plugin-sdk-go/backend/tracing"
"github.com/grafana/grafana/pkg/promlib/client"
"github.com/grafana/grafana/pkg/promlib/intervalv2"
"github.com/grafana/grafana/pkg/promlib/models"
"github.com/grafana/grafana/pkg/promlib/querydata/exemplar"
"github.com/grafana/grafana/pkg/promlib/utils"
)
// legendFormatAuto is the sentinel legend-format value; its special
// handling lives outside this excerpt.
const legendFormatAuto = "__auto"

// legendFormatRegexp captures {{ label }} placeholders in a legend format string.
var legendFormatRegexp = regexp.MustCompile(`\{\{\s*(.+?)\s*\}\}`)

// ExemplarEvent is a single exemplar: when it happened, its value, and its labels.
type ExemplarEvent struct {
	Time   time.Time
	Value  float64
	Labels map[string]string
}
// QueryData handles querying but different from buffered package uses a custom client instead of default Go Prom
// client.
type QueryData struct {
	intervalCalculator intervalv2.Calculator // resolves the effective step for a query
	tracer             trace.Tracer
	client             *client.Client // custom Prometheus HTTP client
	log                log.Logger
	ID                 int64  // datasource instance ID
	URL                string // datasource URL
	TimeInterval       string // the "timeInterval" datasource JSON setting
	exemplarSampler    func() exemplar.Sampler // factory producing a sampler per exemplar query
}
// New builds a QueryData instance from the datasource settings: it reads the
// optional httpMethod (defaulting to POST) and timeInterval JSON options and
// wires up the custom Prometheus client against settings.URL.
func New(
	httpClient *http.Client,
	settings backend.DataSourceInstanceSettings,
	plog log.Logger,
) (*QueryData, error) {
	jsonData, err := utils.GetJsonData(settings)
	if err != nil {
		return nil, err
	}
	// Check the error here too (it was silently discarded before), matching
	// the handling of the timeInterval option below.
	httpMethod, err := maputil.GetStringOptional(jsonData, "httpMethod")
	if err != nil {
		return nil, err
	}
	timeInterval, err := maputil.GetStringOptional(jsonData, "timeInterval")
	if err != nil {
		return nil, err
	}
	if httpMethod == "" {
		httpMethod = http.MethodPost
	}
	promClient := client.NewClient(httpClient, httpMethod, settings.URL)
	// standard deviation sampler is the default for backwards compatibility
	exemplarSampler := exemplar.NewStandardDeviationSampler
	return &QueryData{
		intervalCalculator: intervalv2.NewCalculator(),
		tracer:             tracing.DefaultTracer(),
		log:                plog,
		client:             promClient,
		TimeInterval:       timeInterval,
		ID:                 settings.ID,
		URL:                settings.URL,
		exemplarSampler:    exemplarSampler,
	}, nil
}
// Execute parses every query in the request, runs it against Prometheus,
// and collects the responses keyed by RefID. A parse failure returns the
// partially filled result together with the error; a nil fetch result is
// logged and skipped.
func (s *QueryData) Execute(ctx context.Context, req *backend.QueryDataRequest) (*backend.QueryDataResponse, error) {
	fromAlert := req.Headers["FromAlert"] == "true"
	result := backend.QueryDataResponse{
		Responses: backend.Responses{},
	}
	// Feature flags are resolved once per request, not per query.
	cfg := backend.GrafanaConfigFromContext(ctx)
	hasPromQLScopeFeatureFlag := cfg.FeatureToggles().IsEnabled("promQLScope")
	hasPrometheusDataplaneFeatureFlag := cfg.FeatureToggles().IsEnabled("prometheusDataplane")
	for _, q := range req.Queries {
		query, err := models.Parse(q, s.TimeInterval, s.intervalCalculator, fromAlert, hasPromQLScopeFeatureFlag)
		if err != nil {
			return &result, err
		}
		r := s.fetch(ctx, s.client, query, hasPrometheusDataplaneFeatureFlag)
		if r == nil {
			s.log.FromContext(ctx).Debug("Received nil response from runQuery", "query", query.Expr)
			continue
		}
		result.Responses[q.RefID] = *r
	}
	return &result, nil
}
// fetch runs the instant, range and exemplar variants enabled on the query
// model and merges their results into one DataResponse. Instant and range
// errors are surfaced (and chained when both occur); exemplar errors are
// only logged so the other results still go through.
func (s *QueryData) fetch(ctx context.Context, client *client.Client, q *models.Query, enablePrometheusDataplane bool) *backend.DataResponse {
	traceCtx, end := s.trace(ctx, q)
	defer end()
	logger := s.log.FromContext(traceCtx)
	logger.Debug("Sending query", "start", q.Start, "end", q.End, "step", q.Step, "query", q.Expr)
	dr := &backend.DataResponse{
		Frames: data.Frames{},
		Error:  nil,
	}
	if q.InstantQuery {
		res := s.instantQuery(traceCtx, client, q, enablePrometheusDataplane)
		dr.Error = res.Error
		dr.Frames = res.Frames
		dr.Status = res.Status
	}
	if q.RangeQuery {
		res := s.rangeQuery(traceCtx, client, q, enablePrometheusDataplane)
		if res.Error != nil {
			if dr.Error == nil {
				dr.Error = res.Error
			} else {
				// Chain the range error onto an earlier instant error.
				dr.Error = fmt.Errorf("%v %w", dr.Error, res.Error)
			}
			// When both instant and range are true, we may overwrite the status code.
			// To fix this (and other things) they should come in separate http requests.
			dr.Status = res.Status
		}
		dr.Frames = append(dr.Frames, res.Frames...)
	}
	if q.ExemplarQuery {
		res := s.exemplarQuery(traceCtx, client, q, enablePrometheusDataplane)
		if res.Error != nil {
			// If exemplar query returns error, we want to only log it and
			// continue with other results processing
			logger.Error("Exemplar query failed", "query", q.Expr, "err", res.Error)
		}
		dr.Frames = append(dr.Frames, res.Frames...)
	}
	return dr
}
// rangeQuery runs the range query and parses its HTTP response; a
// transport-level failure maps to a bad-gateway status.
func (s *QueryData) rangeQuery(ctx context.Context, c *client.Client, q *models.Query, enablePrometheusDataplaneFlag bool) backend.DataResponse {
	res, err := c.QueryRange(ctx, q)
	if err != nil {
		return backend.DataResponse{
			Error:  err,
			Status: backend.StatusBadGateway,
		}
	}
	defer func() {
		if cerr := res.Body.Close(); cerr != nil {
			s.log.Warn("Failed to close query range response body", "error", cerr)
		}
	}()
	return s.parseResponse(ctx, q, res, enablePrometheusDataplaneFlag)
}
// instantQuery runs the instant query and parses its HTTP response; a
// transport-level failure maps to a bad-gateway status, and a non-200 from
// the internal health-check query is returned as a plain error.
func (s *QueryData) instantQuery(ctx context.Context, c *client.Client, q *models.Query, enablePrometheusDataplaneFlag bool) backend.DataResponse {
	res, err := c.QueryInstant(ctx, q)
	if err != nil {
		return backend.DataResponse{
			Error:  err,
			Status: backend.StatusBadGateway,
		}
	}
	// Register the close before any early return so the health-check path
	// below no longer leaks the response body.
	defer func() {
		err := res.Body.Close()
		if err != nil {
			s.log.Warn("Failed to close response body", "error", err)
		}
	}()
	// This is only for health check fall back scenario
	if res.StatusCode != 200 && q.RefId == "__healthcheck__" {
		return backend.DataResponse{
			// res.Status is runtime data; never use it as a format string.
			Error: fmt.Errorf("%s", res.Status),
		}
	}
	return s.parseResponse(ctx, q, res, enablePrometheusDataplaneFlag)
}
// exemplarQuery runs the exemplar query and parses its HTTP response;
// unlike instant/range, a transport failure carries no status (the caller
// only logs exemplar errors).
func (s *QueryData) exemplarQuery(ctx context.Context, c *client.Client, q *models.Query, enablePrometheusDataplaneFlag bool) backend.DataResponse {
	res, err := c.QueryExemplars(ctx, q)
	if err != nil {
		return backend.DataResponse{Error: err}
	}
	defer func() {
		if cerr := res.Body.Close(); cerr != nil {
			s.log.Warn("Failed to close response body", "error", cerr)
		}
	}()
	return s.parseResponse(ctx, q, res, enablePrometheusDataplaneFlag)
}
// trace starts a "datasource.prometheus" span recording the expression and
// time range of the query; the returned func ends the span.
func (s *QueryData) trace(ctx context.Context, q *models.Query) (context.Context, func()) {
	return utils.StartTrace(ctx, s.tracer, "datasource.prometheus",
		attribute.String("expr", q.Expr),
		attribute.Int64("start_unixnano", q.Start.UnixNano()),
		attribute.Int64("stop_unixnano", q.End.UnixNano()),
	)
}

View File

@@ -0,0 +1,486 @@
package querydata_test
import (
"bytes"
"context"
"encoding/json"
"fmt"
"io"
"math"
"net/http"
"testing"
"time"
apiv1 "github.com/prometheus/client_golang/api/prometheus/v1"
p "github.com/prometheus/common/model"
"github.com/stretchr/testify/require"
"github.com/grafana/grafana-plugin-sdk-go/backend"
"github.com/grafana/grafana-plugin-sdk-go/backend/httpclient"
"github.com/grafana/grafana-plugin-sdk-go/backend/log"
"github.com/grafana/grafana-plugin-sdk-go/data"
"github.com/grafana/grafana/pkg/promlib/client"
"github.com/grafana/grafana/pkg/promlib/models"
"github.com/grafana/grafana/pkg/promlib/querydata"
)
// TestPrometheus_parseTimeSeriesResponse covers conversion of Prometheus
// API responses (exemplar, matrix, vector, scalar) into data frames.
// Change vs. previous revision: a duplicated "Time" assertion in the vector
// subtest was removed.
func TestPrometheus_parseTimeSeriesResponse(t *testing.T) {
	t.Run("exemplars response should be sampled and parsed normally", func(t *testing.T) {
		t.Skip()
		exemplars := []apiv1.ExemplarQueryResult{
			{
				SeriesLabels: p.LabelSet{
					"__name__": "tns_request_duration_seconds_bucket",
					"instance": "app:80",
					"job":      "tns/app",
				},
				Exemplars: []apiv1.Exemplar{
					{
						Labels:    p.LabelSet{"traceID": "test1"},
						Value:     0.003535405,
						Timestamp: p.TimeFromUnixNano(time.Now().Add(-2 * time.Minute).UnixNano()),
					},
					{
						Labels:    p.LabelSet{"traceID": "test2"},
						Value:     0.005555605,
						Timestamp: p.TimeFromUnixNano(time.Now().Add(-4 * time.Minute).UnixNano()),
					},
					{
						Labels:    p.LabelSet{"traceID": "test3"},
						Value:     0.007545445,
						Timestamp: p.TimeFromUnixNano(time.Now().Add(-6 * time.Minute).UnixNano()),
					},
					{
						Labels:    p.LabelSet{"traceID": "test4"},
						Value:     0.009545445,
						Timestamp: p.TimeFromUnixNano(time.Now().Add(-7 * time.Minute).UnixNano()),
					},
				},
			},
		}
		tctx, err := setup()
		require.NoError(t, err)
		qm := models.QueryModel{
			UtcOffsetSec: 0,
			PrometheusQueryProperties: models.PrometheusQueryProperties{
				LegendFormat: "legend {{app}}",
				Exemplar:     true,
			},
		}
		b, err := json.Marshal(&qm)
		require.NoError(t, err)
		query := backend.DataQuery{
			RefID: "A",
			JSON:  b,
		}
		res, err := execute(tctx, query, exemplars)
		require.NoError(t, err)
		// Test fields
		require.Len(t, res, 1)
		// require.Equal(t, res[0].Name, "exemplar")
		require.Equal(t, res[0].Fields[0].Name, "Time")
		require.Equal(t, res[0].Fields[1].Name, "Value")
		require.Len(t, res[0].Fields, 6)
		// Test correct values (sampled to 2)
		require.Equal(t, res[0].Fields[1].Len(), 2)
		require.Equal(t, res[0].Fields[1].At(0), 0.009545445)
		require.Equal(t, res[0].Fields[1].At(1), 0.003535405)
	})
	t.Run("matrix response should be parsed normally", func(t *testing.T) {
		values := []p.SamplePair{
			{Value: 1, Timestamp: 1000},
			{Value: 2, Timestamp: 2000},
			{Value: 3, Timestamp: 3000},
			{Value: 4, Timestamp: 4000},
			{Value: 5, Timestamp: 5000},
		}
		result := queryResult{
			Type: p.ValMatrix,
			Result: p.Matrix{
				&p.SampleStream{
					Metric: p.Metric{"app": "Application", "tag2": "tag2"},
					Values: values,
				},
			},
		}
		qm := models.QueryModel{
			UtcOffsetSec: 0,
			PrometheusQueryProperties: models.PrometheusQueryProperties{
				Range:        true,
				LegendFormat: "legend {{app}}",
			},
		}
		b, err := json.Marshal(&qm)
		require.NoError(t, err)
		query := backend.DataQuery{
			TimeRange: backend.TimeRange{
				From: time.Unix(1, 0).UTC(),
				To:   time.Unix(5, 0).UTC(),
			},
			JSON: b,
		}
		tctx, err := setup()
		require.NoError(t, err)
		res, err := execute(tctx, query, result)
		require.NoError(t, err)
		require.Len(t, res, 1)
		require.Len(t, res[0].Fields, 2)
		require.Len(t, res[0].Fields[0].Labels, 0)
		require.Equal(t, "Time", res[0].Fields[0].Name)
		require.Len(t, res[0].Fields[1].Labels, 2)
		require.Equal(t, "app=Application, tag2=tag2", res[0].Fields[1].Labels.String())
		require.Equal(t, "legend Application", res[0].Name)
		// Ensure the timestamps are UTC zoned
		testValue := res[0].Fields[0].At(0)
		require.Equal(t, "UTC", testValue.(time.Time).Location().String())
	})
	t.Run("matrix response with missed data points should be parsed correctly", func(t *testing.T) {
		values := []p.SamplePair{
			{Value: 1, Timestamp: 1000},
			{Value: 4, Timestamp: 4000},
		}
		result := queryResult{
			Type: p.ValMatrix,
			Result: p.Matrix{
				&p.SampleStream{
					Metric: p.Metric{"app": "Application", "tag2": "tag2"},
					Values: values,
				},
			},
		}
		qm := models.QueryModel{
			UtcOffsetSec: 0,
			PrometheusQueryProperties: models.PrometheusQueryProperties{
				Range:        true,
				LegendFormat: "",
			},
		}
		b, err := json.Marshal(&qm)
		require.NoError(t, err)
		query := backend.DataQuery{
			TimeRange: backend.TimeRange{
				From: time.Unix(1, 0).UTC(),
				To:   time.Unix(4, 0).UTC(),
			},
			JSON: b,
		}
		tctx, err := setup()
		require.NoError(t, err)
		res, err := execute(tctx, query, result)
		require.NoError(t, err)
		require.Len(t, res, 1)
		require.Equal(t, res[0].Fields[0].Len(), 2)
		require.Equal(t, time.Unix(1, 0).UTC(), res[0].Fields[0].At(0))
		require.Equal(t, time.Unix(4, 0).UTC(), res[0].Fields[0].At(1))
		require.Equal(t, res[0].Fields[1].Len(), 2)
		require.Equal(t, float64(1), res[0].Fields[1].At(0).(float64))
		require.Equal(t, float64(4), res[0].Fields[1].At(1).(float64))
	})
	t.Run("matrix response with from alerting missed data points should be parsed correctly", func(t *testing.T) {
		values := []p.SamplePair{
			{Value: 1, Timestamp: 1000},
			{Value: 4, Timestamp: 4000},
		}
		result := queryResult{
			Type: p.ValMatrix,
			Result: p.Matrix{
				&p.SampleStream{
					Metric: p.Metric{"app": "Application", "tag2": "tag2"},
					Values: values,
				},
			},
		}
		qm := models.QueryModel{
			UtcOffsetSec: 0,
			PrometheusQueryProperties: models.PrometheusQueryProperties{
				Range:        true,
				LegendFormat: "",
			},
		}
		b, err := json.Marshal(&qm)
		require.NoError(t, err)
		query := backend.DataQuery{
			TimeRange: backend.TimeRange{
				From: time.Unix(1, 0).UTC(),
				To:   time.Unix(4, 0).UTC(),
			},
			JSON: b,
		}
		tctx, err := setup()
		require.NoError(t, err)
		res, err := execute(tctx, query, result)
		require.NoError(t, err)
		require.Len(t, res, 1)
		require.Len(t, res[0].Fields, 2)
		require.Len(t, res[0].Fields[0].Labels, 0)
		require.Equal(t, res[0].Fields[0].Name, "Time")
		require.Len(t, res[0].Fields[1].Labels, 2)
		require.Equal(t, res[0].Fields[1].Labels.String(), "app=Application, tag2=tag2")
		require.Equal(t, "{app=\"Application\", tag2=\"tag2\"}", res[0].Name)
	})
	t.Run("matrix response with NaN value should be changed to null", func(t *testing.T) {
		result := queryResult{
			Type: p.ValMatrix,
			Result: p.Matrix{
				&p.SampleStream{
					Metric: p.Metric{"app": "Application"},
					Values: []p.SamplePair{
						{Value: p.SampleValue(math.NaN()), Timestamp: 1000},
					},
				},
			},
		}
		qm := models.QueryModel{
			UtcOffsetSec: 0,
			PrometheusQueryProperties: models.PrometheusQueryProperties{
				Range:        true,
				LegendFormat: "",
			},
		}
		b, err := json.Marshal(&qm)
		require.NoError(t, err)
		query := backend.DataQuery{
			TimeRange: backend.TimeRange{
				From: time.Unix(1, 0).UTC(),
				To:   time.Unix(4, 0).UTC(),
			},
			JSON: b,
		}
		tctx, err := setup()
		require.NoError(t, err)
		res, err := execute(tctx, query, result)
		require.NoError(t, err)
		require.Equal(t, "{app=\"Application\"}", res[0].Name)
		require.True(t, math.IsNaN(res[0].Fields[1].At(0).(float64)))
	})
	t.Run("vector response should be parsed normally", func(t *testing.T) {
		qr := queryResult{
			Type: p.ValVector,
			Result: p.Vector{
				&p.Sample{
					Metric:    p.Metric{"app": "Application", "tag2": "tag2"},
					Value:     1,
					Timestamp: 123,
				},
			},
		}
		qm := models.QueryModel{
			UtcOffsetSec: 0,
			PrometheusQueryProperties: models.PrometheusQueryProperties{
				Instant:      true,
				LegendFormat: "legend {{app}}",
			},
		}
		b, err := json.Marshal(&qm)
		require.NoError(t, err)
		query := backend.DataQuery{
			JSON: b,
		}
		tctx, err := setup()
		require.NoError(t, err)
		res, err := execute(tctx, query, qr)
		require.NoError(t, err)
		require.Len(t, res, 1)
		require.Len(t, res[0].Fields, 2)
		require.Len(t, res[0].Fields[0].Labels, 0)
		require.Equal(t, res[0].Fields[0].Name, "Time")
		require.Len(t, res[0].Fields[1].Labels, 2)
		require.Equal(t, res[0].Fields[1].Labels.String(), "app=Application, tag2=tag2")
		require.Equal(t, "legend Application", res[0].Name)
		// Ensure the timestamps are UTC zoned
		testValue := res[0].Fields[0].At(0)
		require.Equal(t, "UTC", testValue.(time.Time).Location().String())
		require.Equal(t, int64(123), testValue.(time.Time).UnixMilli())
	})
	t.Run("scalar response should be parsed normally", func(t *testing.T) {
		t.Skip("TODO: implement scalar responses")
		qr := queryResult{
			Type: p.ValScalar,
			Result: &p.Scalar{
				Value:     1,
				Timestamp: 123,
			},
		}
		qm := models.QueryModel{
			UtcOffsetSec: 0,
			PrometheusQueryProperties: models.PrometheusQueryProperties{
				Instant:      true,
				LegendFormat: "",
			},
		}
		b, err := json.Marshal(&qm)
		require.NoError(t, err)
		query := backend.DataQuery{
			JSON: b,
		}
		tctx, err := setup()
		require.NoError(t, err)
		res, err := execute(tctx, query, qr)
		require.NoError(t, err)
		require.Len(t, res, 1)
		require.Len(t, res[0].Fields, 2)
		require.Len(t, res[0].Fields[0].Labels, 0)
		require.Equal(t, res[0].Fields[0].Name, "Time")
		require.Equal(t, "1", res[0].Fields[1].Name)
		// Ensure the timestamps are UTC zoned
		testValue := res[0].Fields[0].At(0)
		require.Equal(t, "UTC", testValue.(time.Time).Location().String())
		require.Equal(t, int64(123), testValue.(time.Time).UnixMilli())
	})
}
// queryResult mirrors the relevant subset of a Prometheus API response
// payload: the result-type discriminator plus the raw result, which is
// marshalled as-is into the fake API response built by toAPIResponse.
type queryResult struct {
	Type   p.ValueType `json:"resultType"`
	Result any         `json:"result"`
}
// executeWithHeaders runs a single query through the fake Prometheus data
// source, serving qr (wrapped in a success envelope) as the HTTP response
// body, and returns the frames produced for the query's RefID.
func executeWithHeaders(tctx *testContext, query backend.DataQuery, qr any, headers map[string]string) (data.Frames, error) {
	req := backend.QueryDataRequest{
		Queries: []backend.DataQuery{query},
		Headers: headers,
	}

	promRes, err := toAPIResponse(qr)
	// Check the error BEFORE registering the deferred Close: the original
	// code deferred first, so a toAPIResponse failure left promRes nil and
	// the deferred promRes.Body.Close() panicked with a nil dereference.
	if err != nil {
		return nil, err
	}
	defer func() {
		if err := promRes.Body.Close(); err != nil {
			fmt.Println(fmt.Errorf("response body close error: %v", err))
		}
	}()

	tctx.httpProvider.setResponse(promRes)

	res, err := tctx.queryData.Execute(context.Background(), &req)
	if err != nil {
		return nil, err
	}

	return res.Responses[req.Queries[0].RefID].Frames, nil
}
// execute runs a single query with no extra HTTP headers; see
// executeWithHeaders for the full behavior.
func execute(tctx *testContext, query backend.DataQuery, qr any) (data.Frames, error) {
	return executeWithHeaders(tctx, query, qr, map[string]string{})
}
// apiResponse is the envelope of a Prometheus HTTP API response: a status
// string plus the raw, not-yet-decoded data payload.
type apiResponse struct {
	Status string          `json:"status"`
	Data   json.RawMessage `json:"data"`
}
// toAPIResponse wraps d in a successful Prometheus API envelope and returns
// it as an HTTP 200 response whose body is the serialized JSON payload.
func toAPIResponse(d any) (*http.Response, error) {
	payload, err := json.Marshal(d)
	if err != nil {
		return nil, err
	}

	envelope, err := json.Marshal(&apiResponse{
		Status: "success",
		Data:   json.RawMessage(payload),
	})
	if err != nil {
		return nil, err
	}

	return &http.Response{
		StatusCode: 200,
		Body:       io.NopCloser(bytes.NewReader(envelope)),
	}, nil
}
// testContext bundles the fake HTTP layer with the query-data service under
// test, so individual tests can inject canned Prometheus responses.
type testContext struct {
	httpProvider *fakeHttpClientProvider
	queryData    *querydata.QueryData
}
// setup builds a testContext wired to a fake HTTP provider that returns an
// empty JSON body by default; tests override the response via setResponse.
func setup() (*testContext, error) {
	httpProvider := &fakeHttpClientProvider{
		opts: httpclient.Options{
			Timeouts: &httpclient.DefaultTimeoutOptions,
		},
		res: &http.Response{
			StatusCode: 200,
			Body:       io.NopCloser(bytes.NewReader([]byte(`{}`))),
		},
	}
	settings := backend.DataSourceInstanceSettings{
		URL:      "http://localhost:9090",
		JSONData: json.RawMessage(`{"timeInterval": "15s"}`),
	}

	opts, err := client.CreateTransportOptions(context.Background(), settings, log.New())
	if err != nil {
		return nil, err
	}
	httpClient, err := httpProvider.New(*opts)
	if err != nil {
		return nil, err
	}
	// Propagate construction errors instead of discarding them with `_`;
	// a nil queryData would otherwise panic later in the tests.
	queryData, err := querydata.New(httpClient, settings, log.New())
	if err != nil {
		return nil, err
	}

	return &testContext{
		httpProvider: httpProvider,
		queryData:    queryData,
	}, nil
}
// fakeHttpClientProvider is a test double for the SDK HTTP client provider.
// It records the options and last request it sees and answers every round
// trip with the canned response set via setResponse.
type fakeHttpClientProvider struct {
	httpclient.Provider
	opts httpclient.Options // last options passed to New/GetTransport
	req  *http.Request      // last request observed by RoundTrip
	res  *http.Response     // canned response returned by RoundTrip
}
// New records the supplied options and returns an *http.Client whose
// transport is the fake provider itself, so every request it issues is
// answered by the canned response.
func (p *fakeHttpClientProvider) New(opts ...httpclient.Options) (*http.Client, error) {
	first := opts[0]
	p.opts = first

	httpClient, err := httpclient.New(first)
	if err != nil {
		return nil, err
	}
	httpClient.Transport = p

	return httpClient, nil
}
// GetTransport records the supplied options and hands back the default
// transport; the fake never builds a real custom round tripper here.
func (p *fakeHttpClientProvider) GetTransport(opts ...httpclient.Options) (http.RoundTripper, error) {
	p.opts = opts[0]
	return http.DefaultTransport, nil
}
// setResponse sets the canned HTTP response returned by RoundTrip.
func (p *fakeHttpClientProvider) setResponse(res *http.Response) {
	p.res = res
}
// RoundTrip implements http.RoundTripper: it records the outgoing request
// and unconditionally returns the canned response.
func (p *fakeHttpClientProvider) RoundTrip(req *http.Request) (*http.Response, error) {
	p.req = req
	return p.res, nil
}

View File

@@ -0,0 +1,201 @@
package querydata
import (
"context"
"fmt"
"net/http"
"sort"
"strings"
"time"
"github.com/grafana/grafana-plugin-sdk-go/backend"
"github.com/grafana/grafana-plugin-sdk-go/data"
jsoniter "github.com/json-iterator/go"
"github.com/grafana/grafana/pkg/promlib/converter"
"github.com/grafana/grafana/pkg/promlib/models"
"github.com/grafana/grafana/pkg/promlib/querydata/exemplar"
"github.com/grafana/grafana/pkg/promlib/utils"
)
// parseResponse converts a raw Prometheus HTTP response into a backend
// DataResponse. It stream-parses the body through the converter (closing the
// body when done), attaches per-frame metadata, and — when no parse error
// occurred — post-processes exemplar frames into a single frame.
func (s *QueryData) parseResponse(ctx context.Context, q *models.Query, res *http.Response, enablePrometheusDataplaneFlag bool) backend.DataResponse {
	defer func() {
		if err := res.Body.Close(); err != nil {
			s.log.FromContext(ctx).Error("Failed to close response body", "err", err)
		}
	}()

	ctx, endSpan := utils.StartTrace(ctx, s.tracer, "datasource.prometheus.parseResponse")
	defer endSpan()

	// Stream-parse the body instead of buffering the whole payload.
	iter := jsoniter.Parse(jsoniter.ConfigDefault, res.Body, 1024)
	r := converter.ReadPrometheusStyleResult(iter, converter.Options{
		Dataplane: enablePrometheusDataplaneFlag,
	})
	// Surface the upstream HTTP status on the data response.
	r.Status = backend.Status(res.StatusCode)

	// Add frame to attach metadata even when the result set is empty
	// (exemplar queries are excluded).
	if len(r.Frames) == 0 && !q.ExemplarQuery {
		r.Frames = append(r.Frames, data.NewFrame(""))
	}

	// The ExecutedQueryString can be viewed in QueryInspector in UI;
	// it is only attached to the first frame.
	for i, frame := range r.Frames {
		addMetadataToMultiFrame(q, frame, enablePrometheusDataplaneFlag)
		if i == 0 {
			frame.Meta.ExecutedQueryString = executedQueryString(q)
		}
	}

	if r.Error == nil {
		r = s.processExemplars(ctx, q, r)
	}

	return r
}
// processExemplars collapses the converter's per-series exemplar frames into
// a single exemplar frame, downsampling rows through the configured exemplar
// sampler. Non-exemplar frames pass through untouched.
func (s *QueryData) processExemplars(ctx context.Context, q *models.Query, dr backend.DataResponse) backend.DataResponse {
	_, endSpan := utils.StartTrace(ctx, s.tracer, "datasource.prometheus.processExemplars")
	defer endSpan()
	sampler := s.exemplarSampler()
	labelTracker := exemplar.NewLabelTracker()

	// we are moving from a multi-frame response returned
	// by the converter to a single exemplar frame,
	// so we need to build a new frame array with the
	// old exemplar frames filtered out
	framer := exemplar.NewFramer(sampler, labelTracker)

	for _, frame := range dr.Frames {
		// we don't need to process non-exemplar frames
		// so they can be added to the response
		if !isExemplarFrame(frame) {
			framer.AddFrame(frame)
			continue
		}

		// copy the current exemplar frame metadata
		framer.SetMeta(frame.Meta)
		framer.SetRefID(frame.RefID)

		// The query step is stored on the time field's config (set in
		// addMetadataToMultiFrame); the sampler buckets rows by it.
		step := time.Duration(frame.Fields[0].Config.Interval) * time.Millisecond
		sampler.SetStep(step)

		// Frame layout (per the type assertions below): field 0 is time,
		// field 1 is the float value, fields 2+ carry exemplar labels.
		seriesLabels := getSeriesLabels(frame)
		labelTracker.Add(seriesLabels)
		labelTracker.AddFields(frame.Fields[2:])

		// Feed every row into the sampler, which decides what to keep.
		for rowIdx := 0; rowIdx < frame.Fields[0].Len(); rowIdx++ {
			ts := frame.CopyAt(0, rowIdx).(time.Time)
			val := frame.CopyAt(1, rowIdx).(float64)
			ex := models.Exemplar{
				RowIdx:       rowIdx,
				Fields:       frame.Fields[2:],
				Value:        val,
				Timestamp:    ts,
				SeriesLabels: seriesLabels,
			}
			sampler.Add(ex)
		}
	}

	frames, err := framer.Frames()

	return backend.DataResponse{
		Frames: frames,
		Error:  err,
	}
}
// addMetadataToMultiFrame attaches the query step to the time field, a
// display name to the value field, and — depending on the dataplane flag —
// either renames the value field after its metric or names the whole frame.
func addMetadataToMultiFrame(q *models.Query, frame *data.Frame, enableDataplane bool) {
	if frame.Meta == nil {
		frame.Meta = &data.FrameMeta{}
	}
	if len(frame.Fields) < 2 {
		return
	}

	timeField, valueField := frame.Fields[0], frame.Fields[1]
	timeField.Config = &data.FieldConfig{Interval: float64(q.Step.Milliseconds())}

	displayName := getName(q, valueField)
	if displayName != "" {
		valueField.Config = &data.FieldConfig{DisplayNameFromDS: displayName}
	}

	if !enableDataplane {
		frame.Name = displayName
		return
	}
	if metricName, ok := valueField.Labels["__name__"]; ok {
		valueField.Name = metricName
	}
}
// metricNameFromLabels renders the field's label set in Prometheus sample
// notation: `name{k="v", ...}`, just `name` when there are no other labels,
// or `{}` when the label set is empty. This is based on the logic from the
// String() function in github.com/prometheus/common/model.go.
func metricNameFromLabels(f *data.Field) string {
	labels := f.Labels
	metricName, hasName := labels["__name__"]

	pairs := make([]string, 0, len(labels))
	for name, value := range labels {
		if name == "__name__" {
			continue
		}
		pairs = append(pairs, fmt.Sprintf("%s=%q", name, value))
	}

	if len(pairs) == 0 {
		if hasName {
			return metricName
		}
		return "{}"
	}

	sort.Strings(pairs)
	return fmt.Sprintf("%s{%s}", metricName, strings.Join(pairs, ", "))
}
// executedQueryString builds the human-readable query summary shown in the
// panel's query inspector.
func executedQueryString(q *models.Query) string {
	return fmt.Sprintf("Expr: %s\nStep: %s", q.Expr, q.Step.String())
}
// getName resolves the display name for a series from the query's legend
// format: __auto hides the metric string when labels exist, a template has
// its {{label}} placeholders substituted, and an empty format keeps the
// Prometheus-style metric string.
func getName(q *models.Query, field *data.Field) string {
	labels := field.Labels
	legend := metricNameFromLabels(field)

	switch {
	case q.LegendFormat == legendFormatAuto:
		if len(labels) > 0 {
			legend = ""
		}
	case q.LegendFormat != "":
		legend = legendFormatRegexp.ReplaceAllStringFunc(q.LegendFormat, func(match string) string {
			name := strings.Replace(match, "{{", "", 1)
			name = strings.Replace(name, "}}", "", 1)
			name = strings.TrimSpace(name)
			if value, ok := labels[name]; ok {
				return value
			}
			return ""
		})
	}

	// If legend is empty brackets, use query expression
	if legend == "{}" {
		return q.Expr
	}
	return legend
}
// isExemplarFrame reports whether the frame carries exemplar results, based
// on the result type recorded on the frame by the converter.
func isExemplarFrame(frame *data.Frame) bool {
	rt := models.ResultTypeFromFrame(frame)
	return rt == models.ResultTypeExemplar
}
// getSeriesLabels returns a copy of the series labels of an exemplar frame.
func getSeriesLabels(frame *data.Frame) data.Labels {
	// series labels are stored on the value field (index 1)
	return frame.Fields[1].Labels.Copy()
}

View File

@@ -0,0 +1,58 @@
package querydata
import (
"bytes"
"context"
"io"
"net/http"
"testing"
"github.com/stretchr/testify/assert"
"github.com/grafana/grafana/pkg/promlib/models"
"github.com/grafana/grafana/pkg/promlib/querydata/exemplar"
)
// TestQueryData_parseResponse exercises body parsing for different orderings
// of the resultType and result keys, including missing and empty resultType.
func TestQueryData_parseResponse(t *testing.T) {
	qd := QueryData{exemplarSampler: exemplar.NewStandardDeviationSampler}

	testCases := []struct {
		name    string
		body    string
		wantErr string // empty means the body must parse without error
	}{
		{
			name: "resultType is before result the field must parsed normally",
			body: `{"data":{"resultType":"vector", "result":[{"metric":{"__name__":"some_name","environment":"some_env","id":"some_id","instance":"some_instance:1234","job":"some_job","name":"another_name","region":"some_region"},"value":[1.1,"2"]}]},"status":"success"}`,
		},
		{
			name: "resultType is after the result field must parsed normally",
			body: `{"data":{"result":[{"metric":{"__name__":"some_name","environment":"some_env","id":"some_id","instance":"some_instance:1234","job":"some_job","name":"another_name","region":"some_region"},"value":[1.1,"2"]}],"resultType":"vector"},"status":"success"}`,
		},
		{
			name:    "no resultType is existed in the data",
			body:    `{"data":{"result":[{"metric":{"__name__":"some_name","environment":"some_env","id":"some_id","instance":"some_instance:1234","job":"some_job","name":"another_name","region":"some_region"},"value":[1.1,"2"]}]},"status":"success"}`,
			wantErr: "no resultType found",
		},
		{
			name:    "resultType is set as empty string before result",
			body:    `{"data":{"resultType":"", "result":[{"metric":{"__name__":"some_name","environment":"some_env","id":"some_id","instance":"some_instance:1234","job":"some_job","name":"another_name","region":"some_region"},"value":[1.1,"2"]}]},"status":"success"}`,
			wantErr: "unknown result type: ",
		},
		{
			name:    "resultType is set as empty string after result",
			body:    `{"data":{"result":[{"metric":{"__name__":"some_name","environment":"some_env","id":"some_id","instance":"some_instance:1234","job":"some_job","name":"another_name","region":"some_region"},"value":[1.1,"2"]}],"resultType":""},"status":"success"}`,
			wantErr: "unknown result type: ",
		},
	}

	for _, tc := range testCases {
		t.Run(tc.name, func(t *testing.T) {
			res := &http.Response{Body: io.NopCloser(bytes.NewBufferString(tc.body))}
			result := qd.parseResponse(context.Background(), &models.Query{}, res, false)
			if tc.wantErr == "" {
				assert.Nil(t, result.Error)
				assert.Len(t, result.Frames, 1)
				return
			}
			assert.Error(t, result.Error)
			assert.Equal(t, result.Error.Error(), tc.wantErr)
		})
	}
}

View File

@@ -0,0 +1,86 @@
package resource
import (
"bytes"
"context"
"fmt"
"net/http"
"github.com/grafana/grafana-plugin-sdk-go/backend"
"github.com/grafana/grafana-plugin-sdk-go/backend/log"
"github.com/grafana/grafana-plugin-sdk-go/data/utils/maputil"
"github.com/grafana/grafana/pkg/promlib/client"
"github.com/grafana/grafana/pkg/promlib/utils"
)
// Resource proxies Prometheus resource (non-query) API calls through the
// configured Prometheus client.
type Resource struct {
	promClient *client.Client
	log        log.Logger
}
// New creates a Resource that forwards resource calls to Prometheus using
// the HTTP method configured in the datasource JSON settings (POST when not
// set).
func New(
	httpClient *http.Client,
	settings backend.DataSourceInstanceSettings,
	plog log.Logger,
) (*Resource, error) {
	jsonData, err := utils.GetJsonData(settings)
	if err != nil {
		return nil, err
	}
	// Surface a present-but-invalid httpMethod instead of silently falling
	// back to POST (the error was previously discarded with `_`).
	httpMethod, err := maputil.GetStringOptional(jsonData, "httpMethod")
	if err != nil {
		return nil, err
	}
	if httpMethod == "" {
		httpMethod = http.MethodPost
	}

	return &Resource{
		log:        plog,
		promClient: client.NewClient(httpClient, httpMethod, settings.URL),
	}, nil
}
// Execute forwards the resource request to Prometheus and returns the
// upstream status, headers and body. When the frontend sets the
// X-Grafana-Cache header, the desired Cache-Control value is echoed back.
func (r *Resource) Execute(ctx context.Context, req *backend.CallResourceRequest) (_ *backend.CallResourceResponse, err error) {
	r.log.FromContext(ctx).Debug("Sending resource query", "URL", req.URL)
	resp, err := r.promClient.QueryResource(ctx, req)
	if err != nil {
		return nil, fmt.Errorf("error querying resource: %v", err)
	}

	// frontend sets the X-Grafana-Cache with the desired response cache control value
	if len(req.GetHTTPHeaders().Get("X-Grafana-Cache")) > 0 {
		resp.Header.Set("X-Grafana-Cache", "y")
		resp.Header.Set("Cache-Control", req.GetHTTPHeaders().Get("X-Grafana-Cache"))
	}

	// The error result is named so this deferred assignment actually reaches
	// the caller; previously the function had unnamed results, so the close
	// error written here was silently discarded.
	defer func() {
		if closeErr := resp.Body.Close(); closeErr != nil && err == nil {
			err = closeErr
		}
	}()

	var buf bytes.Buffer
	// Should be more efficient than ReadAll. See https://github.com/prometheus/client_golang/pull/976
	if _, err = buf.ReadFrom(resp.Body); err != nil {
		return nil, err
	}

	return &backend.CallResourceResponse{
		Status:  resp.StatusCode,
		Headers: resp.Header,
		Body:    buf.Bytes(),
	}, nil
}
// DetectVersion calls the Prometheus build-info endpoint, reusing the
// incoming request's plugin context; the response lets callers determine the
// server's version.
func (r *Resource) DetectVersion(ctx context.Context, req *backend.CallResourceRequest) (*backend.CallResourceResponse, error) {
	newReq := &backend.CallResourceRequest{
		PluginContext: req.PluginContext,
		Path:          "/api/v1/status/buildinfo",
	}

	return r.Execute(ctx, newReq)
}

View File

@@ -0,0 +1,8 @@
{
"RefId": "A",
"ExemplarQuery": true,
"Start": 1654086510,
"End": 1654086810,
"Step": 15,
"Expr": "histogram_quantile(0.99, sum(rate(traces_spanmetrics_duration_seconds_bucket[15s])) by (le))"
}

File diff suppressed because it is too large Load Diff

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,9 @@
{
"RefId": "A",
"RangeQuery": true,
"Start": 1664376185,
"End": 1664376485,
"Step": 1,
"LegendFormat": "__auto",
"Expr": "histogram_quantile(0.95, sum(rate(tns_request_duration_seconds_bucket[$__rate_interval])) by (le))"
}

View File

@@ -0,0 +1,688 @@
// 🌟 This was machine generated. Do not edit. 🌟
//
// Frame[0] {
// "type": "timeseries-multi",
// "typeVersion": [
// 0,
// 0
// ],
// "custom": {
// "resultType": "matrix"
// },
// "executedQueryString": "Expr: histogram_quantile(0.95, sum(rate(tns_request_duration_seconds_bucket[4s])) by (le))\nStep: 1s"
// }
// Name: histogram_quantile(0.95, sum(rate(tns_request_duration_seconds_bucket[4s])) by (le))
// Dimensions: 2 Fields by 301 Rows
// +-----------------------------------+----------------------+
// | Name: Time | Name: Value |
// | Labels: | Labels: |
// | Type: []time.Time | Type: []float64 |
// +-----------------------------------+----------------------+
// | 2022-09-28 14:43:05.491 +0000 UTC | 0.004754464285714286 |
// | 2022-09-28 14:43:06.491 +0000 UTC | 0.004754464285714286 |
// | 2022-09-28 14:43:07.491 +0000 UTC | 0.004754464285714286 |
// | 2022-09-28 14:43:08.491 +0000 UTC | 0.004754464285714286 |
// | 2022-09-28 14:43:09.491 +0000 UTC | 0.004754481132075472 |
// | 2022-09-28 14:43:10.491 +0000 UTC | 0.004754481132075472 |
// | 2022-09-28 14:43:11.491 +0000 UTC | 0.004754481132075472 |
// | 2022-09-28 14:43:12.491 +0000 UTC | 0.004754481132075472 |
// | 2022-09-28 14:43:13.491 +0000 UTC | 0.004754481132075472 |
// | ... | ... |
// +-----------------------------------+----------------------+
//
//
// 🌟 This was machine generated. Do not edit. 🌟
{
"status": 200,
"frames": [
{
"schema": {
"name": "histogram_quantile(0.95, sum(rate(tns_request_duration_seconds_bucket[4s])) by (le))",
"meta": {
"type": "timeseries-multi",
"typeVersion": [
0,
0
],
"custom": {
"resultType": "matrix"
},
"executedQueryString": "Expr: histogram_quantile(0.95, sum(rate(tns_request_duration_seconds_bucket[4s])) by (le))\nStep: 1s"
},
"fields": [
{
"name": "Time",
"type": "time",
"typeInfo": {
"frame": "time.Time"
},
"config": {
"interval": 1000
}
},
{
"name": "Value",
"type": "number",
"typeInfo": {
"frame": "float64"
},
"labels": {},
"config": {
"displayNameFromDS": "histogram_quantile(0.95, sum(rate(tns_request_duration_seconds_bucket[4s])) by (le))"
}
}
]
},
"data": {
"values": [
[
1664376185491,
1664376186491,
1664376187491,
1664376188491,
1664376189491,
1664376190491,
1664376191491,
1664376192491,
1664376193491,
1664376194491,
1664376195491,
1664376196491,
1664376197491,
1664376198491,
1664376199491,
1664376200491,
1664376201491,
1664376202491,
1664376203491,
1664376204491,
1664376205491,
1664376206491,
1664376207491,
1664376208491,
1664376209491,
1664376210491,
1664376211491,
1664376212491,
1664376213491,
1664376214491,
1664376215491,
1664376216491,
1664376217491,
1664376218491,
1664376219491,
1664376220491,
1664376221491,
1664376222491,
1664376223491,
1664376224491,
1664376225491,
1664376226491,
1664376227491,
1664376228491,
1664376229491,
1664376230491,
1664376231491,
1664376232491,
1664376233491,
1664376234491,
1664376235491,
1664376236491,
1664376237491,
1664376238491,
1664376239491,
1664376240491,
1664376241491,
1664376242491,
1664376243491,
1664376244491,
1664376245491,
1664376246491,
1664376247491,
1664376248491,
1664376249491,
1664376250491,
1664376251491,
1664376252491,
1664376253491,
1664376254491,
1664376255491,
1664376256491,
1664376257491,
1664376258491,
1664376259491,
1664376260491,
1664376261491,
1664376262491,
1664376263491,
1664376264491,
1664376265491,
1664376266491,
1664376267491,
1664376268491,
1664376269491,
1664376270491,
1664376271491,
1664376272491,
1664376273491,
1664376274491,
1664376275491,
1664376276491,
1664376277491,
1664376278491,
1664376279491,
1664376280491,
1664376281491,
1664376282491,
1664376283491,
1664376284491,
1664376285491,
1664376286491,
1664376287491,
1664376288491,
1664376289491,
1664376290491,
1664376291491,
1664376292491,
1664376293491,
1664376294491,
1664376295491,
1664376296491,
1664376297491,
1664376298491,
1664376299491,
1664376300491,
1664376301491,
1664376302491,
1664376303491,
1664376304491,
1664376305491,
1664376306491,
1664376307491,
1664376308491,
1664376309491,
1664376310491,
1664376311491,
1664376312491,
1664376313491,
1664376314491,
1664376315491,
1664376316491,
1664376317491,
1664376318491,
1664376319491,
1664376320491,
1664376321491,
1664376322491,
1664376323491,
1664376324491,
1664376325491,
1664376326491,
1664376327491,
1664376328491,
1664376329491,
1664376330491,
1664376331491,
1664376332491,
1664376333491,
1664376334491,
1664376335491,
1664376336491,
1664376337491,
1664376338491,
1664376339491,
1664376340491,
1664376341491,
1664376342491,
1664376343491,
1664376344491,
1664376345491,
1664376346491,
1664376347491,
1664376348491,
1664376349491,
1664376350491,
1664376351491,
1664376352491,
1664376353491,
1664376354491,
1664376355491,
1664376356491,
1664376357491,
1664376358491,
1664376359491,
1664376360491,
1664376361491,
1664376362491,
1664376363491,
1664376364491,
1664376365491,
1664376366491,
1664376367491,
1664376368491,
1664376369491,
1664376370491,
1664376371491,
1664376372491,
1664376373491,
1664376374491,
1664376375491,
1664376376491,
1664376377491,
1664376378491,
1664376379491,
1664376380491,
1664376381491,
1664376382491,
1664376383491,
1664376384491,
1664376385491,
1664376386491,
1664376387491,
1664376388491,
1664376389491,
1664376390491,
1664376391491,
1664376392491,
1664376393491,
1664376394491,
1664376395491,
1664376396491,
1664376397491,
1664376398491,
1664376399491,
1664376400491,
1664376401491,
1664376402491,
1664376403491,
1664376404491,
1664376405491,
1664376406491,
1664376407491,
1664376408491,
1664376409491,
1664376410491,
1664376411491,
1664376412491,
1664376413491,
1664376414491,
1664376415491,
1664376416491,
1664376417491,
1664376418491,
1664376419491,
1664376420491,
1664376421491,
1664376422491,
1664376423491,
1664376424491,
1664376425491,
1664376426491,
1664376427491,
1664376428491,
1664376429491,
1664376430491,
1664376431491,
1664376432491,
1664376433491,
1664376434491,
1664376435491,
1664376436491,
1664376437491,
1664376438491,
1664376439491,
1664376440491,
1664376441491,
1664376442491,
1664376443491,
1664376444491,
1664376445491,
1664376446491,
1664376447491,
1664376448491,
1664376449491,
1664376450491,
1664376451491,
1664376452491,
1664376453491,
1664376454491,
1664376455491,
1664376456491,
1664376457491,
1664376458491,
1664376459491,
1664376460491,
1664376461491,
1664376462491,
1664376463491,
1664376464491,
1664376465491,
1664376466491,
1664376467491,
1664376468491,
1664376469491,
1664376470491,
1664376471491,
1664376472491,
1664376473491,
1664376474491,
1664376475491,
1664376476491,
1664376477491,
1664376478491,
1664376479491,
1664376480491,
1664376481491,
1664376482491,
1664376483491,
1664376484491,
1664376485491
],
[
0.004754464285714286,
0.004754464285714286,
0.004754464285714286,
0.004754464285714286,
0.004754481132075472,
0.004754481132075472,
0.004754481132075472,
0.004754481132075472,
0.004754481132075472,
0.004754481132075472,
0.004754532442748091,
0.004754532442748091,
0.004754532442748091,
0.004754532442748091,
0.004754532442748091,
0.004754532442748091,
0.004754532442748091,
0.004754532442748091,
0.004754532442748091,
0.004754620622568094,
0.004754620622568094,
0.004754620622568094,
0.004754620622568094,
0.004754620622568094,
0.004754620622568094,
0.00475462962962963,
0.00475462962962963,
0.00475462962962963,
0.00475462962962963,
0.00475462962962963,
0.00475462962962963,
0.00475462962962963,
0.00475462962962963,
0.00475462962962963,
0.004754625121713728,
0.004754625121713728,
0.004754625121713728,
0.004754625121713728,
0.004754625121713728,
0.004754625121713728,
0.004754638671874999,
0.004754638671874999,
0.004754638671874999,
0.004754638671874999,
0.004754638671874999,
0.004754638671874999,
0.004754638671874999,
0.004754638671874999,
0.004754638671874999,
0.00475,
0.00475,
0.00475,
0.00475,
0.00475,
0.00475,
0.00475,
0.00475,
0.00475,
0.00475,
0.00475,
0.00475,
0.00475,
0.00475,
0.00475,
0.00475,
0.00475,
0.00475,
0.00475,
0.00475,
0.00475,
0.00475,
0.00475,
0.00475,
0.00475,
0.00475,
0.00475,
0.00475,
0.00475,
0.00475,
0.004750000000000001,
0.004750000000000001,
0.004750000000000001,
0.004750000000000001,
0.004750000000000001,
0.004750000000000001,
0.00475,
0.00475,
0.00475,
0.00475,
0.00475,
0.00475,
0.00475,
0.00475,
0.00475,
0.00475,
0.00475,
0.00475,
0.00475,
0.00475,
0.00475,
0.00475,
0.00475,
0.00475,
0.00475,
0.00475,
0.00475,
0.00475,
0.00475,
0.00475,
0.004750000000000001,
0.004750000000000001,
0.004750000000000001,
0.004750000000000001,
0.004750000000000001,
0.004750000000000001,
0.00475,
0.00475,
0.00475,
0.00475,
0.00475,
0.00475,
0.00475,
0.00475,
0.00475,
0.07309523809523814,
0.07309523809523814,
0.07309523809523814,
0.07309523809523814,
0.07309523809523814,
0.07309523809523814,
0.09168949771689497,
0.09168949771689497,
0.09168949771689497,
0.09168949771689497,
0.09168949771689497,
0.09168949771689497,
0.09168949771689497,
0.09168949771689497,
0.09168949771689497,
0.09621014492753623,
0.09621014492753623,
0.09621014492753623,
0.09621014492753623,
0.09621014492753623,
0.09621014492753623,
0.09886509635974303,
0.09886509635974303,
0.09886509635974303,
0.09886509635974303,
0.09886509635974303,
0.09886509635974303,
0.09886509635974303,
0.09886509635974303,
0.09886509635974303,
0.09992233009708738,
0.09992233009708738,
0.09992233009708738,
0.09992233009708738,
0.09992233009708738,
0.09992233009708738,
0.09990847784200386,
0.09990847784200386,
0.09990847784200386,
0.09990847784200386,
0.09990847784200386,
0.09990847784200386,
0.09990847784200386,
0.09990847784200386,
0.09990847784200386,
0.09897701149425286,
0.09897701149425286,
0.09897701149425286,
0.09897701149425286,
0.09897701149425286,
0.09897701149425286,
0.09700833333333335,
0.09700833333333335,
0.09700833333333335,
0.09700833333333335,
0.09700833333333335,
0.09700833333333335,
0.09700833333333335,
0.09700833333333335,
0.09700833333333335,
0.09188218390804595,
0.09188218390804595,
0.09188218390804595,
0.09188218390804595,
0.09188218390804595,
0.09188218390804595,
0.05788461538461537,
0.05788461538461537,
0.05788461538461537,
0.05788461538461537,
0.05788461538461537,
0.05788461538461537,
0.05788461538461537,
0.05788461538461537,
0.05788461538461537,
0.004767840375586855,
0.004767840375586855,
0.004767840375586855,
0.004767840375586855,
0.004767840375586855,
0.004767840375586855,
0.004750000000000001,
0.004750000000000001,
0.004750000000000001,
0.004750000000000001,
0.004750000000000001,
0.004750000000000001,
0.004750000000000001,
0.004750000000000001,
0.004750000000000001,
0.00475,
0.00475,
0.00475,
0.00475,
0.00475,
0.00475,
0.00475,
0.00475,
0.00475,
0.00475,
0.00475,
0.00475,
0.00475,
0.00475,
0.00475,
0.00475,
0.00475,
0.00475,
0.00475,
0.00475,
0.00475,
0.00475,
0.00475,
0.00475,
0.00475,
0.00475,
0.00475,
0.00475,
0.00475,
0.00475,
0.00475,
0.00475,
0.00475,
0.00475,
0.00475,
0.00475,
0.00475,
0.00475,
0.00475,
0.00475,
0.00475,
0.00475,
0.00475,
0.00475,
0.00475,
0.00475,
0.00475,
0.00475,
0.00475,
0.00475,
0.00475,
0.00475,
0.00475,
0.00475,
0.00475,
0.00475,
0.00475,
0.00475,
0.00475,
0.00475,
0.004750000000000001,
0.004750000000000001,
0.004750000000000001,
0.004750000000000001,
0.004750000000000001,
0.004750000000000001,
0.004749999999999999,
0.004749999999999999,
0.004749999999999999,
0.004749999999999999,
0.004749999999999999,
0.004749999999999999,
0.004749999999999999,
0.004749999999999999,
0.004749999999999999,
0.00475,
0.00475,
0.00475,
0.00475,
0.00475,
0.00475,
0.00475,
0.00475,
0.00475,
0.00475,
0.00475,
0.00475
]
]
}
}
]
}

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,8 @@
{
"RefId": "A",
"RangeQuery": true,
"Start": 1641889530,
"End": 1641889532,
"Step": 1,
"Expr": "1 / 0"
}

View File

@@ -0,0 +1,95 @@
// 🌟 This was machine generated. Do not edit. 🌟
//
// Frame[0] {
// "type": "timeseries-multi",
// "typeVersion": [
// 0,
// 0
// ],
// "custom": {
// "resultType": "matrix"
// },
// "executedQueryString": "Expr: 1 / 0\nStep: 1s"
// }
// Name: 1 / 0
// Dimensions: 2 Fields by 3 Rows
// +-------------------------------+-----------------+
// | Name: Time | Name: Value |
// | Labels: | Labels: |
// | Type: []time.Time | Type: []float64 |
// +-------------------------------+-----------------+
// | 2022-01-11 08:25:30 +0000 UTC | +Inf |
// | 2022-01-11 08:25:31 +0000 UTC | +Inf |
// | 2022-01-11 08:25:32 +0000 UTC | +Inf |
// +-------------------------------+-----------------+
//
//
// 🌟 This was machine generated. Do not edit. 🌟
{
"status": 200,
"frames": [
{
"schema": {
"name": "1 / 0",
"meta": {
"type": "timeseries-multi",
"typeVersion": [
0,
0
],
"custom": {
"resultType": "matrix"
},
"executedQueryString": "Expr: 1 / 0\nStep: 1s"
},
"fields": [
{
"name": "Time",
"type": "time",
"typeInfo": {
"frame": "time.Time"
},
"config": {
"interval": 1000
}
},
{
"name": "Value",
"type": "number",
"typeInfo": {
"frame": "float64"
},
"labels": {},
"config": {
"displayNameFromDS": "1 / 0"
}
}
]
},
"data": {
"values": [
[
1641889530000,
1641889531000,
1641889532000
],
[
null,
null,
null
]
],
"entities": [
null,
{
"Inf": [
0,
1,
2
]
}
]
}
}
]
}

View File

@@ -0,0 +1,16 @@
{
"status": "success",
"data": {
"resultType": "matrix",
"result": [
{
"metric": {},
"values": [
[1641889530, "+Inf"],
[1641889531, "+Inf"],
[1641889532, "+Inf"]
]
}
]
}
}

View File

@@ -0,0 +1,8 @@
{
"RefId": "A",
"RangeQuery": true,
"Start": 1641889530,
"End": 1641889538,
"Step": 1,
"Expr": "test1"
}

View File

@@ -0,0 +1,88 @@
// 🌟 This was machine generated. Do not edit. 🌟
//
// Frame[0] {
// "type": "timeseries-multi",
// "typeVersion": [
// 0,
// 0
// ],
// "custom": {
// "resultType": "matrix"
// },
// "executedQueryString": "Expr: test1\nStep: 1s"
// }
// Name: go_goroutines{job="prometheus"}
// Dimensions: 2 Fields by 3 Rows
// +-------------------------------+------------------------------------------------+
// | Name: Time | Name: Value |
// | Labels: | Labels: __name__=go_goroutines, job=prometheus |
// | Type: []time.Time | Type: []float64 |
// +-------------------------------+------------------------------------------------+
// | 2022-01-11 08:25:33 +0000 UTC | 21 |
// | 2022-01-11 08:25:34 +0000 UTC | 32 |
// | 2022-01-11 08:25:37 +0000 UTC | 43 |
// +-------------------------------+------------------------------------------------+
//
//
// 🌟 This was machine generated. Do not edit. 🌟
{
"status": 200,
"frames": [
{
"schema": {
"name": "go_goroutines{job=\"prometheus\"}",
"meta": {
"type": "timeseries-multi",
"typeVersion": [
0,
0
],
"custom": {
"resultType": "matrix"
},
"executedQueryString": "Expr: test1\nStep: 1s"
},
"fields": [
{
"name": "Time",
"type": "time",
"typeInfo": {
"frame": "time.Time"
},
"config": {
"interval": 1000
}
},
{
"name": "Value",
"type": "number",
"typeInfo": {
"frame": "float64"
},
"labels": {
"__name__": "go_goroutines",
"job": "prometheus"
},
"config": {
"displayNameFromDS": "go_goroutines{job=\"prometheus\"}"
}
}
]
},
"data": {
"values": [
[
1641889533000,
1641889534000,
1641889537000
],
[
21,
32,
43
]
]
}
}
]
}

View File

@@ -0,0 +1,16 @@
{
"status": "success",
"data": {
"resultType": "matrix",
"result": [
{
"metric": { "__name__": "go_goroutines", "job": "prometheus" },
"values": [
[1641889533, "21"],
[1641889534, "32"],
[1641889537, "43"]
]
}
]
}
}

View File

@@ -0,0 +1,7 @@
{
"RefId": "A",
"RangeQuery": true,
"Start": 1641889530,
"End": 1641889532,
"Step": 1
}

View File

@@ -0,0 +1,98 @@
// 🌟 This was machine generated. Do not edit. 🌟
//
// Frame[0] {
// "type": "timeseries-multi",
// "typeVersion": [
// 0,
// 0
// ],
// "custom": {
// "resultType": "matrix"
// },
// "executedQueryString": "Expr: \nStep: 1s"
// }
// Name: {handler="/api/v1/query_range", job="prometheus"}
// Dimensions: 2 Fields by 3 Rows
// +-------------------------------+-----------------------------------------------------+
// | Name: Time | Name: Value |
// | Labels: | Labels: handler=/api/v1/query_range, job=prometheus |
// | Type: []time.Time | Type: []float64 |
// +-------------------------------+-----------------------------------------------------+
// | 2022-01-11 08:25:30 +0000 UTC | NaN |
// | 2022-01-11 08:25:31 +0000 UTC | NaN |
// | 2022-01-11 08:25:32 +0000 UTC | NaN |
// +-------------------------------+-----------------------------------------------------+
//
//
// 🌟 This was machine generated. Do not edit. 🌟
{
"status": 200,
"frames": [
{
"schema": {
"name": "{handler=\"/api/v1/query_range\", job=\"prometheus\"}",
"meta": {
"type": "timeseries-multi",
"typeVersion": [
0,
0
],
"custom": {
"resultType": "matrix"
},
"executedQueryString": "Expr: \nStep: 1s"
},
"fields": [
{
"name": "Time",
"type": "time",
"typeInfo": {
"frame": "time.Time"
},
"config": {
"interval": 1000
}
},
{
"name": "Value",
"type": "number",
"typeInfo": {
"frame": "float64"
},
"labels": {
"handler": "/api/v1/query_range",
"job": "prometheus"
},
"config": {
"displayNameFromDS": "{handler=\"/api/v1/query_range\", job=\"prometheus\"}"
}
}
]
},
"data": {
"values": [
[
1641889530000,
1641889531000,
1641889532000
],
[
null,
null,
null
]
],
"entities": [
null,
{
"NaN": [
0,
1,
2
]
}
]
}
}
]
}

View File

@@ -0,0 +1,16 @@
{
"status": "success",
"data": {
"resultType": "matrix",
"result": [
{
"metric": { "handler": "/api/v1/query_range", "job": "prometheus" },
"values": [
[1641889530, "NaN"],
[1641889531, "NaN"],
[1641889532, "NaN"]
]
}
]
}
}

Some files were not shown because too many files have changed in this diff Show More