Backend Plugins: Convert test data source to use SDK contracts (#29916)

Converts the core testdata data source to use the SDK contracts, thereby
implementing a backend plugin in core Grafana in a similar manner to an external one.

Co-authored-by: Will Browne <will.browne@grafana.com>
Co-authored-by: Marcus Efraimsson <marefr@users.noreply.github.com>
Co-authored-by: Ryan McKinley <ryantxu@gmail.com>
This commit is contained in:
Marcus Efraimsson
2021-01-29 18:33:23 +01:00
committed by GitHub
parent b838125ef7
commit 043d6cd584
15 changed files with 1456 additions and 895 deletions

View File

@@ -29,7 +29,7 @@ e2e.scenario({
e2e()
.route({
method: 'POST',
url: '/api/tsdb/query',
url: '/api/ds/query',
})
.as('apiPostQuery');

View File

@@ -10,7 +10,7 @@ describe('Variables - Load options from Url', () => {
e2e()
.route({
method: 'POST',
url: '/api/tsdb/query',
url: '/api/ds/query',
})
.as('query');
@@ -63,7 +63,7 @@ describe('Variables - Load options from Url', () => {
e2e()
.route({
method: 'POST',
url: '/api/tsdb/query',
url: '/api/ds/query',
})
.as('query');
@@ -127,7 +127,7 @@ describe('Variables - Load options from Url', () => {
e2e()
.route({
method: 'POST',
url: '/api/tsdb/query',
url: '/api/ds/query',
})
.as('query');

View File

@@ -10,7 +10,7 @@ describe('Variables - Set options from ui', () => {
e2e()
.route({
method: 'POST',
url: '/api/tsdb/query',
url: '/api/ds/query',
})
.as('query');
@@ -68,7 +68,7 @@ describe('Variables - Set options from ui', () => {
e2e()
.route({
method: 'POST',
url: '/api/tsdb/query',
url: '/api/ds/query',
})
.as('query');
@@ -123,7 +123,7 @@ describe('Variables - Set options from ui', () => {
e2e()
.route({
method: 'POST',
url: '/api/tsdb/query',
url: '/api/ds/query',
})
.as('query');

View File

@@ -112,7 +112,7 @@ export class DataSourceWithBackend<
})
.pipe(
map((rsp: any) => {
return toDataQueryResponse(rsp);
return toDataQueryResponse(rsp, queries as DataQuery[]);
}),
catchError((err) => {
return of(toDataQueryResponse(err));

View File

@@ -1,16 +1,25 @@
import { toDataFrameDTO } from '@grafana/data';
import { DataQuery, toDataFrameDTO, DataFrame } from '@grafana/data';
import { toDataQueryResponse } from './queryResponse';
/* eslint-disable */
const resp = {
data: {
results: {
GC: {
A: {
refId: 'A',
series: null,
tables: null,
dataframes: [
'QVJST1cxAACsAQAAEAAAAAAACgAOAAwACwAEAAoAAAAUAAAAAAAAAQMACgAMAAAACAAEAAoAAAAIAAAAUAAAAAIAAAAoAAAABAAAAOD+//8IAAAADAAAAAIAAABHQwAABQAAAHJlZklkAAAAAP///wgAAAAMAAAAAAAAAAAAAAAEAAAAbmFtZQAAAAACAAAAlAAAAAQAAACG////FAAAAGAAAABgAAAAAAADAWAAAAACAAAALAAAAAQAAABQ////CAAAABAAAAAGAAAAbnVtYmVyAAAEAAAAdHlwZQAAAAB0////CAAAAAwAAAAAAAAAAAAAAAQAAABuYW1lAAAAAAAAAABm////AAACAAAAAAAAABIAGAAUABMAEgAMAAAACAAEABIAAAAUAAAAbAAAAHQAAAAAAAoBdAAAAAIAAAA0AAAABAAAANz///8IAAAAEAAAAAQAAAB0aW1lAAAAAAQAAAB0eXBlAAAAAAgADAAIAAQACAAAAAgAAAAQAAAABAAAAFRpbWUAAAAABAAAAG5hbWUAAAAAAAAAAAAABgAIAAYABgAAAAAAAwAEAAAAVGltZQAAAAC8AAAAFAAAAAAAAAAMABYAFAATAAwABAAMAAAA0AAAAAAAAAAUAAAAAAAAAwMACgAYAAwACAAEAAoAAAAUAAAAWAAAAA0AAAAAAAAAAAAAAAQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABoAAAAAAAAAGgAAAAAAAAAAAAAAAAAAABoAAAAAAAAAGgAAAAAAAAAAAAAAAIAAAANAAAAAAAAAAAAAAAAAAAADQAAAAAAAAAAAAAAAAAAAAAAAAAAFp00e2XHFQAIo158ZccVAPqoiH1lxxUA7K6yfmXHFQDetNx/ZccVANC6BoFlxxUAwsAwgmXHFQC0xlqDZccVAKbMhIRlxxUAmNKuhWXHFQCK2NiGZccVAHzeAohlxxUAbuQsiWXHFQAAAAAAAAhAAAAAAAAACEAAAAAAAAAIQAAAAAAAABRAAAAAAAAAFEAAAAAAAAAUQAAAAAAAAAhAAAAAAAAACEAAAAAAAAAIQAAAAAAAABRAAAAAAAAAFEAAAAAAAAAUQAAAAAAAAAhAEAAAAAwAFAASAAwACAAEAAwAAAAQAAAALAAAADgAAAAAAAMAAQAAALgBAAAAAAAAwAAAAAAAAADQAAAAAAAAAAAAAAAAAAAAAAAKAAwAAAAIAAQACgAAAAgAAABQAAAAAgAAACgAAAAEAAAA4P7//wgAAAAMAAAAAgAAAEdDAAAFAAAAcmVmSWQAAAAA////CAAAAAwAAAAAAAAAAAAAAAQAAABuYW1lAAAAAAIAAACUAAAABAAAAIb///8UAAAAYAAAAGAAAAAAAAMBYAAAAAIAAAAsAAAABAAAAFD///8IAAAAEAAAAAYAAABudW1iZXIAAAQAAAB0eXBlAAAAAHT///8IAAAADAAAAAAAAAAAAAAABAAAAG5hbWUAAAAAAAAAAGb///8AAAIAAAAAAAAAEgAYABQAEwASAAwAAAAIAAQAEgAAABQAAABsAAAAdAAAAAAACgF0AAAAAgAAADQAAAAEAAAA3P///wgAAAAQAAAABAAAAHRpbWUAAAAABAAAAHR5cGUAAAAACAAMAAgABAAIAAAACAAAABAAAAAEAAAAVGltZQAAAAAEAAAAbmFtZQAAAAAAAAAAAAAGAAgABgAGAAAAAAADAAQAAABUaW1lAAAAANgBAABBUlJPVzE=',
'QVJST1cxAAD/////cAEAABAAAAAAAAoADgAMAAsABAAKAAAAFAAAAAAAAAEDAAoADAAAAAgABAAKAAAACAAAAFAAAAACAAAAKAAAAAQAAAAg////CAAAAAwAAAABAAAAQQAAAAUAAAByZWZJZAAAAED///8IAAAADAAAAAAAAAAAAAAABAAAAG5hbWUAAAAAAgAAAHwAAAAEAAAAnv///xQAAABAAAAAQAAAAAAAAwFAAAAAAQAAAAQAAACM////CAAAABQAAAAIAAAAQS1zZXJpZXMAAAAABAAAAG5hbWUAAAAAAAAAAIb///8AAAIACAAAAEEtc2VyaWVzAAASABgAFAATABIADAAAAAgABAASAAAAFAAAAEQAAABMAAAAAAAKAUwAAAABAAAADAAAAAgADAAIAAQACAAAAAgAAAAQAAAABAAAAHRpbWUAAAAABAAAAG5hbWUAAAAAAAAAAAAABgAIAAYABgAAAAAAAwAEAAAAdGltZQAAAAAAAAAA/////7gAAAAUAAAAAAAAAAwAFgAUABMADAAEAAwAAABgAAAAAAAAABQAAAAAAAADAwAKABgADAAIAAQACgAAABQAAABYAAAABgAAAAAAAAAAAAAABAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAADAAAAAAAAAAMAAAAAAAAAAAAAAAAAAAADAAAAAAAAAAMAAAAAAAAAAAAAAAAgAAAAYAAAAAAAAAAAAAAAAAAAAGAAAAAAAAAAAAAAAAAAAAQMC/OcElXhZAOAEFxCVeFkCwQtDGJV4WQCiEm8klXhZAoMVmzCVeFkAYBzLPJV4WAAAAAAAA8D8AAAAAAAA0QAAAAAAAgFZAAAAAAAAAPkAAAAAAAAAUQAAAAAAAAAAAEAAAAAwAFAASAAwACAAEAAwAAAAQAAAALAAAADgAAAAAAAMAAQAAAIABAAAAAAAAwAAAAAAAAABgAAAAAAAAAAAAAAAAAAAAAAAKAAwAAAAIAAQACgAAAAgAAABQAAAAAgAAACgAAAAEAAAAIP///wgAAAAMAAAAAQAAAEEAAAAFAAAAcmVmSWQAAABA////CAAAAAwAAAAAAAAAAAAAAAQAAABuYW1lAAAAAAIAAAB8AAAABAAAAJ7///8UAAAAQAAAAEAAAAAAAAMBQAAAAAEAAAAEAAAAjP///wgAAAAUAAAACAAAAEEtc2VyaWVzAAAAAAQAAABuYW1lAAAAAAAAAACG////AAACAAgAAABBLXNlcmllcwAAEgAYABQAEwASAAwAAAAIAAQAEgAAABQAAABEAAAATAAAAAAACgFMAAAAAQAAAAwAAAAIAAwACAAEAAgAAAAIAAAAEAAAAAQAAAB0aW1lAAAAAAQAAABuYW1lAAAAAAAAAAAAAAYACAAGAAYAAAAAAAMABAAAAHRpbWUAAAAAmAEAAEFSUk9XMQ==',
],
},
B: {
refId: 'B',
series: null,
tables: null,
dataframes: [
'QVJST1cxAAD/////cAEAABAAAAAAAAoADgAMAAsABAAKAAAAFAAAAAAAAAEDAAoADAAAAAgABAAKAAAACAAAAFAAAAACAAAAKAAAAAQAAAAg////CAAAAAwAAAABAAAAQgAAAAUAAAByZWZJZAAAAED///8IAAAADAAAAAAAAAAAAAAABAAAAG5hbWUAAAAAAgAAAHwAAAAEAAAAnv///xQAAABAAAAAQAAAAAAAAwFAAAAAAQAAAAQAAACM////CAAAABQAAAAIAAAAQi1zZXJpZXMAAAAABAAAAG5hbWUAAAAAAAAAAIb///8AAAIACAAAAEItc2VyaWVzAAASABgAFAATABIADAAAAAgABAASAAAAFAAAAEQAAABMAAAAAAAKAUwAAAABAAAADAAAAAgADAAIAAQACAAAAAgAAAAQAAAABAAAAHRpbWUAAAAABAAAAG5hbWUAAAAAAAAAAAAABgAIAAYABgAAAAAAAwAEAAAAdGltZQAAAAAAAAAA/////7gAAAAUAAAAAAAAAAwAFgAUABMADAAEAAwAAABgAAAAAAAAABQAAAAAAAADAwAKABgADAAIAAQACgAAABQAAABYAAAABgAAAAAAAAAAAAAABAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAADAAAAAAAAAAMAAAAAAAAAAAAAAAAAAAADAAAAAAAAAAMAAAAAAAAAAAAAAAAgAAAAYAAAAAAAAAAAAAAAAAAAAGAAAAAAAAAAAAAAAAAAAAQMC/OcElXhZAOAEFxCVeFkCwQtDGJV4WQCiEm8klXhZAoMVmzCVeFkAYBzLPJV4WAAAAAAAA8D8AAAAAAAA0QAAAAAAAgFZAAAAAAAAAPkAAAAAAAAAUQAAAAAAAAAAAEAAAAAwAFAASAAwACAAEAAwAAAAQAAAALAAAADgAAAAAAAMAAQAAAIABAAAAAAAAwAAAAAAAAABgAAAAAAAAAAAAAAAAAAAAAAAKAAwAAAAIAAQACgAAAAgAAABQAAAAAgAAACgAAAAEAAAAIP///wgAAAAMAAAAAQAAAEIAAAAFAAAAcmVmSWQAAABA////CAAAAAwAAAAAAAAAAAAAAAQAAABuYW1lAAAAAAIAAAB8AAAABAAAAJ7///8UAAAAQAAAAEAAAAAAAAMBQAAAAAEAAAAEAAAAjP///wgAAAAUAAAACAAAAEItc2VyaWVzAAAAAAQAAABuYW1lAAAAAAAAAACG////AAACAAgAAABCLXNlcmllcwAAEgAYABQAEwASAAwAAAAIAAQAEgAAABQAAABEAAAATAAAAAAACgFMAAAAAQAAAAwAAAAIAAwACAAEAAgAAAAIAAAAEAAAAAQAAAB0aW1lAAAAAAQAAABuYW1lAAAAAAAAAAAAAAYACAAGAAYAAAAAAAMABAAAAHRpbWUAAAAAmAEAAEFSUk9XMQ==',
],
frames: null as any,
},
},
},
@@ -41,9 +50,9 @@ describe('Query Response parser', () => {
test('should parse output with dataframe', () => {
const res = toDataQueryResponse(resp);
const frames = res.data;
for (const frame of frames) {
expect(frame.refId).toEqual('GC');
}
expect(frames).toHaveLength(2);
expect(frames[0].refId).toEqual('A');
expect(frames[1].refId).toEqual('B');
const norm = frames.map((f) => toDataFrameDTO(f));
expect(norm).toMatchInlineSnapshot(`
@@ -53,49 +62,155 @@ describe('Query Response parser', () => {
Object {
"config": Object {},
"labels": undefined,
"name": "Time",
"name": "time",
"type": "time",
"values": Array [
1569334575000,
1569334580000,
1569334585000,
1569334590000,
1569334595000,
1569334600000,
1569334605000,
1569334610000,
1569334615000,
1569334620000,
1569334625000,
1569334630000,
1569334635000,
1611767228473,
1611767240473,
1611767252473,
1611767264473,
1611767276473,
1611767288473,
],
},
Object {
"config": Object {},
"labels": undefined,
"name": "",
"name": "A-series",
"type": "number",
"values": Array [
3,
3,
3,
1,
20,
90,
30,
5,
5,
5,
3,
3,
3,
5,
5,
5,
3,
0,
],
},
],
"meta": undefined,
"name": undefined,
"refId": "GC",
"refId": "A",
},
Object {
"fields": Array [
Object {
"config": Object {},
"labels": undefined,
"name": "time",
"type": "time",
"values": Array [
1611767228473,
1611767240473,
1611767252473,
1611767264473,
1611767276473,
1611767288473,
],
},
Object {
"config": Object {},
"labels": undefined,
"name": "B-series",
"type": "number",
"values": Array [
1,
20,
90,
30,
5,
0,
],
},
],
"meta": undefined,
"name": undefined,
"refId": "B",
},
]
`);
});
test('should parse output with dataframe in order of queries', () => {
const queries: DataQuery[] = [{ refId: 'B' }, { refId: 'A' }];
const res = toDataQueryResponse(resp, queries);
const frames = res.data;
expect(frames).toHaveLength(2);
expect(frames[0].refId).toEqual('B');
expect(frames[1].refId).toEqual('A');
const norm = frames.map((f) => toDataFrameDTO(f));
expect(norm).toMatchInlineSnapshot(`
Array [
Object {
"fields": Array [
Object {
"config": Object {},
"labels": undefined,
"name": "time",
"type": "time",
"values": Array [
1611767228473,
1611767240473,
1611767252473,
1611767264473,
1611767276473,
1611767288473,
],
},
Object {
"config": Object {},
"labels": undefined,
"name": "B-series",
"type": "number",
"values": Array [
1,
20,
90,
30,
5,
0,
],
},
],
"meta": undefined,
"name": undefined,
"refId": "B",
},
Object {
"fields": Array [
Object {
"config": Object {},
"labels": undefined,
"name": "time",
"type": "time",
"values": Array [
1611767228473,
1611767240473,
1611767252473,
1611767264473,
1611767276473,
1611767288473,
],
},
Object {
"config": Object {},
"labels": undefined,
"name": "A-series",
"type": "number",
"values": Array [
1,
20,
90,
30,
5,
0,
],
},
],
"meta": undefined,
"name": undefined,
"refId": "A",
},
]
`);
@@ -106,6 +221,35 @@ describe('Query Response parser', () => {
expect(frames.length).toEqual(0);
});
test('keeps query order', () => {
const resp = {
data: {
results: {
X: {
series: [
{ name: 'Requests/s', points: [[13.594958983547151, 1611839862951]], tables: null, dataframes: null },
],
},
B: {
series: [
{ name: 'Requests/s', points: [[13.594958983547151, 1611839862951]], tables: null, dataframes: null },
],
},
A: {
series: [
{ name: 'Requests/s', points: [[13.594958983547151, 1611839862951]], tables: null, dataframes: null },
],
},
},
},
};
const queries: DataQuery[] = [{ refId: 'A' }, { refId: 'B' }];
const ids = (toDataQueryResponse(resp, queries).data as DataFrame[]).map((f) => f.refId);
expect(ids).toEqual(['A', 'B', 'X']);
});
test('resultWithError', () => {
// Generated from:
// qdr.Responses[q.GetRefID()] = backend.DataResponse{

View File

@@ -11,6 +11,7 @@ import {
DataFrame,
MetricFindValue,
FieldType,
DataQuery,
} from '@grafana/data';
interface DataResponse {
@@ -24,56 +25,84 @@ interface DataResponse {
/**
* Parse the results from /api/ds/query into a DataQueryResponse
*
* @param res - the HTTP response data.
* @param queries - optional DataQuery array that will order the response based on the order of query refId's.
*
* @public
*/
export function toDataQueryResponse(res: any): DataQueryResponse {
export function toDataQueryResponse(res: any, queries?: DataQuery[]): DataQueryResponse {
const rsp: DataQueryResponse = { data: [], state: LoadingState.Done };
if (res.data?.results) {
const results: KeyValue = res.data.results;
for (const refId of Object.keys(results)) {
const resultIDs = Object.keys(results);
const refIDs = queries ? queries.map((q) => q.refId) : resultIDs;
const usedResultIDs = new Set<string>(resultIDs);
const data: DataResponse[] = [];
for (const refId of refIDs) {
const dr = results[refId] as DataResponse;
if (dr) {
if (dr.error) {
if (!rsp.error) {
rsp.error = {
refId,
message: dr.error,
};
if (!dr) {
continue;
}
dr.refId = refId;
usedResultIDs.delete(refId);
data.push(dr);
}
// Add any refIds that do not match the query targets
if (usedResultIDs.size) {
for (const refId of usedResultIDs) {
const dr = results[refId] as DataResponse;
if (!dr) {
continue;
}
dr.refId = refId;
usedResultIDs.delete(refId);
data.push(dr);
}
}
for (const dr of data) {
if (dr.error) {
if (!rsp.error) {
rsp.error = {
refId: dr.refId,
message: dr.error,
};
rsp.state = LoadingState.Error;
}
}
if (dr.series?.length) {
for (const s of dr.series) {
if (!s.refId) {
s.refId = dr.refId;
}
rsp.data.push(toDataFrame(s));
}
}
if (dr.tables?.length) {
for (const s of dr.tables) {
if (!s.refId) {
s.refId = dr.refId;
}
rsp.data.push(toDataFrame(s));
}
}
if (dr.dataframes) {
for (const b64 of dr.dataframes) {
try {
const t = base64StringToArrowTable(b64);
const f = arrowTableToDataFrame(t);
if (!f.refId) {
f.refId = dr.refId;
}
rsp.data.push(f);
} catch (err) {
rsp.state = LoadingState.Error;
}
}
if (dr.series && dr.series.length) {
for (const s of dr.series) {
if (!s.refId) {
s.refId = refId;
}
rsp.data.push(toDataFrame(s));
}
}
if (dr.tables && dr.tables.length) {
for (const s of dr.tables) {
if (!s.refId) {
s.refId = refId;
}
rsp.data.push(toDataFrame(s));
}
}
if (dr.dataframes) {
for (const b64 of dr.dataframes) {
try {
const t = base64StringToArrowTable(b64);
const f = arrowTableToDataFrame(t);
if (!f.refId) {
f.refId = refId;
}
rsp.data.push(f);
} catch (err) {
rsp.state = LoadingState.Error;
rsp.error = toDataQueryError(err);
}
rsp.error = toDataQueryError(err);
}
}
}

View File

@@ -350,7 +350,6 @@ func (hs *HTTPServer) registerRoutes() {
// metrics
apiRoute.Post("/tsdb/query", bind(dtos.MetricRequest{}), routing.Wrap(hs.QueryMetrics))
apiRoute.Get("/tsdb/testdata/scenarios", routing.Wrap(GetTestDataScenarios))
apiRoute.Get("/tsdb/testdata/gensql", reqGrafanaAdmin, routing.Wrap(GenerateSQLTestData))
apiRoute.Get("/tsdb/testdata/random-walk", routing.Wrap(GetTestDataRandomWalk))
@@ -395,9 +394,6 @@ func (hs *HTTPServer) registerRoutes() {
annotationsRoute.Post("/graphite", reqEditorRole, bind(dtos.PostGraphiteAnnotationsCmd{}), routing.Wrap(PostGraphiteAnnotation))
})
// error test
r.Get("/metrics/error", routing.Wrap(GenerateError))
// short urls
apiRoute.Post("/short-urls", bind(dtos.CreateShortURLCmd{}), routing.Wrap(hs.createShortURL))
}, reqSignedIn)

View File

@@ -3,7 +3,6 @@ package api
import (
"context"
"errors"
"sort"
"github.com/grafana/grafana/pkg/expr"
"github.com/grafana/grafana/pkg/models"
@@ -13,7 +12,6 @@ import (
"github.com/grafana/grafana/pkg/bus"
"github.com/grafana/grafana/pkg/components/simplejson"
"github.com/grafana/grafana/pkg/tsdb"
"github.com/grafana/grafana/pkg/tsdb/testdatasource"
"github.com/grafana/grafana/pkg/util"
)
@@ -202,36 +200,6 @@ func (hs *HTTPServer) QueryMetrics(c *models.ReqContext, reqDto dtos.MetricReque
return response.JSON(statusCode, &resp)
}
// GET /api/tsdb/testdata/scenarios
func GetTestDataScenarios(c *models.ReqContext) response.Response {
result := make([]interface{}, 0)
scenarioIds := make([]string, 0)
for id := range testdatasource.ScenarioRegistry {
scenarioIds = append(scenarioIds, id)
}
sort.Strings(scenarioIds)
for _, scenarioId := range scenarioIds {
scenario := testdatasource.ScenarioRegistry[scenarioId]
result = append(result, map[string]interface{}{
"id": scenario.Id,
"name": scenario.Name,
"description": scenario.Description,
"stringInput": scenario.StringInput,
})
}
return response.JSON(200, &result)
}
// GenerateError generates a index out of range error
func GenerateError(c *models.ReqContext) response.Response {
var array []string
// nolint: govet
return response.JSON(200, array[20])
}
// GET /api/tsdb/testdata/gensql
func GenerateSQLTestData(c *models.ReqContext) response.Response {
if err := bus.Dispatch(&models.InsertSQLTestDataCommand{}); err != nil {
@@ -250,7 +218,10 @@ func GetTestDataRandomWalk(c *models.ReqContext) response.Response {
timeRange := tsdb.NewTimeRange(from, to)
request := &tsdb.TsdbQuery{TimeRange: timeRange}
dsInfo := &models.DataSource{Type: "testdata"}
dsInfo := &models.DataSource{
Type: "testdata",
JsonData: simplejson.New(),
}
request.Queries = append(request.Queries, &tsdb.Query{
RefId: "A",
IntervalMs: intervalMs,

View File

@@ -11,7 +11,6 @@ import (
)
// corePlugin represents a plugin that's part of Grafana core.
// nolint:unused
type corePlugin struct {
pluginID string
logger log.Logger
@@ -55,7 +54,7 @@ func (cp *corePlugin) Stop(ctx context.Context) error {
}
func (cp *corePlugin) IsManaged() bool {
return false
return true
}
func (cp *corePlugin) Exited() bool {

View File

@@ -19,7 +19,7 @@ func TestCorePlugin(t *testing.T) {
require.NotNil(t, p)
require.NoError(t, p.Start(context.Background()))
require.NoError(t, p.Stop(context.Background()))
require.False(t, p.IsManaged())
require.True(t, p.IsManaged())
require.False(t, p.Exited())
_, err = p.CollectMetrics(context.Background())
@@ -50,7 +50,7 @@ func TestCorePlugin(t *testing.T) {
require.NotNil(t, p)
require.NoError(t, p.Start(context.Background()))
require.NoError(t, p.Stop(context.Background()))
require.False(t, p.IsManaged())
require.True(t, p.IsManaged())
require.False(t, p.Exited())
_, err = p.CollectMetrics(context.Background())

View File

@@ -0,0 +1,157 @@
package testdatasource
import (
"encoding/json"
"fmt"
"io"
"io/ioutil"
"net/http"
"sort"
"strconv"
"time"
"github.com/grafana/grafana/pkg/infra/log"
"github.com/grafana/grafana-plugin-sdk-go/backend/resource/httpadapter"
)
// registerRoutes wires the test data source's resource endpoints onto mux.
func (p *testDataPlugin) registerRoutes(mux *http.ServeMux) {
	// Plain handler functions, one path each.
	funcRoutes := map[string]http.HandlerFunc{
		"/":          p.testGetHandler,
		"/scenarios": p.getScenariosHandler,
		"/stream":    p.testStreamHandler,
		"/boom":      p.testPanicHandler,
	}
	for path, handler := range funcRoutes {
		mux.HandleFunc(path, handler)
	}

	// Both JSON endpoints are served by the same handler implementation.
	jsonHandler := createJSONHandler(p.logger)
	mux.Handle("/test", jsonHandler)
	mux.Handle("/test/json", jsonHandler)
}
// testGetHandler serves a static greeting for GET requests to the plugin
// root. Requests with any other method get an empty 200 response.
func (p *testDataPlugin) testGetHandler(rw http.ResponseWriter, req *http.Request) {
	p.logger.Debug("Received resource call", "url", req.URL.String(), "method", req.Method)

	if req.Method != http.MethodGet {
		return
	}

	// The first Write implicitly sends a 200 status. The previous code also
	// called rw.WriteHeader(http.StatusOK) *after* Write, which is a no-op
	// that makes net/http log a "superfluous WriteHeader" warning; it has
	// been removed.
	if _, err := rw.Write([]byte("Hello world from test datasource!")); err != nil {
		p.logger.Error("Failed to write response", "error", err)
	}
}
// getScenariosHandler responds with the registered scenarios as a JSON
// array, sorted by scenario ID so output ordering is stable across calls.
func (p *testDataPlugin) getScenariosHandler(rw http.ResponseWriter, req *http.Request) {
	result := make([]interface{}, 0)

	scenarioIds := make([]string, 0, len(p.scenarios))
	for id := range p.scenarios {
		scenarioIds = append(scenarioIds, id)
	}
	sort.Strings(scenarioIds)

	for _, scenarioID := range scenarioIds {
		scenario := p.scenarios[scenarioID]
		result = append(result, map[string]interface{}{
			"id":          scenario.ID,
			"name":        scenario.Name,
			"description": scenario.Description,
			"stringInput": scenario.StringInput,
		})
	}

	bytes, err := json.Marshal(&result)
	if err != nil {
		// Previously a marshal failure still produced a 200 with an empty
		// body; answer 500 instead so callers can tell something went wrong.
		p.logger.Error("Failed to marshal response body to JSON", "error", err)
		rw.WriteHeader(http.StatusInternalServerError)
		return
	}

	rw.Header().Set("Content-Type", "application/json")
	rw.WriteHeader(http.StatusOK)
	if _, err := rw.Write(bytes); err != nil {
		p.logger.Error("Failed to write response", "error", err)
	}
}
// testStreamHandler streams `count` plain-text messages to the client,
// sleeping `sleep` between writes, to exercise flushed/chunked responses.
// Query parameters: count (int, default 10) and sleep (Go duration string,
// default 1ms). Non-GET requests get an empty 200 response.
func (p *testDataPlugin) testStreamHandler(rw http.ResponseWriter, req *http.Request) {
	p.logger.Debug("Received resource call", "url", req.URL.String(), "method", req.Method)

	if req.Method != http.MethodGet {
		return
	}

	count := 10
	countstr := req.URL.Query().Get("count")
	if countstr != "" {
		if i, err := strconv.Atoi(countstr); err == nil {
			count = i
		}
	}

	sleep := req.URL.Query().Get("sleep")
	sleepDuration, err := time.ParseDuration(sleep)
	if err != nil {
		sleepDuration = time.Millisecond
	}

	// Guard the assertion: http.ResponseWriter is not required to implement
	// http.Flusher, and the old unchecked rw.(http.Flusher) cast would panic
	// when it didn't. Hoisted out of the loop since the answer cannot change.
	flusher, canFlush := rw.(http.Flusher)

	rw.Header().Set("Content-Type", "text/plain")
	rw.WriteHeader(http.StatusOK)

	for i := 1; i <= count; i++ {
		if _, err := io.WriteString(rw, fmt.Sprintf("Message #%d", i)); err != nil {
			p.logger.Error("Failed to write response", "error", err)
			return
		}

		if canFlush {
			flusher.Flush()
		}
		time.Sleep(sleepDuration)
	}
}
// createJSONHandler builds an http.Handler that echoes request details
// (method, URL, headers, parsed JSON body, and the plugin config taken from
// the request context) back to the caller as a JSON document.
func createJSONHandler(logger log.Logger) http.Handler {
	return http.HandlerFunc(func(rw http.ResponseWriter, req *http.Request) {
		logger.Debug("Received resource call", "url", req.URL.String(), "method", req.Method)

		var reqData map[string]interface{}
		if req.Body != nil {
			defer func() {
				if err := req.Body.Close(); err != nil {
					// It is the *request* body being closed here; the old log
					// message incorrectly said "response body".
					logger.Warn("Failed to close request body", "err", err)
				}
			}()

			b, err := ioutil.ReadAll(req.Body)
			if err != nil {
				logger.Error("Failed to read request body to bytes", "error", err)
			} else {
				if err := json.Unmarshal(b, &reqData); err != nil {
					logger.Error("Failed to unmarshal request body to JSON", "error", err)
				}
				logger.Debug("Received resource call body", "body", reqData)
			}
		}

		config := httpadapter.PluginConfigFromContext(req.Context())

		data := map[string]interface{}{
			"message": "Hello world from test datasource!",
			"request": map[string]interface{}{
				"method":  req.Method,
				"url":     req.URL,
				"headers": req.Header,
				"body":    reqData,
				"config":  config,
			},
		}

		bytes, err := json.Marshal(&data)
		if err != nil {
			// Previously a marshal failure still answered 200 with an empty
			// body; surface the failure to the caller instead.
			logger.Error("Failed to marshal response body to JSON", "error", err)
			rw.WriteHeader(http.StatusInternalServerError)
			return
		}

		rw.Header().Set("Content-Type", "application/json")
		rw.WriteHeader(http.StatusOK)
		if _, err := rw.Write(bytes); err != nil {
			logger.Error("Failed to write response", "error", err)
		}
	})
}
// testPanicHandler deliberately panics so the plugin's panic/recovery
// behavior can be exercised end to end via the /boom resource route.
func (p *testDataPlugin) testPanicHandler(rw http.ResponseWriter, req *http.Request) {
	const msg = "BOOM"
	panic(msg)
}

File diff suppressed because it is too large Load Diff

View File

@@ -1,55 +1,115 @@
package testdatasource
import (
"context"
"fmt"
"testing"
"time"
"github.com/grafana/grafana-plugin-sdk-go/backend"
"github.com/grafana/grafana-plugin-sdk-go/data"
"github.com/grafana/grafana/pkg/components/simplejson"
"github.com/grafana/grafana/pkg/tsdb"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
func TestTestdataScenarios(t *testing.T) {
p := &testDataPlugin{}
t.Run("random walk ", func(t *testing.T) {
scenario := ScenarioRegistry["random_walk"]
t.Run("Should start at the requested value", func(t *testing.T) {
req := &tsdb.TsdbQuery{
TimeRange: tsdb.NewFakeTimeRange("5m", "now", time.Now()),
Queries: []*tsdb.Query{
{RefId: "A", IntervalMs: 100, MaxDataPoints: 100, Model: simplejson.New()},
timeRange := tsdb.NewFakeTimeRange("5m", "now", time.Now())
model := simplejson.New()
model.Set("startValue", 1.234)
modelBytes, err := model.MarshalJSON()
require.NoError(t, err)
query := backend.DataQuery{
RefID: "A",
TimeRange: backend.TimeRange{
From: timeRange.MustGetFrom(),
To: timeRange.MustGetTo(),
},
Interval: 100 * time.Millisecond,
MaxDataPoints: 100,
JSON: modelBytes,
}
query := req.Queries[0]
query.Model.Set("startValue", 1.234)
result := scenario.Handler(req.Queries[0], req)
require.NotNil(t, result.Series)
req := &backend.QueryDataRequest{
PluginContext: backend.PluginContext{},
Queries: []backend.DataQuery{query},
}
points := result.Series[0].Points
require.Equal(t, 1.234, points[0][0].Float64)
resp, err := p.handleRandomWalkScenario(context.Background(), req)
require.NoError(t, err)
require.NotNil(t, resp)
dResp, exists := resp.Responses[query.RefID]
require.True(t, exists)
require.NoError(t, dResp.Error)
require.Len(t, dResp.Frames, 1)
frame := dResp.Frames[0]
require.Len(t, frame.Fields, 2)
require.Equal(t, "time", frame.Fields[0].Name)
require.Equal(t, "A-series", frame.Fields[1].Name)
val, ok := frame.Fields[1].ConcreteAt(0)
require.True(t, ok)
require.Equal(t, 1.234, val)
})
})
t.Run("random walk table", func(t *testing.T) {
scenario := ScenarioRegistry["random_walk_table"]
t.Run("Should return a table that looks like value/min/max", func(t *testing.T) {
req := &tsdb.TsdbQuery{
TimeRange: tsdb.NewFakeTimeRange("5m", "now", time.Now()),
Queries: []*tsdb.Query{
{RefId: "A", IntervalMs: 100, MaxDataPoints: 100, Model: simplejson.New()},
timeRange := tsdb.NewFakeTimeRange("5m", "now", time.Now())
model := simplejson.New()
modelBytes, err := model.MarshalJSON()
require.NoError(t, err)
query := backend.DataQuery{
RefID: "A",
TimeRange: backend.TimeRange{
From: timeRange.MustGetFrom(),
To: timeRange.MustGetTo(),
},
Interval: 100 * time.Millisecond,
MaxDataPoints: 100,
JSON: modelBytes,
}
result := scenario.Handler(req.Queries[0], req)
table := result.Tables[0]
req := &backend.QueryDataRequest{
PluginContext: backend.PluginContext{},
Queries: []backend.DataQuery{query},
}
require.Greater(t, len(table.Rows), 50)
for _, row := range table.Rows {
value := row[1]
min := row[2]
max := row[3]
resp, err := p.handleRandomWalkTableScenario(context.Background(), req)
require.NoError(t, err)
require.NotNil(t, resp)
dResp, exists := resp.Responses[query.RefID]
require.True(t, exists)
require.NoError(t, dResp.Error)
require.Len(t, dResp.Frames, 1)
frame := dResp.Frames[0]
require.Greater(t, frame.Rows(), 50)
require.Len(t, frame.Fields, 5)
require.Equal(t, "Time", frame.Fields[0].Name)
require.Equal(t, "Value", frame.Fields[1].Name)
require.Equal(t, "Min", frame.Fields[2].Name)
require.Equal(t, "Max", frame.Fields[3].Name)
require.Equal(t, "Info", frame.Fields[4].Name)
for i := 0; i < frame.Rows(); i++ {
value, ok := frame.ConcreteAt(1, i)
require.True(t, ok)
min, ok := frame.ConcreteAt(2, i)
require.True(t, ok)
max, ok := frame.ConcreteAt(3, i)
require.True(t, ok)
require.Less(t, min, value)
require.Greater(t, max, value)
@@ -57,66 +117,98 @@ func TestTestdataScenarios(t *testing.T) {
})
t.Run("Should return a table with some nil values", func(t *testing.T) {
req := &tsdb.TsdbQuery{
TimeRange: tsdb.NewFakeTimeRange("5m", "now", time.Now()),
Queries: []*tsdb.Query{
{RefId: "A", IntervalMs: 100, MaxDataPoints: 100, Model: simplejson.New()},
timeRange := tsdb.NewFakeTimeRange("5m", "now", time.Now())
model := simplejson.New()
model.Set("withNil", true)
modelBytes, err := model.MarshalJSON()
require.NoError(t, err)
query := backend.DataQuery{
RefID: "A",
TimeRange: backend.TimeRange{
From: timeRange.MustGetFrom(),
To: timeRange.MustGetTo(),
},
Interval: 100 * time.Millisecond,
MaxDataPoints: 100,
JSON: modelBytes,
}
query := req.Queries[0]
query.Model.Set("withNil", true)
result := scenario.Handler(req.Queries[0], req)
table := result.Tables[0]
req := &backend.QueryDataRequest{
PluginContext: backend.PluginContext{},
Queries: []backend.DataQuery{query},
}
nil1 := false
nil2 := false
nil3 := false
resp, err := p.handleRandomWalkTableScenario(context.Background(), req)
require.NoError(t, err)
require.NotNil(t, resp)
require.Greater(t, len(table.Rows), 50)
for _, row := range table.Rows {
if row[1] == nil {
nil1 = true
dResp, exists := resp.Responses[query.RefID]
require.True(t, exists)
require.NoError(t, dResp.Error)
require.Len(t, dResp.Frames, 1)
frame := dResp.Frames[0]
require.Greater(t, frame.Rows(), 50)
require.Len(t, frame.Fields, 5)
require.Equal(t, "Time", frame.Fields[0].Name)
require.Equal(t, "Value", frame.Fields[1].Name)
require.Equal(t, "Min", frame.Fields[2].Name)
require.Equal(t, "Max", frame.Fields[3].Name)
require.Equal(t, "Info", frame.Fields[4].Name)
valNil := false
minNil := false
maxNil := false
for i := 0; i < frame.Rows(); i++ {
_, ok := frame.ConcreteAt(1, i)
if !ok {
valNil = true
}
if row[2] == nil {
nil2 = true
_, ok = frame.ConcreteAt(2, i)
if !ok {
minNil = true
}
if row[3] == nil {
nil3 = true
_, ok = frame.ConcreteAt(3, i)
if !ok {
maxNil = true
}
}
require.True(t, nil1)
require.True(t, nil2)
require.True(t, nil3)
require.True(t, valNil)
require.True(t, minNil)
require.True(t, maxNil)
})
})
}
func TestParseLabels(t *testing.T) {
expectedTags := map[string]string{
expectedTags := data.Labels{
"job": "foo",
"instance": "bar",
}
query1 := tsdb.Query{
Model: simplejson.NewFromAny(map[string]interface{}{
tcs := []struct {
model map[string]interface{}
}{
{model: map[string]interface{}{
"labels": `{job="foo", instance="bar"}`,
}),
}
require.Equal(t, expectedTags, parseLabels(&query1))
query2 := tsdb.Query{
Model: simplejson.NewFromAny(map[string]interface{}{
}},
{model: map[string]interface{}{
"labels": `job=foo, instance=bar`,
}),
}
require.Equal(t, expectedTags, parseLabels(&query2))
query3 := tsdb.Query{
Model: simplejson.NewFromAny(map[string]interface{}{
}},
{model: map[string]interface{}{
"labels": `job = foo,instance = bar`,
}),
}},
}
for i, tc := range tcs {
model := simplejson.NewFromAny(tc.model)
assert.Equal(t, expectedTags, parseLabels(model), fmt.Sprintf("Actual tags in test case %d doesn't match expected tags", i+1))
}
require.Equal(t, expectedTags, parseLabels(&query3))
}

View File

@@ -1,42 +1,42 @@
package testdatasource
import (
"context"
"net/http"
"github.com/grafana/grafana-plugin-sdk-go/backend"
"github.com/grafana/grafana-plugin-sdk-go/backend/datasource"
"github.com/grafana/grafana-plugin-sdk-go/backend/resource/httpadapter"
"github.com/grafana/grafana/pkg/infra/log"
"github.com/grafana/grafana/pkg/models"
"github.com/grafana/grafana/pkg/tsdb"
"github.com/grafana/grafana/pkg/plugins/backendplugin"
"github.com/grafana/grafana/pkg/plugins/backendplugin/coreplugin"
"github.com/grafana/grafana/pkg/registry"
)
type TestDataExecutor struct {
*models.DataSource
log log.Logger
}
func NewTestDataExecutor(dsInfo *models.DataSource) (tsdb.TsdbQueryEndpoint, error) {
return &TestDataExecutor{
DataSource: dsInfo,
log: log.New("tsdb.testdata"),
}, nil
}
func init() {
tsdb.RegisterTsdbQueryEndpoint("testdata", NewTestDataExecutor)
registry.RegisterService(&testDataPlugin{})
}
func (e *TestDataExecutor) Query(ctx context.Context, dsInfo *models.DataSource, tsdbQuery *tsdb.TsdbQuery) (*tsdb.Response, error) {
result := &tsdb.Response{}
result.Results = make(map[string]*tsdb.QueryResult)
type testDataPlugin struct {
BackendPluginManager backendplugin.Manager `inject:""`
logger log.Logger
scenarios map[string]*Scenario
queryMux *datasource.QueryTypeMux
}
for _, query := range tsdbQuery.Queries {
scenarioId := query.Model.Get("scenarioId").MustString("random_walk")
if scenario, exist := ScenarioRegistry[scenarioId]; exist {
result.Results[query.RefId] = scenario.Handler(query, tsdbQuery)
result.Results[query.RefId].RefId = query.RefId
} else {
e.log.Error("Scenario not found", "scenarioId", scenarioId)
}
func (p *testDataPlugin) Init() error {
p.logger = log.New("tsdb.testdata")
p.scenarios = map[string]*Scenario{}
p.queryMux = datasource.NewQueryTypeMux()
p.registerScenarios()
resourceMux := http.NewServeMux()
p.registerRoutes(resourceMux)
factory := coreplugin.New(backend.ServeOpts{
QueryDataHandler: p.queryMux,
CallResourceHandler: httpadapter.New(resourceMux),
})
err := p.BackendPluginManager.Register("testdata", factory)
if err != nil {
p.logger.Error("Failed to register plugin", "error", err)
}
return result, nil
return nil
}

View File

@@ -1,6 +1,5 @@
import set from 'lodash/set';
import { from, merge, Observable, of } from 'rxjs';
import { delay, map } from 'rxjs/operators';
import { delay } from 'rxjs/operators';
import {
AnnotationEvent,
@@ -8,20 +7,17 @@ import {
arrowTableToDataFrame,
base64StringToArrowTable,
DataFrame,
DataQueryError,
DataQueryRequest,
DataQueryResponse,
DataSourceApi,
DataSourceInstanceSettings,
DataTopic,
LiveChannelScope,
LoadingState,
TableData,
TimeRange,
TimeSeries,
} from '@grafana/data';
import { Scenario, TestDataQuery } from './types';
import {
DataSourceWithBackend,
getBackendSrv,
getLiveMeasurementsObserver,
getTemplateSrv,
@@ -34,9 +30,7 @@ import { getSearchFilterScopedVar } from 'app/features/variables/utils';
import { TestDataVariableSupport } from './variables';
import { generateRandomNodes, savedNodesResponse } from './nodeGraphUtils';
type TestData = TimeSeries | TableData;
export class TestDataDataSource extends DataSourceApi<TestDataQuery> {
export class TestDataDataSource extends DataSourceWithBackend<TestDataQuery> {
scenariosCache?: Promise<Scenario[]>;
constructor(
@@ -48,7 +42,7 @@ export class TestDataDataSource extends DataSourceApi<TestDataQuery> {
}
query(options: DataQueryRequest<TestDataQuery>): Observable<DataQueryResponse> {
const queries: any[] = [];
const backendQueries: TestDataQuery[] = [];
const streams: Array<Observable<DataQueryResponse>> = [];
// Start streams and prepare queries
@@ -80,68 +74,21 @@ export class TestDataDataSource extends DataSourceApi<TestDataQuery> {
streams.push(this.nodesQuery(target, options));
break;
default:
queries.push({
...target,
intervalMs: options.intervalMs,
maxDataPoints: options.maxDataPoints,
datasourceId: this.id,
alias: this.templateSrv.replace(target.alias || '', options.scopedVars),
});
backendQueries.push(target);
}
}
if (queries.length) {
const stream = getBackendSrv()
.fetch({
method: 'POST',
url: '/api/tsdb/query',
data: {
from: options.range.from.valueOf().toString(),
to: options.range.to.valueOf().toString(),
queries: queries,
},
})
.pipe(map((res) => this.processQueryResult(queries, res)));
streams.push(stream);
if (backendQueries.length) {
const backendOpts = {
...options,
targets: backendQueries,
};
streams.push(super.query(backendOpts));
}
return merge(...streams);
}
processQueryResult(queries: any, res: any): DataQueryResponse {
const data: TestData[] = [];
let error: DataQueryError | undefined = undefined;
for (const query of queries) {
const results = res.data.results[query.refId];
for (const t of results.tables || []) {
const table = t as TableData;
table.refId = query.refId;
table.name = query.alias;
if (query.scenarioId === 'logs') {
set(table, 'meta.preferredVisualisationType', 'logs');
}
data.push(table);
}
for (const series of results.series || []) {
data.push({ target: series.name, datapoints: series.points, refId: query.refId, tags: series.tags });
}
if (results.error) {
error = {
message: results.error,
};
}
}
return { data, error };
}
annotationDataTopicTest(target: TestDataQuery, req: DataQueryRequest<TestDataQuery>): Observable<DataQueryResponse> {
return new Observable<DataQueryResponse>((observer) => {
const events = this.buildFakeAnnotationEvents(req.range, 10);
@@ -190,7 +137,7 @@ export class TestDataDataSource extends DataSourceApi<TestDataQuery> {
getScenarios(): Promise<Scenario[]> {
if (!this.scenariosCache) {
this.scenariosCache = getBackendSrv().get('/api/tsdb/testdata/scenarios');
this.scenariosCache = this.getResource('scenarios');
}
return this.scenariosCache;