mirror of https://github.com/grafana/grafana.git, synced 2025-02-25 18:55:37 -06:00

Tempo: Show graph view of the trace (#33635)

* Add transform
* Add test for transform
* Add test
* Update test

This commit is contained in:
parent 1e3d19e483
commit 24e52004a1
@@ -9,6 +9,12 @@ describe('Tempo data source', () => {
    setupBackendSrv(
      new MutableDataFrame({
        fields: [
          { name: 'traceID', values: ['04450900759028499335'] },
          { name: 'spanID', values: ['4322526419282105830'] },
          { name: 'parentSpanID', values: [''] },
          { name: 'operationName', values: ['store.validateQueryTimeRange'] },
          { name: 'startTime', values: [1619712655875.4539] },
          { name: 'duration', values: [14.984] },
          { name: 'serviceTags', values: ['{"key":"servicetag1","value":"service"}'] },
          { name: 'logs', values: ['{"timestamp":12345,"fields":[{"key":"count","value":1}]}'] },
          { name: 'tags', values: ['{"key":"tag1","value":"val1"}'] },
@@ -17,20 +23,50 @@ describe('Tempo data source', () => {
      })
    );
    const ds = new TempoDatasource(defaultSettings);
    await expect(ds.query({ targets: [{ refId: 'refid1' }] } as any)).toEmitValuesWith((response) => {
      const fields = (response[0].data[0] as DataFrame).fields;
      expect(
        fields.map((f) => ({
          name: f.name,
          values: f.values.toArray(),
        }))
      ).toMatchObject([
        { name: 'serviceTags', values: [{ key: 'servicetag1', value: 'service' }] },
        { name: 'logs', values: [{ timestamp: 12345, fields: [{ key: 'count', value: 1 }] }] },
        { name: 'tags', values: [{ key: 'tag1', value: 'val1' }] },
        { name: 'serviceName', values: ['service'] },
      ]);
    });
    const response = await ds.query({ targets: [{ refId: 'refid1' }] } as any).toPromise();

    expect(
      (response.data[0] as DataFrame).fields.map((f) => ({
        name: f.name,
        values: f.values.toArray(),
      }))
    ).toMatchObject([
      { name: 'traceID', values: ['04450900759028499335'] },
      { name: 'spanID', values: ['4322526419282105830'] },
      { name: 'parentSpanID', values: [''] },
      { name: 'operationName', values: ['store.validateQueryTimeRange'] },
      { name: 'startTime', values: [1619712655875.4539] },
      { name: 'duration', values: [14.984] },
      { name: 'serviceTags', values: [{ key: 'servicetag1', value: 'service' }] },
      { name: 'logs', values: [{ timestamp: 12345, fields: [{ key: 'count', value: 1 }] }] },
      { name: 'tags', values: [{ key: 'tag1', value: 'val1' }] },
      { name: 'serviceName', values: ['service'] },
    ]);

    expect(
      (response.data[1] as DataFrame).fields.map((f) => ({
        name: f.name,
        values: f.values.toArray(),
      }))
    ).toMatchObject([
      { name: 'id', values: ['4322526419282105830'] },
      { name: 'title', values: ['service'] },
      { name: 'subTitle', values: ['store.validateQueryTimeRange'] },
      { name: 'mainStat', values: ['total: 14.98ms (100%)'] },
      { name: 'secondaryStat', values: ['self: 14.98ms (100%)'] },
      { name: 'color', values: [1.000007560204647] },
    ]);

    expect(
      (response.data[2] as DataFrame).fields.map((f) => ({
        name: f.name,
        values: f.values.toArray(),
      }))
    ).toMatchObject([
      { name: 'id', values: [] },
      { name: 'target', values: [] },
      { name: 'source', values: [] },
    ]);
  });
});
@@ -12,6 +12,7 @@ import {
import { DataSourceWithBackend } from '@grafana/runtime';
import { Observable } from 'rxjs';
import { map } from 'rxjs/operators';
import { createGraphFrames } from './graphTransform';

export type TempoQuery = {
  query: string;
@@ -28,7 +29,6 @@ export class TempoDatasource extends DataSourceWithBackend<TempoQuery> {
        if (response.error) {
          return response;
        }

        // We need to parse some of the fields which contain stringified json.
        // Seems like we can't just map the values as the frame we got from backend has some default processing
        // and will stringify the json back when we try to set it. So we create a new field and swap it instead.
@@ -38,26 +38,12 @@ export class TempoDatasource extends DataSourceWithBackend<TempoQuery> {
          return emptyDataQueryResponse;
        }

        for (const fieldName of ['serviceTags', 'logs', 'tags']) {
          const field = frame.fields.find((f) => f.name === fieldName);
          if (field) {
            const fieldIndex = frame.fields.indexOf(field);
            const values = new ArrayVector();
            const newField: Field = {
              ...field,
              values,
              type: FieldType.other,
            };
        parseJsonFields(frame);

            for (let i = 0; i < field.values.length; i++) {
              const value = field.values.get(i);
              values.set(i, value === '' ? undefined : JSON.parse(value));
            }
            frame.fields[fieldIndex] = newField;
          }
        }

        return response;
        return {
          ...response,
          data: [...response.data, ...createGraphFrames(frame)],
        };
      })
    );
  }
@@ -77,6 +63,30 @@ export class TempoDatasource extends DataSourceWithBackend<TempoQuery> {
  }
}

/**
 * Change fields which are json string into JS objects. Modifies the frame in place.
 */
function parseJsonFields(frame: DataFrame) {
  for (const fieldName of ['serviceTags', 'logs', 'tags']) {
    const field = frame.fields.find((f) => f.name === fieldName);
    if (field) {
      const fieldIndex = frame.fields.indexOf(field);
      const values = new ArrayVector();
      const newField: Field = {
        ...field,
        values,
        type: FieldType.other,
      };

      for (let i = 0; i < field.values.length; i++) {
        const value = field.values.get(i);
        values.set(i, value === '' ? undefined : JSON.parse(value));
      }
      frame.fields[fieldIndex] = newField;
    }
  }
}

const emptyDataQueryResponse = {
  data: [
    new MutableDataFrame({
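The inline loop removed from query() above is factored into this parseJsonFields helper. For reference, a minimal self-contained sketch of the same field-swap technique, using the serviceTags value from the test fixture above (the helper itself is module-private, so the sketch inlines its logic):

import { ArrayVector, Field, FieldType, MutableDataFrame } from '@grafana/data';

// Sketch of the swap parseJsonFields performs: build a new ArrayVector with the
// parsed values and replace the original string field on the frame.
const frame = new MutableDataFrame({
  fields: [{ name: 'serviceTags', values: ['{"key":"servicetag1","value":"service"}'] }],
});

const field = frame.fields.find((f) => f.name === 'serviceTags')!;
const values = new ArrayVector();
for (let i = 0; i < field.values.length; i++) {
  const value = field.values.get(i);
  values.set(i, value === '' ? undefined : JSON.parse(value));
}
const newField: Field = { ...field, values, type: FieldType.other };
frame.fields[frame.fields.indexOf(field)] = newField;

// values.get(0) is now { key: 'servicetag1', value: 'service' } instead of a string.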
public/app/plugins/datasource/tempo/graphTransform.test.ts (new file, 64 lines)
@@ -0,0 +1,64 @@
import { createGraphFrames } from './graphTransform';
import { bigResponse } from './testResponse';
import { DataFrameView, MutableDataFrame } from '@grafana/data';

describe('createGraphFrames', () => {
  it('transforms basic response into nodes and edges frame', async () => {
    const frames = createGraphFrames(bigResponse);
    expect(frames.length).toBe(2);
    expect(frames[0].length).toBe(30);
    expect(frames[1].length).toBe(29);

    let view = new DataFrameView(frames[0]);
    expect(view.get(0)).toMatchObject({
      id: '4322526419282105830',
      title: 'loki-all',
      subTitle: 'store.validateQueryTimeRange',
      mainStat: 'total: 0ms (0.02%)',
      secondaryStat: 'self: 0ms (100%)',
      color: 0.00021968356127648162,
    });

    expect(view.get(29)).toMatchObject({
      id: '4450900759028499335',
      title: 'loki-all',
      subTitle: 'HTTP GET - loki_api_v1_query_range',
      mainStat: 'total: 18.21ms (100%)',
      secondaryStat: 'self: 3.22ms (17.71%)',
      color: 0.17707117189595056,
    });

    view = new DataFrameView(frames[1]);
    expect(view.get(28)).toMatchObject({
      id: '4450900759028499335--4790760741274015949',
    });
  });

  it('handles single span response', async () => {
    const frames = createGraphFrames(singleSpanResponse);
    expect(frames.length).toBe(2);
    expect(frames[0].length).toBe(1);

    const view = new DataFrameView(frames[0]);
    expect(view.get(0)).toMatchObject({
      id: '4322526419282105830',
      title: 'loki-all',
      subTitle: 'store.validateQueryTimeRange',
      mainStat: 'total: 14.98ms (100%)',
      secondaryStat: 'self: 14.98ms (100%)',
      color: 1.000007560204647,
    });
  });
});

const singleSpanResponse = new MutableDataFrame({
  fields: [
    { name: 'traceID', values: ['04450900759028499335'] },
    { name: 'spanID', values: ['4322526419282105830'] },
    { name: 'parentSpanID', values: [''] },
    { name: 'operationName', values: ['store.validateQueryTimeRange'] },
    { name: 'serviceName', values: ['loki-all'] },
    { name: 'startTime', values: [1619712655875.4539] },
    { name: 'duration', values: [14.984] },
  ],
});
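These stats come from the per-span math in convertTraceToGraph (graphTransform.ts below): mainStat shows the span duration and its share of the whole trace, secondaryStat shows the self time (duration minus the children's non-overlapping duration) and its share of the span, and color is self time divided by trace duration. A quick arithmetic check against the second node asserted above, using the rounded display values (hence the small drift from the asserted 17.71% and 0.17707):

// Node 29 of bigResponse: duration of about 18.21ms, which also equals the trace
// duration (mainStat reads 100%), and self time of about 3.22ms.
const traceDuration = 18.21;
const spanDuration = 18.21;
const selfDuration = 3.22;
console.log((spanDuration / traceDuration) * 100); // 100 -> 'total: 18.21ms (100%)'
console.log((selfDuration / spanDuration) * 100); // ~17.68 -> 'self: 3.22ms (17.71%)' before rounding drift
console.log(selfDuration / traceDuration); // ~0.1768 -> the asserted color 0.17707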
public/app/plugins/datasource/tempo/graphTransform.ts (new file, 198 lines)
@@ -0,0 +1,198 @@
import { DataFrame, DataFrameView, FieldType, MutableDataFrame } from '@grafana/data';
import { NodeGraphDataFrameFieldNames as Fields } from '@grafana/ui';

interface Row {
  traceID: string;
  spanID: string;
  parentSpanID: string;
  operationName: string;
  serviceName: string;
  serviceTags: string;
  startTime: number;
  duration: number;
  logs: string;
  tags: string;
}

interface Node {
  [Fields.id]: string;
  [Fields.title]: string;
  [Fields.subTitle]: string;
  [Fields.mainStat]: string;
  [Fields.secondaryStat]: string;
  [Fields.color]: number;
}

interface Edge {
  [Fields.id]: string;
  [Fields.target]: string;
  [Fields.source]: string;
}

export function createGraphFrames(data: DataFrame): DataFrame[] {
  const { nodes, edges } = convertTraceToGraph(data);

  const nodesFrame = new MutableDataFrame({
    fields: [
      { name: Fields.id, type: FieldType.string },
      { name: Fields.title, type: FieldType.string },
      { name: Fields.subTitle, type: FieldType.string },
      { name: Fields.mainStat, type: FieldType.string },
      { name: Fields.secondaryStat, type: FieldType.string },
      { name: Fields.color, type: FieldType.number, config: { color: { mode: 'continuous-GrYlRd' } } },
    ],
    meta: {
      preferredVisualisationType: 'nodeGraph',
    },
  });

  for (const node of nodes) {
    nodesFrame.add(node);
  }

  const edgesFrame = new MutableDataFrame({
    fields: [
      { name: Fields.id, type: FieldType.string },
      { name: Fields.target, type: FieldType.string },
      { name: Fields.source, type: FieldType.string },
    ],
    meta: {
      preferredVisualisationType: 'nodeGraph',
    },
  });

  for (const edge of edges) {
    edgesFrame.add(edge);
  }

  return [nodesFrame, edgesFrame];
}
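// Illustrative note (not in the committed file): the two frames are linked by span IDs.
// Each edge's source/target values refer to node ids in the nodes frame, and the edge id
// is '<parentSpanID>--<spanID>' (see convertTraceToGraph below and the id asserted in
// graphTransform.test.ts).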

function convertTraceToGraph(data: DataFrame): { nodes: Node[]; edges: Edge[] } {
  const nodes: Node[] = [];
  const edges: Edge[] = [];

  const view = new DataFrameView<Row>(data);

  const traceDuration = findTraceDuration(view);
  const spanMap = makeSpanMap(view);

  for (let i = 0; i < view.length; i++) {
    const row = view.get(i);

    const childrenDuration = getDuration(spanMap[row.spanID].children.map((c) => spanMap[c].span));
    const selfDuration = row.duration - childrenDuration;

    nodes.push({
      [Fields.id]: row.spanID,
      [Fields.title]: row.serviceName ?? '',
      [Fields.subTitle]: row.operationName,
      [Fields.mainStat]: `total: ${toFixedNoTrailingZeros(row.duration)}ms (${toFixedNoTrailingZeros(
        (row.duration / traceDuration) * 100
      )}%)`,
      [Fields.secondaryStat]: `self: ${toFixedNoTrailingZeros(selfDuration)}ms (${toFixedNoTrailingZeros(
        (selfDuration / row.duration) * 100
      )}%)`,
      [Fields.color]: selfDuration / traceDuration,
    });

    if (row.parentSpanID) {
      edges.push({
        [Fields.id]: row.parentSpanID + '--' + row.spanID,
        [Fields.target]: row.spanID,
        [Fields.source]: row.parentSpanID,
      });
    }
  }

  return { nodes, edges };
}

function toFixedNoTrailingZeros(n: number) {
  return parseFloat(n.toFixed(2));
}
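// e.g. toFixedNoTrailingZeros(14.984) === 14.98 and toFixedNoTrailingZeros(100) === 100
// (toFixed(2) rounds to two decimals, parseFloat drops any trailing zeros).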

/**
 * Get the duration of the whole trace as it isn't a part of the response data.
 * Note: Seems like this should be the same as just longest span, but this is probably safer.
 */
function findTraceDuration(view: DataFrameView<Row>): number {
  let traceEndTime = 0;
  let traceStartTime = Infinity;

  for (let i = 0; i < view.length; i++) {
    const row = view.get(i);

    if (row.startTime < traceStartTime) {
      traceStartTime = row.startTime;
    }

    if (row.startTime + row.duration > traceEndTime) {
      traceEndTime = row.startTime + row.duration;
    }
  }

  return traceEndTime - traceStartTime;
}
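// e.g. spans starting at t=0 with duration 10 and at t=5 with duration 20 give a
// trace duration of (5 + 20) - 0 = 25, even though no single span lasts 25.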

/**
 * Returns a map of the spans with children array for easier processing.
 */
function makeSpanMap(view: DataFrameView<Row>): { [id: string]: { span: Row; children: string[] } } {
  const spanMap: { [id: string]: { span?: Row; children: string[] } } = {};

  for (let i = 0; i < view.length; i++) {
    const row = view.get(i);

    if (!spanMap[row.spanID]) {
      spanMap[row.spanID] = {
        // Need copy because of how the view works
        span: { ...row },
        children: [],
      };
    } else {
      spanMap[row.spanID].span = { ...row };
    }
    if (!spanMap[row.parentSpanID]) {
      spanMap[row.parentSpanID] = {
        span: undefined,
        children: [row.spanID],
      };
    } else {
      spanMap[row.parentSpanID].children.push(row.spanID);
    }
  }
  return spanMap as { [id: string]: { span: Row; children: string[] } };
}
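// e.g. rows { spanID: 'a', parentSpanID: '' } and { spanID: 'b', parentSpanID: 'a' } yield
// { '': { span: undefined, children: ['a'] }, a: { span: {...}, children: ['b'] }, b: { span: {...}, children: [] } },
// so the root span shows up under the '' key's children.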

/**
 * Get non overlapping duration of the spans.
 */
function getDuration(rows: Row[]): number {
  const ranges = rows.map<[number, number]>((r) => [r.startTime, r.startTime + r.duration]);
  ranges.sort((a, b) => a[0] - b[0]);
  const mergedRanges = ranges.reduce((acc, range) => {
    if (!acc.length) {
      return [range];
    }
    const tail = acc.slice(-1)[0];
    const [prevStart, prevEnd] = tail;
    const [start, end] = range;
    if (end < prevEnd) {
      // In this case the range is completely inside the prev range so we can just ignore it.
      return acc;
    }

    if (start > prevEnd) {
      // There is no overlap so we can just add it to stack
      return [...acc, range];
    }

    // We know there is overlap and current range ends later than previous so we can just extend the range
    return [...acc.slice(0, -1), [prevStart, end]] as Array<[number, number]>;
  }, [] as Array<[number, number]>);

  return mergedRanges.reduce((acc, range) => {
    return acc + (range[1] - range[0]);
  }, 0);
}
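A worked example of the non-overlapping merge performed by getDuration, with made-up rows (only startTime and duration matter here; the other Row fields are omitted for brevity):

// Ranges derived from the rows: [0, 10], [5, 15], [20, 25].
// [0, 10] and [5, 15] overlap and merge to [0, 15]; [20, 25] stands alone,
// so the non-overlapping duration is (15 - 0) + (25 - 20) = 20.
const rows = [
  { startTime: 0, duration: 10 },
  { startTime: 5, duration: 10 },
  { startTime: 20, duration: 5 },
];
// getDuration(rows as unknown as Row[]) === 20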
public/app/plugins/datasource/tempo/testResponse.ts (new file, 1850 lines)
File diff suppressed because it is too large.