Cloud Monitoring: Reduce request size when listing labels (#44365)
parent 7593fc0a20
commit d1083b9597
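
In short: the backend used to accumulate every observed label value into a single map[string][]string and attach that full map to every frame's custom metadata; after this change each frame carries only its own map[string]string, and the frontend merges those per-frame maps when listing labels. A minimal runnable sketch of the two shapes, with made-up values (nothing below comes from the commit itself):

package main

import "fmt"

func main() {
    // Old shape (illustrative values): one aggregated map for the whole
    // response, every observed value kept per key, attached to every frame.
    before := map[string][]string{
        "resource.type":              {"gce_instance"},
        "resource.label.instance_id": {"1111", "2222", "3333"},
    }

    // New shape: each frame's Meta.Custom["labels"] holds only that frame's
    // own values, so far less label data is attached per frame.
    after := []map[string]string{
        {"resource.type": "gce_instance", "resource.label.instance_id": "1111"},
        {"resource.type": "gce_instance", "resource.label.instance_id": "2222"},
        {"resource.type": "gce_instance", "resource.label.instance_id": "3333"},
    }

    fmt.Println(before)
    fmt.Println(after)
}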
@@ -85,16 +85,13 @@ func (timeSeriesFilter *cloudMonitoringTimeSeriesFilter) run(ctx context.Context
 //nolint: gocyclo
 func (timeSeriesFilter *cloudMonitoringTimeSeriesFilter) parseResponse(queryRes *backend.DataResponse,
     response cloudMonitoringResponse, executedQueryString string) error {
-    labels := make(map[string]map[string]bool)
     frames := data.Frames{}

-    customFrameMeta := map[string]interface{}{}
-    customFrameMeta["alignmentPeriod"] = timeSeriesFilter.Params.Get("aggregation.alignmentPeriod")
-    customFrameMeta["perSeriesAligner"] = timeSeriesFilter.Params.Get("aggregation.perSeriesAligner")
     for _, series := range response.TimeSeries {
         seriesLabels := data.Labels{}
         defaultMetricName := series.Metric.Type
-        labels["resource.type"] = map[string]bool{series.Resource.Type: true}
+        labels := make(map[string]string)
+        labels["resource.type"] = series.Resource.Type
         seriesLabels["resource.type"] = series.Resource.Type

         frame := data.NewFrameOfFieldTypes("", len(series.Points), data.FieldTypeTime, data.FieldTypeFloat64)
@@ -104,10 +101,7 @@ func (timeSeriesFilter *cloudMonitoringTimeSeriesFilter) parseResponse(queryRes
         }

         for key, value := range series.Metric.Labels {
-            if _, ok := labels["metric.label."+key]; !ok {
-                labels["metric.label."+key] = map[string]bool{}
-            }
-            labels["metric.label."+key][value] = true
+            labels["metric.label."+key] = value
             seriesLabels["metric.label."+key] = value

             if len(timeSeriesFilter.GroupBys) == 0 || containsLabel(timeSeriesFilter.GroupBys, "metric.label."+key) {
@@ -116,10 +110,7 @@ func (timeSeriesFilter *cloudMonitoringTimeSeriesFilter) parseResponse(queryRes
         }

         for key, value := range series.Resource.Labels {
-            if _, ok := labels["resource.label."+key]; !ok {
-                labels["resource.label."+key] = map[string]bool{}
-            }
-            labels["resource.label."+key][value] = true
+            labels["resource.label."+key] = value
             seriesLabels["resource.label."+key] = value

             if containsLabel(timeSeriesFilter.GroupBys, "resource.label."+key) {
@@ -130,22 +121,19 @@ func (timeSeriesFilter *cloudMonitoringTimeSeriesFilter) parseResponse(queryRes
         for labelType, labelTypeValues := range series.MetaData {
             for labelKey, labelValue := range labelTypeValues {
                 key := toSnakeCase(fmt.Sprintf("metadata.%s.%s", labelType, labelKey))
-                if _, ok := labels[key]; !ok {
-                    labels[key] = map[string]bool{}
-                }

                 switch v := labelValue.(type) {
                 case string:
-                    labels[key][v] = true
+                    labels[key] = v
                     seriesLabels[key] = v
                 case bool:
                     strVal := strconv.FormatBool(v)
-                    labels[key][strVal] = true
+                    labels[key] = strVal
                     seriesLabels[key] = strVal
                 case []interface{}:
                     for _, v := range v {
                         strVal := v.(string)
-                        labels[key][strVal] = true
+                        labels[key] = strVal
                         if len(seriesLabels[key]) > 0 {
                             strVal = fmt.Sprintf("%s, %s", seriesLabels[key], strVal)
                         }
@@ -155,6 +143,17 @@ func (timeSeriesFilter *cloudMonitoringTimeSeriesFilter) parseResponse(queryRes
             }
         }

+        customFrameMeta := map[string]interface{}{}
+        customFrameMeta["alignmentPeriod"] = timeSeriesFilter.Params.Get("aggregation.alignmentPeriod")
+        customFrameMeta["perSeriesAligner"] = timeSeriesFilter.Params.Get("aggregation.perSeriesAligner")
+        customFrameMeta["labels"] = labels
+        customFrameMeta["groupBys"] = timeSeriesFilter.GroupBys
+        if frame.Meta != nil {
+            frame.Meta.Custom = customFrameMeta
+        } else {
+            frame.SetMeta(&data.FrameMeta{Custom: customFrameMeta})
+        }
+
         // reverse the order to be ascending
         if series.ValueType != "DISTRIBUTION" {
             timeSeriesFilter.handleNonDistributionSeries(series, defaultMetricName, seriesLabels, frame)
@@ -234,23 +233,6 @@ func (timeSeriesFilter *cloudMonitoringTimeSeriesFilter) parseResponse(queryRes
         frames = addConfigData(frames, dl, response.Unit)
     }

-    labelsByKey := make(map[string][]string)
-    for key, values := range labels {
-        for value := range values {
-            labelsByKey[key] = append(labelsByKey[key], value)
-        }
-    }
-    customFrameMeta["labels"] = labelsByKey
-    customFrameMeta["groupBys"] = timeSeriesFilter.GroupBys
-
-    for _, frame := range frames {
-        if frame.Meta != nil {
-            frame.Meta.Custom = customFrameMeta
-        } else {
-            frame.SetMeta(&data.FrameMeta{Custom: customFrameMeta})
-        }
-    }
-
     queryRes.Frames = frames

     return nil
@@ -370,6 +370,22 @@ func TestTimeSeriesFilter(t *testing.T) {
             assert.Equal(t, "test-proj - asia-northeast1-c - 6724404429462225363 - 200", frames[0].Fields[1].Name)
         })
     })
+
+    t.Run("Parse labels", func(t *testing.T) {
+        data, err := loadTestFile("./test-data/5-series-response-meta-data.json")
+        require.NoError(t, err)
+        assert.Equal(t, 3, len(data.TimeSeries))
+        res := &backend.DataResponse{}
+        query := &cloudMonitoringTimeSeriesFilter{Params: url.Values{}}
+        err = query.parseResponse(res, data, "")
+        require.NoError(t, err)
+        frames := res.Frames
+        custom, ok := frames[0].Meta.Custom.(map[string]interface{})
+        require.True(t, ok)
+        labels, ok := custom["labels"].(map[string]string)
+        require.True(t, ok)
+        assert.Equal(t, "114250375703598695", labels["resource.label.instance_id"])
+    })
 }

 func loadTestFile(path string) (cloudMonitoringResponse, error) {
@@ -82,10 +82,8 @@ func (timeSeriesQuery cloudMonitoringTimeSeriesQuery) run(ctx context.Context, r

 func (timeSeriesQuery cloudMonitoringTimeSeriesQuery) parseResponse(queryRes *backend.DataResponse,
     response cloudMonitoringResponse, executedQueryString string) error {
-    labels := make(map[string]map[string]bool)
     frames := data.Frames{}

-    customFrameMeta := map[string]interface{}{}
     for _, series := range response.TimeSeriesData {
         seriesLabels := make(map[string]string)
         frame := data.NewFrameOfFieldTypes("", len(series.PointData), data.FieldTypeTime, data.FieldTypeFloat64)
@@ -93,25 +91,23 @@ func (timeSeriesQuery cloudMonitoringTimeSeriesQuery) parseResponse(queryRes *ba
         frame.Meta = &data.FrameMeta{
             ExecutedQueryString: executedQueryString,
         }
+        labels := make(map[string]string)

         for n, d := range response.TimeSeriesDescriptor.LabelDescriptors {
             key := toSnakeCase(d.Key)
             key = strings.Replace(key, ".", ".label.", 1)
-            if _, ok := labels[key]; !ok {
-                labels[key] = map[string]bool{}
-            }

             labelValue := series.LabelValues[n]
             switch d.ValueType {
             case "BOOL":
                 strVal := strconv.FormatBool(labelValue.BoolValue)
-                labels[key][strVal] = true
+                labels[key] = strVal
                 seriesLabels[key] = strVal
             case "INT64":
-                labels[key][labelValue.Int64Value] = true
+                labels[key] = labelValue.Int64Value
                 seriesLabels[key] = labelValue.Int64Value
             default:
-                labels[key][labelValue.StringValue] = true
+                labels[key] = labelValue.StringValue
                 seriesLabels[key] = labelValue.StringValue
             }
         }
@@ -125,10 +121,7 @@ func (timeSeriesQuery cloudMonitoringTimeSeriesQuery) parseResponse(queryRes *ba
             continue
         }

-        if _, ok := labels["metric.name"]; !ok {
-            labels["metric.name"] = map[string]bool{}
-        }
-        labels["metric.name"][d.Key] = true
+        labels["metric.name"] = d.Key
         seriesLabels["metric.name"] = d.Key
         defaultMetricName := d.Key

@@ -239,27 +232,19 @@ func (timeSeriesQuery cloudMonitoringTimeSeriesQuery) parseResponse(queryRes *ba
                 frames = append(frames, buckets[i])
             }
         }
     }
-    if len(response.TimeSeriesData) > 0 {
-        dl := timeSeriesQuery.buildDeepLink()
-        frames = addConfigData(frames, dl, response.Unit)
-    }
-
-    labelsByKey := make(map[string][]string)
-    for key, values := range labels {
-        for value := range values {
-            labelsByKey[key] = append(labelsByKey[key], value)
-        }
-    }
-    customFrameMeta["labels"] = labelsByKey
-
     for _, frame := range frames {
+        customFrameMeta := map[string]interface{}{}
+        customFrameMeta["labels"] = labels
         if frame.Meta != nil {
             frame.Meta.Custom = customFrameMeta
         } else {
             frame.SetMeta(&data.FrameMeta{Custom: customFrameMeta})
         }
     }
+    if len(response.TimeSeriesData) > 0 {
+        dl := timeSeriesQuery.buildDeepLink()
+        frames = addConfigData(frames, dl, response.Unit)
+    }

     queryRes.Frames = frames

@@ -77,4 +77,28 @@ func TestTimeSeriesQuery(t *testing.T) {
             assert.Equal(t, "test-proj - asia-northeast1-c - 6724404429462225363 - 200", frames[0].Fields[1].Name)
         })
     })
+
+    t.Run("Parse labels", func(t *testing.T) {
+        data, err := loadTestFile("./test-data/7-series-response-mql.json")
+        require.NoError(t, err)
+
+        fromStart := time.Date(2018, 3, 15, 13, 0, 0, 0, time.UTC).In(time.Local)
+        res := &backend.DataResponse{}
+        query := &cloudMonitoringTimeSeriesQuery{
+            ProjectName: "test-proj",
+            Query:       "test-query",
+            timeRange: backend.TimeRange{
+                From: fromStart,
+                To:   fromStart.Add(34 * time.Minute),
+            },
+        }
+        err = query.parseResponse(res, data, "")
+        require.NoError(t, err)
+        frames := res.Frames
+        custom, ok := frames[0].Meta.Custom.(map[string]interface{})
+        require.True(t, ok)
+        labels, ok := custom["labels"].(map[string]string)
+        require.True(t, ok)
+        assert.Equal(t, "6724404429462225363", labels["resource.label.instance_id"])
+    })
 }
@@ -179,7 +179,26 @@ export default class CloudMonitoringDatasource extends DataSourceWithBackend<
          const dataQueryResponse = toDataQueryResponse({
            data: data,
          });
-          return dataQueryResponse?.data[0]?.meta?.custom?.labels ?? {};
+          const labels = dataQueryResponse?.data
+            .map((f) => f.meta?.custom?.labels)
+            .filter((p) => !!p)
+            .reduce((acc, labels) => {
+              for (let key in labels) {
+                if (!acc[key]) {
+                  acc[key] = new Set<string>();
+                }
+                if (labels[key]) {
+                  acc[key].add(labels[key]);
+                }
+              }
+              return acc;
+            }, {});
+          return Object.fromEntries(
+            Object.entries(labels).map((l: any) => {
+              l[1] = Array.from(l[1]);
+              return l;
+            })
+          );
        })
      )
    );
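
The reduce above rebuilds the aggregated key-to-values view on the client from the per-frame label maps. For reference, a rough Go rendering of that merge; the helper name and sample data are hypothetical, not taken from the commit:

package main

import "fmt"

// mergeFrameLabels mirrors, in Go, what the datasource's reduce over
// f.meta?.custom?.labels does in the browser: union the per-frame
// map[string]string labels into a map of key -> deduplicated values.
func mergeFrameLabels(perFrame []map[string]string) map[string][]string {
    seen := map[string]map[string]bool{}
    for _, labels := range perFrame {
        for key, value := range labels {
            if seen[key] == nil {
                seen[key] = map[string]bool{}
            }
            seen[key][value] = true
        }
    }
    merged := map[string][]string{}
    for key, values := range seen {
        for value := range values {
            merged[key] = append(merged[key], value)
        }
    }
    return merged
}

func main() {
    frames := []map[string]string{
        {"resource.label.instance_id": "1111"},
        {"resource.label.instance_id": "2222"},
        {"resource.label.instance_id": "1111"},
    }
    // Prints something like map[resource.label.instance_id:[1111 2222]]
    // (map iteration order is not deterministic).
    fmt.Println(mergeFrameLabels(frames))
}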