elastic: remove unused debug-code (#59712)

* elastic: remove unused debug-code

* removed unused structs
Gábor Farkas 2022-12-05 10:21:15 +01:00 committed by GitHub
parent bf85bf9b7a
commit b8b2de1ac4
11 changed files with 27 additions and 180 deletions

View File

@@ -5,7 +5,6 @@ import (
 	"context"
 	"encoding/json"
 	"fmt"
-	"io"
 	"net/http"
 	"net/url"
 	"path"
@@ -16,7 +15,6 @@ import (
 	"github.com/Masterminds/semver"
 	"github.com/grafana/grafana-plugin-sdk-go/backend"

-	"github.com/grafana/grafana/pkg/components/simplejson"
 	"github.com/grafana/grafana/pkg/infra/log"
 	"github.com/grafana/grafana/pkg/tsdb/intervalv2"
 )
@@ -43,7 +41,6 @@ type Client interface {
 	GetMinInterval(queryInterval string) (time.Duration, error)
 	ExecuteMultisearch(r *MultiSearchRequest) (*MultiSearchResponse, error)
 	MultiSearch() *MultiSearchRequestBuilder
-	EnableDebug()
 }

 // NewClient creates a new elasticsearch client
@@ -72,13 +69,12 @@ var NewClient = func(ctx context.Context, ds *DatasourceInfo, timeRange backend.
 }

 type baseClientImpl struct {
 	ctx       context.Context
 	ds        *DatasourceInfo
 	timeField string
 	indices   []string
 	timeRange backend.TimeRange
-	debugEnabled bool
 	logger    log.Logger
 }

 func (c *baseClientImpl) GetTimeField() string {
@@ -96,7 +92,7 @@ type multiRequest struct {
 	interval intervalv2.Interval
 }

-func (c *baseClientImpl) executeBatchRequest(uriPath, uriQuery string, requests []*multiRequest) (*response, error) {
+func (c *baseClientImpl) executeBatchRequest(uriPath, uriQuery string, requests []*multiRequest) (*http.Response, error) {
 	bytes, err := c.encodeBatchRequests(requests)
 	if err != nil {
 		return nil, err
@@ -134,7 +130,7 @@ func (c *baseClientImpl) encodeBatchRequests(requests []*multiRequest) ([]byte,
 	return payload.Bytes(), nil
 }

-func (c *baseClientImpl) executeRequest(method, uriPath, uriQuery string, body []byte) (*response, error) {
+func (c *baseClientImpl) executeRequest(method, uriPath, uriQuery string, body []byte) (*http.Response, error) {
 	u, err := url.Parse(c.ds.URL)
 	if err != nil {
 		return nil, err
@@ -154,15 +150,6 @@ func (c *baseClientImpl) executeRequest(method, uriPath, uriQuery string, body [
 	c.logger.Debug("Executing request", "url", req.URL.String(), "method", method)

-	var reqInfo *SearchRequestInfo
-	if c.debugEnabled {
-		reqInfo = &SearchRequestInfo{
-			Method: req.Method,
-			Url:    req.URL.String(),
-			Data:   string(body),
-		}
-	}
-
 	req.Header.Set("Content-Type", "application/x-ndjson")

 	start := time.Now()
@@ -175,10 +162,8 @@ func (c *baseClientImpl) executeRequest(method, uriPath, uriQuery string, body [
 	if err != nil {
 		return nil, err
 	}
-	return &response{
-		httpResponse: resp,
-		reqInfo:      reqInfo,
-	}, nil
+
+	return resp, nil
 }

 func (c *baseClientImpl) ExecuteMultisearch(r *MultiSearchRequest) (*MultiSearchResponse, error) {
@@ -190,7 +175,7 @@ func (c *baseClientImpl) ExecuteMultisearch(r *MultiSearchRequest) (*MultiSearchResponse, error) {
 	if err != nil {
 		return nil, err
 	}
-	res := clientRes.httpResponse
+	res := clientRes
 	defer func() {
 		if err := res.Body.Close(); err != nil {
 			c.logger.Warn("Failed to close response body", "err", err)
@@ -202,18 +187,6 @@ func (c *baseClientImpl) ExecuteMultisearch(r *MultiSearchRequest) (*MultiSearchResponse, error) {
 	start := time.Now()
 	c.logger.Debug("Decoding multisearch json response")

-	var bodyBytes []byte
-	if c.debugEnabled {
-		tmpBytes, err := io.ReadAll(res.Body)
-		if err != nil {
-			c.logger.Error("Failed to read http response bytes", "error", err)
-		} else {
-			bodyBytes = make([]byte, len(tmpBytes))
-			copy(bodyBytes, tmpBytes)
-			res.Body = io.NopCloser(bytes.NewBuffer(tmpBytes))
-		}
-	}
-
 	var msr MultiSearchResponse
 	dec := json.NewDecoder(res.Body)
 	err = dec.Decode(&msr)
@@ -226,24 +199,6 @@ func (c *baseClientImpl) ExecuteMultisearch(r *MultiSearchRequest) (*MultiSearchResponse, error) {
 	msr.Status = res.StatusCode

-	if c.debugEnabled {
-		bodyJSON, err := simplejson.NewFromReader(bytes.NewBuffer(bodyBytes))
-		var data *simplejson.Json
-		if err != nil {
-			c.logger.Error("Failed to decode http response into json", "error", err)
-		} else {
-			data = bodyJSON
-		}
-
-		msr.DebugInfo = &SearchDebugInfo{
-			Request: clientRes.reqInfo,
-			Response: &SearchResponseInfo{
-				Status: res.StatusCode,
-				Data:   data,
-			},
-		}
-	}
-
 	return &msr, nil
 }
@@ -286,7 +241,3 @@ func (c *baseClientImpl) getMultiSearchQueryParameters() string {
 func (c *baseClientImpl) MultiSearch() *MultiSearchRequestBuilder {
 	return NewMultiSearchRequestBuilder()
 }
-
-func (c *baseClientImpl) EnableDebug() {
-	c.debugEnabled = true
-}
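
Taken together, the hunks above shrink the request path: the low-level helpers now return the raw *http.Response instead of a package-private response wrapper whose only purpose was to carry debug metadata. A stand-alone sketch of that pattern, with a made-up endpoint and helper name rather than the Grafana code itself:

package main

import (
	"fmt"
	"net/http"
)

// executeRequest mirrors the shape the client helpers take after this
// change: build the request, set the ndjson content type, and hand the
// *http.Response straight back so the caller owns the body.
func executeRequest(method, rawURL string) (*http.Response, error) {
	req, err := http.NewRequest(method, rawURL, nil)
	if err != nil {
		return nil, err
	}
	req.Header.Set("Content-Type", "application/x-ndjson")
	return http.DefaultClient.Do(req)
}

func main() {
	resp, err := executeRequest(http.MethodGet, "http://localhost:9200")
	if err != nil {
		fmt.Println("request failed:", err)
		return
	}
	defer func() {
		if err := resp.Body.Close(); err != nil {
			fmt.Println("failed to close response body:", err)
		}
	}()
	fmt.Println("status:", resp.StatusCode)
}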

View File

@@ -2,33 +2,10 @@ package es

 import (
 	"encoding/json"
-	"net/http"

-	"github.com/grafana/grafana/pkg/components/simplejson"
 	"github.com/grafana/grafana/pkg/tsdb/intervalv2"
 )

-type response struct {
-	httpResponse *http.Response
-	reqInfo      *SearchRequestInfo
-}
-
-type SearchRequestInfo struct {
-	Method string `json:"method"`
-	Url    string `json:"url"`
-	Data   string `json:"data"`
-}
-
-type SearchResponseInfo struct {
-	Status int              `json:"status"`
-	Data   *simplejson.Json `json:"data"`
-}
-
-type SearchDebugInfo struct {
-	Request  *SearchRequestInfo  `json:"request"`
-	Response *SearchResponseInfo `json:"response"`
-}
-
 // SearchRequest represents a search request
 type SearchRequest struct {
 	Index       string
@@ -83,7 +60,6 @@ type MultiSearchRequest struct {
 type MultiSearchResponse struct {
 	Status    int               `json:"status,omitempty"`
 	Responses []*SearchResponse `json:"responses"`
-	DebugInfo *SearchDebugInfo  `json:"-"`
 }

 // Query represents a query
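
With the response, SearchRequestInfo, SearchResponseInfo, and SearchDebugInfo types gone, MultiSearchResponse is just a status code plus the per-query responses, and nothing in it is hidden from JSON any more. A minimal decoding sketch with simplified stand-in types (the real SearchResponse carries more fields than shown here):

package main

import (
	"encoding/json"
	"fmt"
	"strings"
)

// Simplified stand-ins for the es package types after this change;
// note there is no DebugInfo field left on MultiSearchResponse.
type SearchResponse struct {
	Status int `json:"status"`
}

type MultiSearchResponse struct {
	Status    int               `json:"status,omitempty"`
	Responses []*SearchResponse `json:"responses"`
}

func main() {
	body := `{"responses":[{"status":200},{"status":404}]}`

	var msr MultiSearchResponse
	if err := json.NewDecoder(strings.NewReader(body)).Decode(&msr); err != nil {
		fmt.Println("decode failed:", err)
		return
	}
	for i, r := range msr.Responses {
		fmt.Printf("response %d: status %d\n", i, r.Status)
	}
}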

View File

@@ -31,14 +31,12 @@ const (
 type responseParser struct {
 	Responses []*es.SearchResponse
 	Targets   []*Query
-	DebugInfo *es.SearchDebugInfo
 }

-var newResponseParser = func(responses []*es.SearchResponse, targets []*Query, debugInfo *es.SearchDebugInfo) *responseParser {
+var newResponseParser = func(responses []*es.SearchResponse, targets []*Query) *responseParser {
 	return &responseParser{
 		Responses: responses,
 		Targets:   targets,
-		DebugInfo: debugInfo,
 	}
 }
@@ -53,22 +51,11 @@ func (rp *responseParser) getTimeSeries() (*backend.QueryDataResponse, error) {
 	for i, res := range rp.Responses {
 		target := rp.Targets[i]

-		var debugInfo *simplejson.Json
-		if rp.DebugInfo != nil && i == 0 {
-			debugInfo = simplejson.NewFromAny(rp.DebugInfo)
-		}
-
 		if res.Error != nil {
 			errResult := getErrorFromElasticResponse(res)
 			result.Responses[target.RefID] = backend.DataResponse{
 				Error: errors.New(errResult),
-				Frames: data.Frames{
-					&data.Frame{
-						Meta: &data.FrameMeta{
-							Custom: debugInfo,
-						},
-					},
-				},
 			}
 			continue
 		}
@@ -82,11 +69,6 @@ func (rp *responseParser) getTimeSeries() (*backend.QueryDataResponse, error) {
 		rp.nameFields(queryRes, target)
 		rp.trimDatapoints(queryRes, target)

-		for _, frame := range queryRes.Frames {
-			frame.Meta = &data.FrameMeta{
-				Custom: debugInfo,
-			}
-		}
 		result.Responses[target.RefID] = queryRes
 	}
 	return &result, nil
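
The parser now reports an Elasticsearch error as a bare backend.DataResponse: just the error, with no placeholder frame whose Meta.Custom used to carry the debug payload. A small self-contained sketch using the plugin SDK (the ref ID and error text are invented for illustration):

package main

import (
	"errors"
	"fmt"

	"github.com/grafana/grafana-plugin-sdk-go/backend"
)

func main() {
	result := backend.NewQueryDataResponse()

	// Error path after this change: only the error is set, no Frames.
	result.Responses["A"] = backend.DataResponse{
		Error: errors.New("elasticsearch returned an error"),
	}

	fmt.Println(result.Responses["A"].Error)
}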

View File

@@ -1216,5 +1216,5 @@ func newResponseParserForTest(tsdbQueries map[string]string, responseBody string
 		return nil, err
 	}

-	return newResponseParser(response.Responses, queries, nil), nil
+	return newResponseParser(response.Responses, queries), nil
 }

View File

@@ -1,8 +1,6 @@
 // 🌟 This was machine generated. Do not edit. 🌟
 //
-// Frame[0] {
-//   "custom": null
-// }
+// Frame[0]
 // Name:
 // Dimensions: 2 Fields by 3 Rows
 // +-------------------------------+------------------+
@@ -22,9 +20,6 @@
   "frames": [
     {
       "schema": {
-        "meta": {
-          "custom": null
-        },
         "fields": [
           {
             "name": "time",

View File

@@ -1,8 +1,6 @@
 // 🌟 This was machine generated. Do not edit. 🌟
 //
-// Frame[0] {
-//   "custom": null
-// }
+// Frame[0]
 // Name:
 // Dimensions: 2 Fields by 3 Rows
 // +-------------------------------+--------------------+
@@ -17,9 +15,7 @@
 //
 //
 //
-// Frame[1] {
-//   "custom": null
-// }
+// Frame[1]
 // Name:
 // Dimensions: 2 Fields by 3 Rows
 // +-------------------------------+--------------------+
@@ -34,9 +30,7 @@
 //
 //
 //
-// Frame[2] {
-//   "custom": null
-// }
+// Frame[2]
 // Name:
 // Dimensions: 2 Fields by 3 Rows
 // +-------------------------------+--------------------+
@@ -51,9 +45,7 @@
 //
 //
 //
-// Frame[3] {
-//   "custom": null
-// }
+// Frame[3]
 // Name:
 // Dimensions: 2 Fields by 3 Rows
 // +-------------------------------+--------------------+
@@ -68,9 +60,7 @@
 //
 //
 //
-// Frame[4] {
-//   "custom": null
-// }
+// Frame[4]
 // Name:
 // Dimensions: 2 Fields by 3 Rows
 // +-------------------------------+--------------------+
@@ -85,9 +75,7 @@
 //
 //
 //
-// Frame[5] {
-//   "custom": null
-// }
+// Frame[5]
 // Name:
 // Dimensions: 2 Fields by 3 Rows
 // +-------------------------------+--------------------+
@@ -107,9 +95,6 @@
   "frames": [
     {
       "schema": {
-        "meta": {
-          "custom": null
-        },
         "fields": [
           {
             "name": "time",
@@ -154,9 +139,6 @@
     },
     {
       "schema": {
-        "meta": {
-          "custom": null
-        },
         "fields": [
           {
             "name": "time",
@@ -201,9 +183,6 @@
     },
     {
       "schema": {
-        "meta": {
-          "custom": null
-        },
         "fields": [
           {
             "name": "time",
@@ -248,9 +227,6 @@
     },
     {
       "schema": {
-        "meta": {
-          "custom": null
-        },
         "fields": [
           {
             "name": "time",
@@ -295,9 +271,6 @@
     },
     {
       "schema": {
-        "meta": {
-          "custom": null
-        },
         "fields": [
           {
             "name": "time",
@@ -342,9 +315,6 @@
     },
     {
       "schema": {
-        "meta": {
-          "custom": null
-        },
         "fields": [
           {
             "name": "time",

View File

@@ -1,8 +1,6 @@
 // 🌟 This was machine generated. Do not edit. 🌟
 //
-// Frame[0] {
-//   "custom": null
-// }
+// Frame[0]
 // Name:
 // Dimensions: 2 Fields by 3 Rows
 // +-------------------------------+-------------------+
@@ -22,9 +20,6 @@
   "frames": [
     {
       "schema": {
-        "meta": {
-          "custom": null
-        },
         "fields": [
           {
             "name": "time",

View File

@@ -1,8 +1,6 @@
 // 🌟 This was machine generated. Do not edit. 🌟
 //
-// Frame[0] {
-//   "custom": null
-// }
+// Frame[0]
 // Name:
 // Dimensions: 2 Fields by 3 Rows
 // +-------------------------------+---------------------+
@@ -22,9 +20,6 @@
   "frames": [
     {
       "schema": {
-        "meta": {
-          "custom": null
-        },
         "fields": [
           {
             "name": "time",

View File

@@ -1,8 +1,6 @@
 // 🌟 This was machine generated. Do not edit. 🌟
 //
-// Frame[0] {
-//   "custom": null
-// }
+// Frame[0]
 // Name:
 // Dimensions: 2 Fields by 4 Rows
 // +-------------------------------+--------------------+
@@ -18,9 +16,7 @@
 //
 //
 //
-// Frame[1] {
-//   "custom": null
-// }
+// Frame[1]
 // Name:
 // Dimensions: 2 Fields by 4 Rows
 // +-------------------------------+--------------------+
@@ -36,9 +32,7 @@
 //
 //
 //
-// Frame[2] {
-//   "custom": null
-// }
+// Frame[2]
 // Name:
 // Dimensions: 2 Fields by 4 Rows
 // +-------------------------------+--------------------+
@@ -59,9 +53,6 @@
   "frames": [
     {
       "schema": {
-        "meta": {
-          "custom": null
-        },
         "fields": [
           {
             "name": "time",
@@ -108,9 +99,6 @@
     },
     {
       "schema": {
-        "meta": {
-          "custom": null
-        },
         "fields": [
           {
             "name": "time",
@@ -157,9 +145,6 @@
     },
     {
       "schema": {
-        "meta": {
-          "custom": null
-        },
         "fields": [
           {
             "name": "time",

View File

@@ -57,7 +57,7 @@ func (e *timeSeriesQuery) execute() (*backend.QueryDataResponse, error) {
 		return &backend.QueryDataResponse{}, err
 	}

-	rp := newResponseParser(res.Responses, queries, res.DebugInfo)
+	rp := newResponseParser(res.Responses, queries)
 	return rp.getTimeSeries()
 }

View File

@@ -1713,8 +1713,6 @@ func newFakeClient() *fakeClient {
 	}
 }

-func (c *fakeClient) EnableDebug() {}
-
 func (c *fakeClient) GetTimeField() string {
 	return c.timeField
 }
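
Because EnableDebug was removed from the Client interface, the fake client in the tests can simply drop its no-op implementation. A toy illustration of why that compiles (not the Grafana test code):

package main

import "fmt"

// A test double only has to implement the methods the interface still
// declares, so shrinking the interface lets the fake shrink with it.
type Client interface {
	GetTimeField() string
}

type fakeClient struct {
	timeField string
}

func (c *fakeClient) GetTimeField() string {
	return c.timeField
}

func main() {
	var c Client = &fakeClient{timeField: "@timestamp"}
	fmt.Println(c.GetTimeField())
}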