elastic: remove unused debug-code (#59712)

* elastic: remove unused debug-code

* removed unused structs
Gábor Farkas 2022-12-05 10:21:15 +01:00 committed by GitHub
parent bf85bf9b7a
commit b8b2de1ac4
11 changed files with 27 additions and 180 deletions

View File

@@ -5,7 +5,6 @@ import (
"context"
"encoding/json"
"fmt"
"io"
"net/http"
"net/url"
"path"
@@ -16,7 +15,6 @@ import (
"github.com/Masterminds/semver"
"github.com/grafana/grafana-plugin-sdk-go/backend"
"github.com/grafana/grafana/pkg/components/simplejson"
"github.com/grafana/grafana/pkg/infra/log"
"github.com/grafana/grafana/pkg/tsdb/intervalv2"
)
@@ -43,7 +41,6 @@ type Client interface {
GetMinInterval(queryInterval string) (time.Duration, error)
ExecuteMultisearch(r *MultiSearchRequest) (*MultiSearchResponse, error)
MultiSearch() *MultiSearchRequestBuilder
EnableDebug()
}
// NewClient creates a new elasticsearch client
@@ -72,13 +69,12 @@ var NewClient = func(ctx context.Context, ds *DatasourceInfo, timeRange backend.
}
type baseClientImpl struct {
ctx context.Context
ds *DatasourceInfo
timeField string
indices []string
timeRange backend.TimeRange
debugEnabled bool
logger log.Logger
ctx context.Context
ds *DatasourceInfo
timeField string
indices []string
timeRange backend.TimeRange
logger log.Logger
}
func (c *baseClientImpl) GetTimeField() string {
@@ -96,7 +92,7 @@ type multiRequest struct {
interval intervalv2.Interval
}
func (c *baseClientImpl) executeBatchRequest(uriPath, uriQuery string, requests []*multiRequest) (*response, error) {
func (c *baseClientImpl) executeBatchRequest(uriPath, uriQuery string, requests []*multiRequest) (*http.Response, error) {
bytes, err := c.encodeBatchRequests(requests)
if err != nil {
return nil, err
@@ -134,7 +130,7 @@ func (c *baseClientImpl) encodeBatchRequests(requests []*multiRequest) ([]byte,
return payload.Bytes(), nil
}
func (c *baseClientImpl) executeRequest(method, uriPath, uriQuery string, body []byte) (*response, error) {
func (c *baseClientImpl) executeRequest(method, uriPath, uriQuery string, body []byte) (*http.Response, error) {
u, err := url.Parse(c.ds.URL)
if err != nil {
return nil, err
@@ -154,15 +150,6 @@ func (c *baseClientImpl) executeRequest(method, uriPath, uriQuery string, body [
c.logger.Debug("Executing request", "url", req.URL.String(), "method", method)
var reqInfo *SearchRequestInfo
if c.debugEnabled {
reqInfo = &SearchRequestInfo{
Method: req.Method,
Url: req.URL.String(),
Data: string(body),
}
}
req.Header.Set("Content-Type", "application/x-ndjson")
start := time.Now()
@@ -175,10 +162,8 @@ func (c *baseClientImpl) executeRequest(method, uriPath, uriQuery string, body [
if err != nil {
return nil, err
}
return &response{
httpResponse: resp,
reqInfo: reqInfo,
}, nil
return resp, nil
}
func (c *baseClientImpl) ExecuteMultisearch(r *MultiSearchRequest) (*MultiSearchResponse, error) {
@@ -190,7 +175,7 @@ func (c *baseClientImpl) ExecuteMultisearch(r *MultiSearchRequest) (*MultiSearch
if err != nil {
return nil, err
}
res := clientRes.httpResponse
res := clientRes
defer func() {
if err := res.Body.Close(); err != nil {
c.logger.Warn("Failed to close response body", "err", err)
@@ -202,18 +187,6 @@ func (c *baseClientImpl) ExecuteMultisearch(r *MultiSearchRequest) (*MultiSearch
start := time.Now()
c.logger.Debug("Decoding multisearch json response")
var bodyBytes []byte
if c.debugEnabled {
tmpBytes, err := io.ReadAll(res.Body)
if err != nil {
c.logger.Error("Failed to read http response bytes", "error", err)
} else {
bodyBytes = make([]byte, len(tmpBytes))
copy(bodyBytes, tmpBytes)
res.Body = io.NopCloser(bytes.NewBuffer(tmpBytes))
}
}
var msr MultiSearchResponse
dec := json.NewDecoder(res.Body)
err = dec.Decode(&msr)
@@ -226,24 +199,6 @@ func (c *baseClientImpl) ExecuteMultisearch(r *MultiSearchRequest) (*MultiSearch
msr.Status = res.StatusCode
if c.debugEnabled {
bodyJSON, err := simplejson.NewFromReader(bytes.NewBuffer(bodyBytes))
var data *simplejson.Json
if err != nil {
c.logger.Error("Failed to decode http response into json", "error", err)
} else {
data = bodyJSON
}
msr.DebugInfo = &SearchDebugInfo{
Request: clientRes.reqInfo,
Response: &SearchResponseInfo{
Status: res.StatusCode,
Data: data,
},
}
}
return &msr, nil
}
@@ -286,7 +241,3 @@ func (c *baseClientImpl) getMultiSearchQueryParameters() string {
func (c *baseClientImpl) MultiSearch() *MultiSearchRequestBuilder {
return NewMultiSearchRequestBuilder()
}
func (c *baseClientImpl) EnableDebug() {
c.debugEnabled = true
}
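
Taken together, the hunks above leave the client with a plainer shape: executeRequest and executeBatchRequest hand back the raw *http.Response, and ExecuteMultisearch decodes the body in one pass instead of buffering a copy for debug output. A minimal, self-contained sketch of that pattern follows, using only the standard library; doSearch, executeSearch and searchResult are illustrative stand-ins, not Grafana identifiers.

package main

import (
    "bytes"
    "encoding/json"
    "fmt"
    "net/http"
)

// searchResult is an illustrative stand-in for MultiSearchResponse.
type searchResult struct {
    Status    int               `json:"status,omitempty"`
    Responses []json.RawMessage `json:"responses"`
}

// doSearch plays the role of the simplified executeRequest: it returns the
// raw *http.Response instead of wrapping it together with request metadata.
func doSearch(client *http.Client, url string, body []byte) (*http.Response, error) {
    req, err := http.NewRequest(http.MethodPost, url, bytes.NewReader(body))
    if err != nil {
        return nil, err
    }
    req.Header.Set("Content-Type", "application/x-ndjson")
    return client.Do(req)
}

// executeSearch plays the role of the simplified ExecuteMultisearch: it
// decodes straight from res.Body, with no intermediate copy of the body.
func executeSearch(client *http.Client, url string, body []byte) (*searchResult, error) {
    res, err := doSearch(client, url, body)
    if err != nil {
        return nil, err
    }
    defer func() {
        if cerr := res.Body.Close(); cerr != nil {
            fmt.Println("failed to close response body:", cerr)
        }
    }()

    var sr searchResult
    if err := json.NewDecoder(res.Body).Decode(&sr); err != nil {
        return nil, err
    }
    sr.Status = res.StatusCode
    return &sr, nil
}

func main() {
    // Example call against a hypothetical local Elasticsearch instance.
    res, err := executeSearch(http.DefaultClient, "http://localhost:9200/_msearch", []byte("{}\n{}\n"))
    fmt.Println(res, err)
}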

View File

@@ -2,33 +2,10 @@ package es
import (
"encoding/json"
"net/http"
"github.com/grafana/grafana/pkg/components/simplejson"
"github.com/grafana/grafana/pkg/tsdb/intervalv2"
)
type response struct {
httpResponse *http.Response
reqInfo *SearchRequestInfo
}
type SearchRequestInfo struct {
Method string `json:"method"`
Url string `json:"url"`
Data string `json:"data"`
}
type SearchResponseInfo struct {
Status int `json:"status"`
Data *simplejson.Json `json:"data"`
}
type SearchDebugInfo struct {
Request *SearchRequestInfo `json:"request"`
Response *SearchResponseInfo `json:"response"`
}
// SearchRequest represents a search request
type SearchRequest struct {
Index string
@@ -83,7 +60,6 @@ type MultiSearchRequest struct {
type MultiSearchResponse struct {
Status int `json:"status,omitempty"`
Responses []*SearchResponse `json:"responses"`
DebugInfo *SearchDebugInfo `json:"-"`
}
// Query represents a query

View File

@@ -31,14 +31,12 @@ const (
type responseParser struct {
Responses []*es.SearchResponse
Targets []*Query
DebugInfo *es.SearchDebugInfo
}
var newResponseParser = func(responses []*es.SearchResponse, targets []*Query, debugInfo *es.SearchDebugInfo) *responseParser {
var newResponseParser = func(responses []*es.SearchResponse, targets []*Query) *responseParser {
return &responseParser{
Responses: responses,
Targets: targets,
DebugInfo: debugInfo,
}
}
@@ -53,22 +51,11 @@ func (rp *responseParser) getTimeSeries() (*backend.QueryDataResponse, error) {
for i, res := range rp.Responses {
target := rp.Targets[i]
var debugInfo *simplejson.Json
if rp.DebugInfo != nil && i == 0 {
debugInfo = simplejson.NewFromAny(rp.DebugInfo)
}
if res.Error != nil {
errResult := getErrorFromElasticResponse(res)
result.Responses[target.RefID] = backend.DataResponse{
Error: errors.New(errResult),
Frames: data.Frames{
&data.Frame{
Meta: &data.FrameMeta{
Custom: debugInfo,
},
},
}}
}
continue
}
@@ -82,11 +69,6 @@ func (rp *responseParser) getTimeSeries() (*backend.QueryDataResponse, error) {
rp.nameFields(queryRes, target)
rp.trimDatapoints(queryRes, target)
for _, frame := range queryRes.Frames {
frame.Meta = &data.FrameMeta{
Custom: debugInfo,
}
}
result.Responses[target.RefID] = queryRes
}
return &result, nil
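
On the parsing side the change is symmetric: newResponseParser now takes only the responses and the targets, and neither the error branch nor the produced frames carry debug metadata any more. A small stand-alone sketch of that loop is below; searchResponse, query and dataResponse are hypothetical stand-ins for the Grafana types, not the real ones.

package main

import (
    "errors"
    "fmt"
)

// Hypothetical stand-ins for the types the real parser works with.
type searchResponse struct {
    Error map[string]interface{} // non-nil when Elasticsearch reports an error
}

type query struct{ RefID string }

type dataResponse struct {
    Err    error
    Frames []string // placeholder for data.Frames
}

type responseParser struct {
    responses []*searchResponse
    targets   []*query
}

// newResponseParser mirrors the two-argument constructor after this change.
func newResponseParser(responses []*searchResponse, targets []*query) *responseParser {
    return &responseParser{responses: responses, targets: targets}
}

// getTimeSeries mirrors the simplified loop: responses and targets are
// matched by index, and an error response yields a plain error result
// without an extra debug frame attached.
func (rp *responseParser) getTimeSeries() map[string]dataResponse {
    out := make(map[string]dataResponse, len(rp.targets))
    for i, res := range rp.responses {
        target := rp.targets[i]
        if res.Error != nil {
            out[target.RefID] = dataResponse{Err: errors.New(fmt.Sprint(res.Error))}
            continue
        }
        out[target.RefID] = dataResponse{Frames: []string{"frame for " + target.RefID}}
    }
    return out
}

func main() {
    rp := newResponseParser(
        []*searchResponse{{}, {Error: map[string]interface{}{"reason": "bad query"}}},
        []*query{{RefID: "A"}, {RefID: "B"}},
    )
    fmt.Println(rp.getTimeSeries())
}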

View File

@@ -1216,5 +1216,5 @@ func newResponseParserForTest(tsdbQueries map[string]string, responseBody string
return nil, err
}
return newResponseParser(response.Responses, queries, nil), nil
return newResponseParser(response.Responses, queries), nil
}

View File

@@ -1,8 +1,6 @@
// 🌟 This was machine generated. Do not edit. 🌟
//
// Frame[0] {
// "custom": null
// }
// Frame[0]
// Name:
// Dimensions: 2 Fields by 3 Rows
// +-------------------------------+------------------+
@@ -22,9 +20,6 @@
"frames": [
{
"schema": {
"meta": {
"custom": null
},
"fields": [
{
"name": "time",

View File

@@ -1,8 +1,6 @@
// 🌟 This was machine generated. Do not edit. 🌟
//
// Frame[0] {
// "custom": null
// }
// Frame[0]
// Name:
// Dimensions: 2 Fields by 3 Rows
// +-------------------------------+--------------------+
@@ -17,9 +15,7 @@
//
//
//
// Frame[1] {
// "custom": null
// }
// Frame[1]
// Name:
// Dimensions: 2 Fields by 3 Rows
// +-------------------------------+--------------------+
@@ -34,9 +30,7 @@
//
//
//
// Frame[2] {
// "custom": null
// }
// Frame[2]
// Name:
// Dimensions: 2 Fields by 3 Rows
// +-------------------------------+--------------------+
@@ -51,9 +45,7 @@
//
//
//
// Frame[3] {
// "custom": null
// }
// Frame[3]
// Name:
// Dimensions: 2 Fields by 3 Rows
// +-------------------------------+--------------------+
@@ -68,9 +60,7 @@
//
//
//
// Frame[4] {
// "custom": null
// }
// Frame[4]
// Name:
// Dimensions: 2 Fields by 3 Rows
// +-------------------------------+--------------------+
@@ -85,9 +75,7 @@
//
//
//
// Frame[5] {
// "custom": null
// }
// Frame[5]
// Name:
// Dimensions: 2 Fields by 3 Rows
// +-------------------------------+--------------------+
@@ -107,9 +95,6 @@
"frames": [
{
"schema": {
"meta": {
"custom": null
},
"fields": [
{
"name": "time",
@@ -154,9 +139,6 @@
},
{
"schema": {
"meta": {
"custom": null
},
"fields": [
{
"name": "time",
@@ -201,9 +183,6 @@
},
{
"schema": {
"meta": {
"custom": null
},
"fields": [
{
"name": "time",
@@ -248,9 +227,6 @@
},
{
"schema": {
"meta": {
"custom": null
},
"fields": [
{
"name": "time",
@@ -295,9 +271,6 @@
},
{
"schema": {
"meta": {
"custom": null
},
"fields": [
{
"name": "time",
@@ -342,9 +315,6 @@
},
{
"schema": {
"meta": {
"custom": null
},
"fields": [
{
"name": "time",

View File

@@ -1,8 +1,6 @@
// 🌟 This was machine generated. Do not edit. 🌟
//
// Frame[0] {
// "custom": null
// }
// Frame[0]
// Name:
// Dimensions: 2 Fields by 3 Rows
// +-------------------------------+-------------------+
@@ -22,9 +20,6 @@
"frames": [
{
"schema": {
"meta": {
"custom": null
},
"fields": [
{
"name": "time",

View File

@@ -1,8 +1,6 @@
// 🌟 This was machine generated. Do not edit. 🌟
//
// Frame[0] {
// "custom": null
// }
// Frame[0]
// Name:
// Dimensions: 2 Fields by 3 Rows
// +-------------------------------+---------------------+
@@ -22,9 +20,6 @@
"frames": [
{
"schema": {
"meta": {
"custom": null
},
"fields": [
{
"name": "time",

View File

@@ -1,8 +1,6 @@
// 🌟 This was machine generated. Do not edit. 🌟
//
// Frame[0] {
// "custom": null
// }
// Frame[0]
// Name:
// Dimensions: 2 Fields by 4 Rows
// +-------------------------------+--------------------+
@@ -18,9 +16,7 @@
//
//
//
// Frame[1] {
// "custom": null
// }
// Frame[1]
// Name:
// Dimensions: 2 Fields by 4 Rows
// +-------------------------------+--------------------+
@@ -36,9 +32,7 @@
//
//
//
// Frame[2] {
// "custom": null
// }
// Frame[2]
// Name:
// Dimensions: 2 Fields by 4 Rows
// +-------------------------------+--------------------+
@@ -59,9 +53,6 @@
"frames": [
{
"schema": {
"meta": {
"custom": null
},
"fields": [
{
"name": "time",
@@ -108,9 +99,6 @@
},
{
"schema": {
"meta": {
"custom": null
},
"fields": [
{
"name": "time",
@@ -157,9 +145,6 @@
},
{
"schema": {
"meta": {
"custom": null
},
"fields": [
{
"name": "time",

View File

@@ -57,7 +57,7 @@ func (e *timeSeriesQuery) execute() (*backend.QueryDataResponse, error) {
return &backend.QueryDataResponse{}, err
}
rp := newResponseParser(res.Responses, queries, res.DebugInfo)
rp := newResponseParser(res.Responses, queries)
return rp.getTimeSeries()
}

View File

@@ -1713,8 +1713,6 @@ func newFakeClient() *fakeClient {
}
}
func (c *fakeClient) EnableDebug() {}
func (c *fakeClient) GetTimeField() string {
return c.timeField
}