Chore: Update Prometheus, Loki, Graphite, and InfluxDB plugins to support contextual logs. (#57708)

Yuriy Tseretyan 2022-10-27 12:05:06 -04:00 committed by GitHub
parent 9aac0d32f9
commit facf2b1ee8
21 changed files with 149 additions and 142 deletions
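
Every plugin in this change follows the same pattern: the logger moves from a field on the Service struct to a package-level variable, and request handlers derive a context-aware logger with FromContext(ctx), so log lines pick up the trace ID and other request-scoped fields. That logger is then passed explicitly to helpers instead of being read from the service. Below is a minimal sketch of the pattern, assuming only the infra/log API visible in this diff (log.New, Logger.FromContext); the datasource name and the handleQueries helper are illustrative, not part of the commit.

package example

import (
	"context"

	"github.com/grafana/grafana-plugin-sdk-go/backend"

	"github.com/grafana/grafana/pkg/infra/log"
)

// Package-level logger replaces the logger/glog/plog field that used to live on each Service.
var logger = log.New("tsdb.example") // illustrative name

type Service struct{}

func (s *Service) QueryData(ctx context.Context, req *backend.QueryDataRequest) (*backend.QueryDataResponse, error) {
	// Derive a contextual logger: it carries the trace ID and other
	// request-scoped fields attached to ctx.
	ctxLogger := logger.FromContext(ctx)
	ctxLogger.Debug("Received a query request", "numQueries", len(req.Queries))

	// Helpers no longer reach for s.logger; the contextual logger is passed in explicitly.
	return handleQueries(ctx, ctxLogger, req)
}

// handleQueries is a hypothetical helper showing the explicit logger parameter.
func handleQueries(ctx context.Context, l log.Logger, req *backend.QueryDataRequest) (*backend.QueryDataResponse, error) {
	l.Debug("Handling queries", "numQueries", len(req.Queries))
	return backend.NewQueryDataResponse(), nil
}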

View File

@ -29,8 +29,9 @@ import (
"github.com/grafana/grafana/pkg/tsdb/legacydata"
)
var logger = log.New("tsdb.graphite")
type Service struct {
logger log.Logger
im instancemgmt.InstanceManager
tracer tracing.Tracer
}
@ -42,7 +43,6 @@ const (
func ProvideService(httpClientProvider httpclient.Provider, tracer tracing.Tracer) *Service {
return &Service{
logger: log.New("tsdb.graphite"),
im: datasource.NewInstanceManager(newInstanceSettings(httpClientProvider)),
tracer: tracer,
}
@ -90,6 +90,8 @@ func (s *Service) QueryData(ctx context.Context, req *backend.QueryDataRequest)
return nil, fmt.Errorf("query contains no queries")
}
logger := logger.FromContext(ctx)
// get datasource info from context
dsInfo, err := s.getDSInfo(req.PluginContext)
if err != nil {
@ -119,7 +121,7 @@ func (s *Service) QueryData(ctx context.Context, req *backend.QueryDataRequest)
if err != nil {
return nil, err
}
s.logger.Debug("graphite", "query", model)
logger.Debug("graphite", "query", model)
currTarget := ""
if fullTarget, err := model.Get(TargetFullModelField).String(); err == nil {
currTarget = fullTarget
@ -127,7 +129,7 @@ func (s *Service) QueryData(ctx context.Context, req *backend.QueryDataRequest)
currTarget = model.Get(TargetModelField).MustString()
}
if currTarget == "" {
s.logger.Debug("graphite", "empty query target", model)
logger.Debug("graphite", "empty query target", model)
emptyQueries = append(emptyQueries, fmt.Sprintf("Query: %v has no target", model))
continue
}
@ -137,17 +139,17 @@ func (s *Service) QueryData(ctx context.Context, req *backend.QueryDataRequest)
var result = backend.QueryDataResponse{}
if target == "" {
s.logger.Error("No targets in query model", "models without targets", strings.Join(emptyQueries, "\n"))
logger.Error("No targets in query model", "models without targets", strings.Join(emptyQueries, "\n"))
return &result, errors.New("no query target found for the alert rule")
}
formData["target"] = []string{target}
if setting.Env == setting.Dev {
s.logger.Debug("Graphite request", "params", formData)
logger.Debug("Graphite request", "params", formData)
}
graphiteReq, err := s.createRequest(ctx, dsInfo, formData)
graphiteReq, err := s.createRequest(ctx, logger, dsInfo, formData)
if err != nil {
return &result, err
}
@ -173,7 +175,7 @@ func (s *Service) QueryData(ctx context.Context, req *backend.QueryDataRequest)
return &result, err
}
frames, err := s.toDataFrames(res)
frames, err := s.toDataFrames(logger, res)
if err != nil {
span.RecordError(err)
span.SetStatus(codes.Error, err.Error())
@ -191,34 +193,34 @@ func (s *Service) QueryData(ctx context.Context, req *backend.QueryDataRequest)
return &result, nil
}
func (s *Service) parseResponse(res *http.Response) ([]TargetResponseDTO, error) {
func (s *Service) parseResponse(logger log.Logger, res *http.Response) ([]TargetResponseDTO, error) {
body, err := io.ReadAll(res.Body)
if err != nil {
return nil, err
}
defer func() {
if err := res.Body.Close(); err != nil {
s.logger.Warn("Failed to close response body", "err", err)
logger.Warn("Failed to close response body", "err", err)
}
}()
if res.StatusCode/100 != 2 {
s.logger.Info("Request failed", "status", res.Status, "body", string(body))
logger.Info("Request failed", "status", res.Status, "body", string(body))
return nil, fmt.Errorf("request failed, status: %s", res.Status)
}
var data []TargetResponseDTO
err = json.Unmarshal(body, &data)
if err != nil {
s.logger.Info("Failed to unmarshal graphite response", "error", err, "status", res.Status, "body", string(body))
logger.Info("Failed to unmarshal graphite response", "error", err, "status", res.Status, "body", string(body))
return nil, err
}
return data, nil
}
func (s *Service) toDataFrames(response *http.Response) (frames data.Frames, error error) {
responseData, err := s.parseResponse(response)
func (s *Service) toDataFrames(logger log.Logger, response *http.Response) (frames data.Frames, error error) {
responseData, err := s.parseResponse(logger, response)
if err != nil {
return nil, err
}
@ -253,13 +255,13 @@ func (s *Service) toDataFrames(response *http.Response) (frames data.Frames, err
data.NewField("value", tags, values).SetConfig(&data.FieldConfig{DisplayNameFromDS: name})))
if setting.Env == setting.Dev {
s.logger.Debug("Graphite response", "target", series.Target, "datapoints", len(series.DataPoints))
logger.Debug("Graphite response", "target", series.Target, "datapoints", len(series.DataPoints))
}
}
return frames, nil
}
func (s *Service) createRequest(ctx context.Context, dsInfo *datasourceInfo, data url.Values) (*http.Request, error) {
func (s *Service) createRequest(ctx context.Context, l log.Logger, dsInfo *datasourceInfo, data url.Values) (*http.Request, error) {
u, err := url.Parse(dsInfo.URL)
if err != nil {
return nil, err
@ -268,7 +270,7 @@ func (s *Service) createRequest(ctx context.Context, dsInfo *datasourceInfo, dat
req, err := http.NewRequestWithContext(ctx, http.MethodPost, u.String(), strings.NewReader(data.Encode()))
if err != nil {
s.logger.Info("Failed to create request", "error", err)
logger.Info("Failed to create request", "error", err)
return nil, fmt.Errorf("failed to create request: %w", err)
}

View File

@ -10,7 +10,6 @@ import (
"time"
"github.com/grafana/grafana-plugin-sdk-go/data"
"github.com/grafana/grafana/pkg/infra/log"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
@ -50,7 +49,7 @@ func TestFixIntervalFormat(t *testing.T) {
})
}
service := &Service{logger: log.New("tsdb.graphite")}
service := &Service{}
t.Run("Converts response without tags to data frames", func(*testing.T) {
body := `
@ -69,7 +68,7 @@ func TestFixIntervalFormat(t *testing.T) {
expectedFrames := data.Frames{expectedFrame}
httpResponse := &http.Response{StatusCode: 200, Body: io.NopCloser(strings.NewReader(body))}
dataFrames, err := service.toDataFrames(httpResponse)
dataFrames, err := service.toDataFrames(logger, httpResponse)
require.NoError(t, err)
if !reflect.DeepEqual(expectedFrames, dataFrames) {
@ -102,7 +101,7 @@ func TestFixIntervalFormat(t *testing.T) {
expectedFrames := data.Frames{expectedFrame}
httpResponse := &http.Response{StatusCode: 200, Body: io.NopCloser(strings.NewReader(body))}
dataFrames, err := service.toDataFrames(httpResponse)
dataFrames, err := service.toDataFrames(logger, httpResponse)
require.NoError(t, err)
if !reflect.DeepEqual(expectedFrames, dataFrames) {

View File

@ -9,28 +9,30 @@ import (
"github.com/grafana/grafana-plugin-sdk-go/backend"
"github.com/grafana/grafana-plugin-sdk-go/data"
"github.com/influxdata/influxdb-client-go/v2/api"
"github.com/grafana/grafana/pkg/infra/log"
)
const maxPointsEnforceFactor float64 = 10
// executeQuery runs a flux query using the queryModel to interpolate the query and the runner to execute it.
// maxSeries somehow limits the response.
func executeQuery(ctx context.Context, query queryModel, runner queryRunner, maxSeries int) (dr backend.DataResponse) {
func executeQuery(ctx context.Context, logger log.Logger, query queryModel, runner queryRunner, maxSeries int) (dr backend.DataResponse) {
dr = backend.DataResponse{}
flux := interpolate(query)
glog.Debug("Executing Flux query", "flux", flux)
logger.Debug("Executing Flux query", "flux", flux)
tables, err := runner.runQuery(ctx, flux)
if err != nil {
glog.Warn("Flux query failed", "err", err, "query", flux)
logger.Warn("Flux query failed", "err", err, "query", flux)
dr.Error = err
} else {
// we only enforce a larger number than maxDataPoints
maxPointsEnforced := int(float64(query.MaxDataPoints) * maxPointsEnforceFactor)
dr = readDataFrames(tables, maxPointsEnforced, maxSeries)
dr = readDataFrames(logger, tables, maxPointsEnforced, maxSeries)
if dr.Error != nil {
// we check if a too-many-data-points error happened, and if so,
@ -62,8 +64,8 @@ func executeQuery(ctx context.Context, query queryModel, runner queryRunner, max
return dr
}
func readDataFrames(result *api.QueryTableResult, maxPoints int, maxSeries int) (dr backend.DataResponse) {
glog.Debug("Reading data frames from query result", "maxPoints", maxPoints, "maxSeries", maxSeries)
func readDataFrames(logger log.Logger, result *api.QueryTableResult, maxPoints int, maxSeries int) (dr backend.DataResponse) {
logger.Debug("Reading data frames from query result", "maxPoints", maxPoints, "maxSeries", maxSeries)
dr = backend.DataResponse{}
builder := &frameBuilder{

View File

@ -14,12 +14,13 @@ import (
"github.com/grafana/grafana-plugin-sdk-go/backend"
"github.com/grafana/grafana-plugin-sdk-go/data"
"github.com/grafana/grafana-plugin-sdk-go/experimental"
"github.com/grafana/grafana/pkg/components/simplejson"
"github.com/grafana/grafana/pkg/tsdb/influxdb/models"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/xorcare/pointer"
"github.com/grafana/grafana/pkg/components/simplejson"
"github.com/grafana/grafana/pkg/tsdb/influxdb/models"
influxdb2 "github.com/influxdata/influxdb-client-go/v2"
"github.com/influxdata/influxdb-client-go/v2/api"
)
@ -62,7 +63,7 @@ func executeMockedQuery(t *testing.T, name string, query queryModel) *backend.Da
testDataPath: name + ".csv",
}
dr := executeQuery(context.Background(), query, runner, 50)
dr := executeQuery(context.Background(), glog, query, runner, 50)
return &dr
}
@ -226,7 +227,7 @@ func TestRealQuery(t *testing.T) {
runner, err := runnerFromDataSource(dsInfo)
require.NoError(t, err)
dr := executeQuery(context.Background(), queryModel{
dr := executeQuery(context.Background(), glog, queryModel{
MaxDataPoints: 100,
RawQuery: "buckets()",
}, runner, 50)

View File

@ -5,10 +5,11 @@ import (
"fmt"
"github.com/grafana/grafana-plugin-sdk-go/backend"
"github.com/grafana/grafana/pkg/infra/log"
"github.com/grafana/grafana/pkg/tsdb/influxdb/models"
influxdb2 "github.com/influxdata/influxdb-client-go/v2"
"github.com/influxdata/influxdb-client-go/v2/api"
"github.com/grafana/grafana/pkg/infra/log"
"github.com/grafana/grafana/pkg/tsdb/influxdb/models"
)
var (
@ -18,8 +19,9 @@ var (
// Query builds flux queries, executes them, and returns the results.
func Query(ctx context.Context, dsInfo *models.DatasourceInfo, tsdbQuery backend.QueryDataRequest) (
*backend.QueryDataResponse, error) {
logger := glog.FromContext(ctx)
tRes := backend.NewQueryDataResponse()
glog.Debug("Received a query", "query", tsdbQuery)
logger.Debug("Received a query", "query", tsdbQuery)
r, err := runnerFromDataSource(dsInfo)
if err != nil {
return &backend.QueryDataResponse{}, err
@ -36,7 +38,7 @@ func Query(ctx context.Context, dsInfo *models.DatasourceInfo, tsdbQuery backend
// If the default changes also update labels/placeholder in config page.
maxSeries := dsInfo.MaxSeries
res := executeQuery(ctx, *qm, r, maxSeries)
res := executeQuery(ctx, logger, *qm, r, maxSeries)
tRes.Responses[query.RefID] = res
}

View File

@ -7,6 +7,8 @@ import (
"time"
"github.com/grafana/grafana-plugin-sdk-go/backend"
"github.com/grafana/grafana/pkg/infra/log"
"github.com/grafana/grafana/pkg/tsdb/influxdb/flux"
"github.com/grafana/grafana/pkg/tsdb/influxdb/models"
)
@ -17,28 +19,30 @@ const (
func (s *Service) CheckHealth(ctx context.Context, req *backend.CheckHealthRequest) (*backend.CheckHealthResult,
error) {
logger := logger.FromContext(ctx)
dsInfo, err := s.getDSInfo(req.PluginContext)
if err != nil {
return getHealthCheckMessage(s, "error getting datasource info", err)
return getHealthCheckMessage(logger, "error getting datasource info", err)
}
if dsInfo == nil {
return getHealthCheckMessage(s, "", errors.New("invalid datasource info received"))
return getHealthCheckMessage(logger, "", errors.New("invalid datasource info received"))
}
switch dsInfo.Version {
case influxVersionFlux:
return CheckFluxHealth(ctx, dsInfo, s, req)
return CheckFluxHealth(ctx, dsInfo, req)
case influxVersionInfluxQL:
return CheckInfluxQLHealth(ctx, dsInfo, s)
default:
return getHealthCheckMessage(s, "", errors.New("unknown influx version"))
return getHealthCheckMessage(logger, "", errors.New("unknown influx version"))
}
}
func CheckFluxHealth(ctx context.Context, dsInfo *models.DatasourceInfo, s *Service,
func CheckFluxHealth(ctx context.Context, dsInfo *models.DatasourceInfo,
req *backend.CheckHealthRequest) (*backend.CheckHealthResult,
error) {
logger := logger.FromContext(ctx)
ds, err := flux.Query(ctx, dsInfo, backend.QueryDataRequest{
PluginContext: req.PluginContext,
Queries: []backend.DataQuery{
@ -56,40 +60,41 @@ func CheckFluxHealth(ctx context.Context, dsInfo *models.DatasourceInfo, s *Serv
})
if err != nil {
return getHealthCheckMessage(s, "error performing flux query", err)
return getHealthCheckMessage(logger, "error performing flux query", err)
}
if res, ok := ds.Responses[refID]; ok {
if res.Error != nil {
return getHealthCheckMessage(s, "error reading buckets", res.Error)
return getHealthCheckMessage(logger, "error reading buckets", res.Error)
}
if len(res.Frames) > 0 && len(res.Frames[0].Fields) > 0 {
return getHealthCheckMessage(s, fmt.Sprintf("%d buckets found", res.Frames[0].Fields[0].Len()), nil)
return getHealthCheckMessage(logger, fmt.Sprintf("%d buckets found", res.Frames[0].Fields[0].Len()), nil)
}
}
return getHealthCheckMessage(s, "", errors.New("error getting flux query buckets"))
return getHealthCheckMessage(logger, "", errors.New("error getting flux query buckets"))
}
func CheckInfluxQLHealth(ctx context.Context, dsInfo *models.DatasourceInfo, s *Service) (*backend.CheckHealthResult, error) {
logger := logger.FromContext(ctx)
queryString := "SHOW measurements"
hcRequest, err := s.createRequest(ctx, dsInfo, queryString)
hcRequest, err := s.createRequest(ctx, logger, dsInfo, queryString)
if err != nil {
return getHealthCheckMessage(s, "error creating influxDB healthcheck request", err)
return getHealthCheckMessage(logger, "error creating influxDB healthcheck request", err)
}
res, err := dsInfo.HTTPClient.Do(hcRequest)
if err != nil {
return getHealthCheckMessage(s, "error performing influxQL query", err)
return getHealthCheckMessage(logger, "error performing influxQL query", err)
}
defer func() {
if err := res.Body.Close(); err != nil {
s.glog.Warn("failed to close response body", "err", err)
logger.Warn("failed to close response body", "err", err)
}
}()
if res.StatusCode/100 != 2 {
return getHealthCheckMessage(s, "", fmt.Errorf("error reading InfluxDB. Status Code: %d", res.StatusCode))
return getHealthCheckMessage(logger, "", fmt.Errorf("error reading InfluxDB. Status Code: %d", res.StatusCode))
}
resp := s.responseParser.Parse(res.Body, []Query{{
RefID: refID,
@ -98,22 +103,22 @@ func CheckInfluxQLHealth(ctx context.Context, dsInfo *models.DatasourceInfo, s *
}})
if res, ok := resp.Responses[refID]; ok {
if res.Error != nil {
return getHealthCheckMessage(s, "error reading influxDB", res.Error)
return getHealthCheckMessage(logger, "error reading influxDB", res.Error)
}
if len(res.Frames) == 0 {
return getHealthCheckMessage(s, "0 measurements found", nil)
return getHealthCheckMessage(logger, "0 measurements found", nil)
}
if len(res.Frames) > 0 && len(res.Frames[0].Fields) > 0 {
return getHealthCheckMessage(s, fmt.Sprintf("%d measurements found", res.Frames[0].Fields[0].Len()), nil)
return getHealthCheckMessage(logger, fmt.Sprintf("%d measurements found", res.Frames[0].Fields[0].Len()), nil)
}
}
return getHealthCheckMessage(s, "", errors.New("error connecting influxDB influxQL"))
return getHealthCheckMessage(logger, "", errors.New("error connecting influxDB influxQL"))
}
func getHealthCheckMessage(s *Service, message string, err error) (*backend.CheckHealthResult, error) {
func getHealthCheckMessage(logger log.Logger, message string, err error) (*backend.CheckHealthResult, error) {
if err == nil {
return &backend.CheckHealthResult{
Status: backend.HealthStatusOk,
@ -121,7 +126,7 @@ func getHealthCheckMessage(s *Service, message string, err error) (*backend.Chec
}, nil
}
s.glog.Warn("error performing influxdb healthcheck", "err", err.Error())
logger.Warn("error performing influxdb healthcheck", "err", err.Error())
errorMessage := fmt.Sprintf("%s %s", err.Error(), message)
return &backend.CheckHealthResult{
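
On the InfluxDB side, the health-check helpers drop their *Service parameter and take the contextual log.Logger directly, so the logger derived once in CheckHealth flows down to every message. A minimal, self-contained sketch of that shape follows; the error branch (backend.HealthStatusError) and the trivial CheckHealth wrapper are assumptions for illustration, not the exact code.

package example

import (
	"context"
	"errors"
	"fmt"

	"github.com/grafana/grafana-plugin-sdk-go/backend"

	"github.com/grafana/grafana/pkg/infra/log"
)

var logger = log.New("tsdb.influxdb")

// getHealthCheckMessage now receives the contextual logger instead of *Service.
func getHealthCheckMessage(logger log.Logger, message string, err error) (*backend.CheckHealthResult, error) {
	if err == nil {
		return &backend.CheckHealthResult{Status: backend.HealthStatusOk, Message: message}, nil
	}
	logger.Warn("error performing influxdb healthcheck", "err", err.Error())
	// Assumed error branch: the diff shows the message construction but not the status.
	return &backend.CheckHealthResult{
		Status:  backend.HealthStatusError,
		Message: fmt.Sprintf("%s %s", err.Error(), message),
	}, nil
}

// CheckHealth (simplified) derives the contextual logger once and threads it through the helpers.
func CheckHealth(ctx context.Context) (*backend.CheckHealthResult, error) {
	l := logger.FromContext(ctx)
	return getHealthCheckMessage(l, "", errors.New("unknown influx version"))
}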

View File

@ -13,6 +13,7 @@ import (
"github.com/grafana/grafana-plugin-sdk-go/backend"
"github.com/grafana/grafana-plugin-sdk-go/backend/datasource"
"github.com/grafana/grafana-plugin-sdk-go/backend/instancemgmt"
"github.com/grafana/grafana/pkg/infra/httpclient"
"github.com/grafana/grafana/pkg/infra/log"
"github.com/grafana/grafana/pkg/setting"
@ -20,10 +21,11 @@ import (
"github.com/grafana/grafana/pkg/tsdb/influxdb/models"
)
var logger log.Logger = log.New("tsdb.influxdb")
type Service struct {
queryParser *InfluxdbQueryParser
responseParser *ResponseParser
glog log.Logger
im instancemgmt.InstanceManager
}
@ -34,7 +36,6 @@ func ProvideService(httpClient httpclient.Provider) *Service {
return &Service{
queryParser: &InfluxdbQueryParser{},
responseParser: &ResponseParser{},
glog: log.New("tsdb.influxdb"),
im: datasource.NewInstanceManager(newInstanceSettings(httpClient)),
}
}
@ -85,7 +86,8 @@ func newInstanceSettings(httpClientProvider httpclient.Provider) datasource.Inst
}
func (s *Service) QueryData(ctx context.Context, req *backend.QueryDataRequest) (*backend.QueryDataResponse, error) {
s.glog.Debug("Received a query request", "numQueries", len(req.Queries))
logger := logger.FromContext(ctx)
logger.Debug("Received a query request", "numQueries", len(req.Queries))
dsInfo, err := s.getDSInfo(req.PluginContext)
if err != nil {
@ -96,7 +98,7 @@ func (s *Service) QueryData(ctx context.Context, req *backend.QueryDataRequest)
return flux.Query(ctx, dsInfo, *req)
}
s.glog.Debug("Making a non-Flux type query")
logger.Debug("Making a non-Flux type query")
var allRawQueries string
var queries []Query
@ -119,10 +121,10 @@ func (s *Service) QueryData(ctx context.Context, req *backend.QueryDataRequest)
}
if setting.Env == setting.Dev {
s.glog.Debug("Influxdb query", "raw query", allRawQueries)
logger.Debug("Influxdb query", "raw query", allRawQueries)
}
request, err := s.createRequest(ctx, dsInfo, allRawQueries)
request, err := s.createRequest(ctx, logger, dsInfo, allRawQueries)
if err != nil {
return &backend.QueryDataResponse{}, err
}
@ -133,7 +135,7 @@ func (s *Service) QueryData(ctx context.Context, req *backend.QueryDataRequest)
}
defer func() {
if err := res.Body.Close(); err != nil {
s.glog.Warn("Failed to close response body", "err", err)
logger.Warn("Failed to close response body", "err", err)
}
}()
if res.StatusCode/100 != 2 {
@ -145,7 +147,7 @@ func (s *Service) QueryData(ctx context.Context, req *backend.QueryDataRequest)
return resp, nil
}
func (s *Service) createRequest(ctx context.Context, dsInfo *models.DatasourceInfo, query string) (*http.Request, error) {
func (s *Service) createRequest(ctx context.Context, logger log.Logger, dsInfo *models.DatasourceInfo, query string) (*http.Request, error) {
u, err := url.Parse(dsInfo.URL)
if err != nil {
return nil, err
@ -187,7 +189,7 @@ func (s *Service) createRequest(ctx context.Context, dsInfo *models.DatasourceIn
req.URL.RawQuery = params.Encode()
s.glog.Debug("Influxdb request", "url", req.URL.String())
logger.Debug("Influxdb request", "url", req.URL.String())
return req, nil
}

View File

@ -6,10 +6,10 @@ import (
"net/url"
"testing"
"github.com/grafana/grafana/pkg/infra/log"
"github.com/grafana/grafana/pkg/tsdb/influxdb/models"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/grafana/grafana/pkg/tsdb/influxdb/models"
)
func TestExecutor_createRequest(t *testing.T) {
@ -22,11 +22,10 @@ func TestExecutor_createRequest(t *testing.T) {
s := &Service{
queryParser: &InfluxdbQueryParser{},
responseParser: &ResponseParser{},
glog: log.New("test"),
}
t.Run("createRequest with GET httpMode", func(t *testing.T) {
req, err := s.createRequest(context.Background(), datasource, query)
req, err := s.createRequest(context.Background(), logger, datasource, query)
require.NoError(t, err)
@ -40,7 +39,7 @@ func TestExecutor_createRequest(t *testing.T) {
t.Run("createRequest with POST httpMode", func(t *testing.T) {
datasource.HTTPMode = "POST"
req, err := s.createRequest(context.Background(), datasource, query)
req, err := s.createRequest(context.Background(), logger, datasource, query)
require.NoError(t, err)
assert.Equal(t, "POST", req.Method)
@ -59,7 +58,7 @@ func TestExecutor_createRequest(t *testing.T) {
t.Run("createRequest with PUT httpMode", func(t *testing.T) {
datasource.HTTPMode = "PUT"
_, err := s.createRequest(context.Background(), datasource, query)
_, err := s.createRequest(context.Background(), logger, datasource, query)
require.EqualError(t, err, ErrInvalidHttpMode.Error())
})
}

View File

@ -11,8 +11,8 @@ import (
"github.com/grafana/grafana-plugin-sdk-go/backend"
sdkhttpclient "github.com/grafana/grafana-plugin-sdk-go/backend/httpclient"
"github.com/grafana/grafana-plugin-sdk-go/backend/instancemgmt"
"github.com/grafana/grafana/pkg/infra/httpclient"
"github.com/grafana/grafana/pkg/infra/log"
"github.com/grafana/grafana/pkg/tsdb/influxdb/models"
)
@ -114,7 +114,6 @@ func GetMockService(version string, rt RoundTripper) *Service {
return &Service{
queryParser: &InfluxdbQueryParser{},
responseParser: &ResponseParser{},
glog: log.New("tsdb.influxdb"),
im: &fakeInstance{
version: version,
fakeRoundTripper: rt,

View File

@ -8,9 +8,10 @@ import (
"testing"
"github.com/grafana/grafana-plugin-sdk-go/backend"
"github.com/stretchr/testify/require"
"github.com/grafana/grafana/pkg/infra/log"
"github.com/grafana/grafana/pkg/infra/tracing"
"github.com/stretchr/testify/require"
)
type mockedRoundTripperForOauth struct {
@ -122,7 +123,7 @@ func TestOauthForwardIdentity(t *testing.T) {
tracer := tracing.InitializeTracerForTest()
data, err := queryData(context.Background(), &req, &dsInfo, log.New("testlog"), tracer)
data, err := queryData(context.Background(), &req, &dsInfo, tracer)
// we do a basic check that the result is OK
require.NoError(t, err)
require.Len(t, data.Responses, 1)

View File

@ -13,17 +13,19 @@ import (
"github.com/grafana/grafana-plugin-sdk-go/backend/datasource"
"github.com/grafana/grafana-plugin-sdk-go/backend/instancemgmt"
"github.com/grafana/grafana-plugin-sdk-go/data"
"go.opentelemetry.io/otel/attribute"
"github.com/grafana/grafana/pkg/infra/httpclient"
"github.com/grafana/grafana/pkg/infra/log"
"github.com/grafana/grafana/pkg/infra/tracing"
"github.com/grafana/grafana/pkg/services/featuremgmt"
"go.opentelemetry.io/otel/attribute"
)
var logger = log.New("tsdb.loki")
type Service struct {
im instancemgmt.InstanceManager
features featuremgmt.FeatureToggles
plog log.Logger
tracer tracing.Tracer
}
@ -37,7 +39,6 @@ func ProvideService(httpClientProvider httpclient.Provider, features featuremgmt
return &Service{
im: datasource.NewInstanceManager(newInstanceSettings(httpClientProvider)),
features: features,
plog: log.New("tsdb.loki"),
tracer: tracer,
}
}
@ -115,8 +116,7 @@ func (s *Service) CallResource(ctx context.Context, req *backend.CallResourceReq
if err != nil {
return err
}
return callResource(ctx, req, sender, dsInfo, s.plog)
return callResource(ctx, req, sender, dsInfo, logger.FromContext(ctx))
}
func getAuthHeadersForCallResource(headers map[string][]string) map[string]string {
@ -170,13 +170,13 @@ func (s *Service) QueryData(ctx context.Context, req *backend.QueryDataRequest)
return result, err
}
return queryData(ctx, req, dsInfo, s.plog, s.tracer)
return queryData(ctx, req, dsInfo, s.tracer)
}
func queryData(ctx context.Context, req *backend.QueryDataRequest, dsInfo *datasourceInfo, plog log.Logger, tracer tracing.Tracer) (*backend.QueryDataResponse, error) {
func queryData(ctx context.Context, req *backend.QueryDataRequest, dsInfo *datasourceInfo, tracer tracing.Tracer) (*backend.QueryDataResponse, error) {
result := backend.NewQueryDataResponse()
api := newLokiAPI(dsInfo.HTTPClient, dsInfo.URL, plog, req.Headers)
api := newLokiAPI(dsInfo.HTTPClient, dsInfo.URL, logger.FromContext(ctx), req.Headers)
queries, err := parseQuery(req)
if err != nil {
@ -184,13 +184,15 @@ func queryData(ctx context.Context, req *backend.QueryDataRequest, dsInfo *datas
}
for _, query := range queries {
plog.Debug("Sending query", "start", query.Start, "end", query.End, "step", query.Step, "query", query.Expr)
_, span := tracer.Start(ctx, "alerting.loki")
span.SetAttributes("expr", query.Expr, attribute.Key("expr").String(query.Expr))
span.SetAttributes("start_unixnano", query.Start, attribute.Key("start_unixnano").Int64(query.Start.UnixNano()))
span.SetAttributes("stop_unixnano", query.End, attribute.Key("stop_unixnano").Int64(query.End.UnixNano()))
defer span.End()
logger := logger.FromContext(ctx) // get logger with trace-id and other contextual info
logger.Debug("Sending query", "start", query.Start, "end", query.End, "step", query.Step, "query", query.Expr)
frames, err := runQuery(ctx, api, query)
queryRes := backend.DataResponse{}

View File

@ -13,6 +13,7 @@ import (
"github.com/gorilla/websocket"
"github.com/grafana/grafana-plugin-sdk-go/backend"
"github.com/grafana/grafana-plugin-sdk-go/data"
"github.com/grafana/grafana/pkg/services/featuremgmt"
)
@ -74,6 +75,7 @@ func (s *Service) RunStream(ctx context.Context, req *backend.RunStreamRequest,
return fmt.Errorf("missing expr in channel")
}
logger := logger.FromContext(ctx)
count := int64(0)
interrupt := make(chan os.Signal, 1)
@ -99,10 +101,10 @@ func (s *Service) RunStream(ctx context.Context, req *backend.RunStreamRequest,
}
wsurl.RawQuery = params.Encode()
s.plog.Info("connecting to websocket", "url", wsurl)
logger.Info("connecting to websocket", "url", wsurl)
c, r, err := websocket.DefaultDialer.Dial(wsurl.String(), nil)
if err != nil {
s.plog.Error("error connecting to websocket", "err", err)
logger.Error("error connecting to websocket", "err", err)
return fmt.Errorf("error connecting to websocket")
}
@ -114,7 +116,7 @@ func (s *Service) RunStream(ctx context.Context, req *backend.RunStreamRequest,
_ = r.Body.Close()
}
err = c.Close()
s.plog.Error("closing loki websocket", "err", err)
logger.Error("closing loki websocket", "err", err)
}()
prev := data.FrameJSONCache{}
@ -126,7 +128,7 @@ func (s *Service) RunStream(ctx context.Context, req *backend.RunStreamRequest,
for {
_, message, err := c.ReadMessage()
if err != nil {
s.plog.Error("websocket read:", "err", err)
logger.Error("websocket read:", "err", err)
return
}
@ -153,7 +155,7 @@ func (s *Service) RunStream(ctx context.Context, req *backend.RunStreamRequest,
}
if err != nil {
s.plog.Error("websocket write:", "err", err, "raw", message)
logger.Error("websocket write:", "err", err, "raw", message)
return
}
}
@ -165,14 +167,14 @@ func (s *Service) RunStream(ctx context.Context, req *backend.RunStreamRequest,
for {
select {
case <-done:
s.plog.Info("socket done")
logger.Info("socket done")
return nil
case <-ctx.Done():
s.plog.Info("stop streaming (context canceled)")
logger.Info("stop streaming (context canceled)")
return nil
case t := <-ticker.C:
count++
s.plog.Error("loki websocket ping?", "time", t, "count", count)
logger.Error("loki websocket ping?", "time", t, "count", count)
}
}
}

View File

@ -19,7 +19,7 @@ import (
"github.com/prometheus/client_golang/api"
apiv1 "github.com/prometheus/client_golang/api/prometheus/v1"
"github.com/grafana/grafana/pkg/infra/log"
"github.com/grafana/grafana/pkg/infra/log/logtest"
"github.com/grafana/grafana/pkg/infra/tracing"
"github.com/grafana/grafana/pkg/tsdb/intervalv2"
)
@ -140,7 +140,7 @@ func runQuery(response []byte, sq storedPrometheusQuery) (*backend.QueryDataResp
intervalCalculator: intervalv2.NewCalculator(),
tracer: tracer,
TimeInterval: "15s",
log: &fakeLogger{},
log: &logtest.Fake{},
client: api,
}
@ -178,12 +178,3 @@ func runQuery(response []byte, sq storedPrometheusQuery) (*backend.QueryDataResp
return b.runQueries(context.Background(), queries)
}
type fakeLogger struct {
log.Logger
}
func (fl *fakeLogger) Debug(testMessage string, ctx ...interface{}) {}
func (fl *fakeLogger) Info(testMessage string, ctx ...interface{}) {}
func (fl *fakeLogger) Warn(testMessage string, ctx ...interface{}) {}
func (fl *fakeLogger) Error(testMessage string, ctx ...interface{}) {}
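
With the buffered Prometheus client now deriving its logger from context, the per-package fakeLogger shims above are no longer needed and tests switch to the shared logtest.Fake, which already satisfies log.Logger. A minimal sketch of the substitution, assuming a struct with a log.Logger field like Buffered; the component type here is illustrative.

package example

import (
	"testing"

	"github.com/grafana/grafana/pkg/infra/log"
	"github.com/grafana/grafana/pkg/infra/log/logtest"
)

// component stands in for types such as Buffered that hold a log.Logger.
type component struct {
	log log.Logger
}

func TestUsesSharedFakeLogger(t *testing.T) {
	// logtest.Fake is a no-op log.Logger, replacing the hand-rolled
	// fakeLogger types deleted in this commit.
	c := component{log: &logtest.Fake{}}
	c.log.Debug("this message is discarded")
}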

View File

@ -11,8 +11,10 @@ import (
"testing"
"time"
"github.com/grafana/grafana/pkg/infra/tracing"
"github.com/stretchr/testify/require"
"github.com/grafana/grafana/pkg/infra/log/logtest"
"github.com/grafana/grafana/pkg/infra/tracing"
)
// when memory-profiling this benchmark, these commands are recommended:
@ -34,7 +36,7 @@ func BenchmarkExemplarJson(b *testing.B) {
tracer := tracing.InitializeTracerForTest()
s := Buffered{tracer: tracer, log: &fakeLogger{}, client: api}
s := Buffered{tracer: tracer, log: &logtest.Fake{}, client: api}
b.ResetTimer()
for n := 0; n < b.N; n++ {
@ -52,7 +54,7 @@ func BenchmarkRangeJson(b *testing.B) {
api, err := makeMockedApi(resp)
require.NoError(b, err)
s := Buffered{tracer: tracing.InitializeTracerForTest(), log: &fakeLogger{}, client: api}
s := Buffered{tracer: tracing.InitializeTracerForTest(), log: &logtest.Fake{}, client: api}
b.ResetTimer()
for n := 0; n < b.N; n++ {

View File

@ -15,15 +15,16 @@ import (
"github.com/grafana/grafana-plugin-sdk-go/backend"
sdkHTTPClient "github.com/grafana/grafana-plugin-sdk-go/backend/httpclient"
"github.com/grafana/grafana-plugin-sdk-go/data"
apiv1 "github.com/prometheus/client_golang/api/prometheus/v1"
"github.com/prometheus/common/model"
"go.opentelemetry.io/otel/attribute"
"github.com/grafana/grafana/pkg/infra/log"
"github.com/grafana/grafana/pkg/infra/tracing"
"github.com/grafana/grafana/pkg/tsdb/intervalv2"
"github.com/grafana/grafana/pkg/tsdb/prometheus/middleware"
"github.com/grafana/grafana/pkg/tsdb/prometheus/utils"
"github.com/grafana/grafana/pkg/util/maputil"
apiv1 "github.com/prometheus/client_golang/api/prometheus/v1"
"github.com/prometheus/common/model"
"go.opentelemetry.io/otel/attribute"
)
// Internal interval and range variables
@ -120,10 +121,7 @@ func (b *Buffered) runQueries(ctx context.Context, queries []*PrometheusQuery) (
result := backend.QueryDataResponse{
Responses: backend.Responses{},
}
for _, query := range queries {
b.log.Debug("Sending query", "start", query.Start, "end", query.End, "step", query.Step, "query", query.Expr)
ctx, endSpan := utils.StartTrace(ctx, b.tracer, "datasource.prometheus", []utils.Attribute{
{Key: "expr", Value: query.Expr, Kv: attribute.Key("expr").String(query.Expr)},
{Key: "start_unixnano", Value: query.Start, Kv: attribute.Key("start_unixnano").Int64(query.Start.UnixNano())},
@ -131,6 +129,9 @@ func (b *Buffered) runQueries(ctx context.Context, queries []*PrometheusQuery) (
})
defer endSpan()
logger := b.log.FromContext(ctx) // read trace-id and other info from the context
logger.Debug("Sending query", "start", query.Start, "end", query.End, "step", query.Step, "query", query.Expr)
response := make(map[TimeSeriesQueryType]interface{})
timeRange := apiv1.Range{
@ -143,7 +144,7 @@ func (b *Buffered) runQueries(ctx context.Context, queries []*PrometheusQuery) (
if query.RangeQuery {
rangeResponse, _, err := b.client.QueryRange(ctx, query.Expr, timeRange)
if err != nil {
b.log.Error("Range query failed", "query", query.Expr, "err", err)
logger.Error("Range query failed", "query", query.Expr, "err", err)
result.Responses[query.RefId] = backend.DataResponse{Error: err}
continue
}
@ -153,7 +154,7 @@ func (b *Buffered) runQueries(ctx context.Context, queries []*PrometheusQuery) (
if query.InstantQuery {
instantResponse, _, err := b.client.Query(ctx, query.Expr, query.End)
if err != nil {
b.log.Error("Instant query failed", "query", query.Expr, "err", err)
logger.Error("Instant query failed", "query", query.Expr, "err", err)
result.Responses[query.RefId] = backend.DataResponse{Error: err}
continue
}
@ -165,7 +166,7 @@ func (b *Buffered) runQueries(ctx context.Context, queries []*PrometheusQuery) (
if query.ExemplarQuery {
exemplarResponse, err := b.client.QueryExemplars(ctx, query.Expr, timeRange.Start, timeRange.End)
if err != nil {
b.log.Error("Exemplar query failed", "query", query.Expr, "err", err)
logger.Error("Exemplar query failed", "query", query.Expr, "err", err)
} else {
response[ExemplarQueryType] = exemplarResponse
}

View File

@ -13,6 +13,11 @@ import (
"github.com/grafana/grafana-plugin-sdk-go/backend"
"github.com/grafana/grafana-plugin-sdk-go/backend/datasource"
"github.com/grafana/grafana-plugin-sdk-go/backend/instancemgmt"
"github.com/patrickmn/go-cache"
apiv1 "github.com/prometheus/client_golang/api/prometheus/v1"
"github.com/yudai/gojsondiff"
"github.com/yudai/gojsondiff/formatter"
"github.com/grafana/grafana/pkg/infra/httpclient"
"github.com/grafana/grafana/pkg/infra/log"
"github.com/grafana/grafana/pkg/infra/tracing"
@ -21,10 +26,6 @@ import (
"github.com/grafana/grafana/pkg/tsdb/prometheus/buffered"
"github.com/grafana/grafana/pkg/tsdb/prometheus/querydata"
"github.com/grafana/grafana/pkg/tsdb/prometheus/resource"
"github.com/patrickmn/go-cache"
apiv1 "github.com/prometheus/client_golang/api/prometheus/v1"
"github.com/yudai/gojsondiff"
"github.com/yudai/gojsondiff/formatter"
)
var plog = log.New("tsdb.prometheus")
@ -110,7 +111,7 @@ func (s *Service) QueryData(ctx context.Context, req *backend.QueryDataRequest)
var data *backend.QueryDataResponse
var err error
plog.Debug("PrometheusStreamingJSONParserTest", "req", req)
plog.FromContext(ctx).Debug("PrometheusStreamingJSONParserTest", "req", req)
wg.Add(1)
go func() {

View File

@ -17,7 +17,6 @@ import (
"github.com/grafana/grafana-plugin-sdk-go/backend"
"github.com/grafana/grafana-plugin-sdk-go/experimental"
"github.com/grafana/grafana/pkg/infra/log"
"github.com/grafana/grafana/pkg/tsdb/prometheus/models"
)
@ -157,12 +156,3 @@ func runQuery(response []byte, q *backend.QueryDataRequest, wide bool) (*backend
tCtx.httpProvider.setResponse(res)
return tCtx.queryData.Execute(context.Background(), q)
}
type fakeLogger struct {
log.Logger
}
func (fl *fakeLogger) Debug(testMessage string, ctx ...interface{}) {}
func (fl *fakeLogger) Info(testMessage string, ctx ...interface{}) {}
func (fl *fakeLogger) Warn(testMessage string, ctx ...interface{}) {}
func (fl *fakeLogger) Error(testMessage string, ctx ...interface{}) {}

View File

@ -8,6 +8,8 @@ import (
"github.com/grafana/grafana-plugin-sdk-go/backend"
"github.com/grafana/grafana-plugin-sdk-go/data"
"go.opentelemetry.io/otel/attribute"
"github.com/grafana/grafana/pkg/infra/log"
"github.com/grafana/grafana/pkg/infra/tracing"
"github.com/grafana/grafana/pkg/services/featuremgmt"
@ -16,7 +18,6 @@ import (
"github.com/grafana/grafana/pkg/tsdb/prometheus/models"
"github.com/grafana/grafana/pkg/tsdb/prometheus/utils"
"github.com/grafana/grafana/pkg/util/maputil"
"go.opentelemetry.io/otel/attribute"
)
const legendFormatAuto = "__auto"
@ -90,7 +91,7 @@ func (s *QueryData) Execute(ctx context.Context, req *backend.QueryDataRequest)
return &result, err
}
if r == nil {
s.log.Debug("Received nilresponse from runQuery", "query", query.Expr)
s.log.FromContext(ctx).Debug("Received nilresponse from runQuery", "query", query.Expr)
continue
}
result.Responses[q.RefID] = *r
@ -100,11 +101,12 @@ func (s *QueryData) Execute(ctx context.Context, req *backend.QueryDataRequest)
}
func (s *QueryData) fetch(ctx context.Context, client *client.Client, q *models.Query, headers map[string]string) (*backend.DataResponse, error) {
s.log.Debug("Sending query", "start", q.Start, "end", q.End, "step", q.Step, "query", q.Expr)
traceCtx, end := s.trace(ctx, q)
defer end()
logger := s.log.FromContext(traceCtx)
logger.Debug("Sending query", "start", q.Start, "end", q.End, "step", q.Step, "query", q.Expr)
response := &backend.DataResponse{
Frames: data.Frames{},
Error: nil,
@ -131,7 +133,7 @@ func (s *QueryData) fetch(ctx context.Context, client *client.Client, q *models.
if err != nil {
// If exemplar query returns error, we want to only log it and
// continue with other results processing
s.log.Error("Exemplar query failed", "query", q.Expr, "err", err)
logger.Error("Exemplar query failed", "query", q.Expr, "err", err)
}
if res != nil {
response.Frames = append(response.Frames, res.Frames...)

View File

@ -13,15 +13,17 @@ import (
"github.com/grafana/grafana-plugin-sdk-go/backend"
sdkhttpclient "github.com/grafana/grafana-plugin-sdk-go/backend/httpclient"
"github.com/grafana/grafana-plugin-sdk-go/data"
apiv1 "github.com/prometheus/client_golang/api/prometheus/v1"
p "github.com/prometheus/common/model"
"github.com/stretchr/testify/require"
"github.com/grafana/grafana/pkg/infra/httpclient"
"github.com/grafana/grafana/pkg/infra/log/logtest"
"github.com/grafana/grafana/pkg/infra/tracing"
"github.com/grafana/grafana/pkg/setting"
"github.com/grafana/grafana/pkg/tsdb/prometheus/buffered"
"github.com/grafana/grafana/pkg/tsdb/prometheus/models"
"github.com/grafana/grafana/pkg/tsdb/prometheus/querydata"
apiv1 "github.com/prometheus/client_golang/api/prometheus/v1"
p "github.com/prometheus/common/model"
"github.com/stretchr/testify/require"
)
func TestPrometheus_parseTimeSeriesResponse(t *testing.T) {
@ -415,7 +417,7 @@ func setup(wideFrames bool) (*testContext, error) {
features := &fakeFeatureToggles{flags: map[string]bool{"prometheusStreamingJSONParser": true, "prometheusWideSeries": wideFrames}}
opts, err := buffered.CreateTransportOptions(settings, &setting.Cfg{}, &fakeLogger{})
opts, err := buffered.CreateTransportOptions(settings, &setting.Cfg{}, &logtest.Fake{})
if err != nil {
return nil, err
}
@ -425,7 +427,7 @@ func setup(wideFrames bool) (*testContext, error) {
return nil, err
}
queryData, _ := querydata.New(httpClient, features, tracer, settings, &fakeLogger{})
queryData, _ := querydata.New(httpClient, features, tracer, settings, &logtest.Fake{})
return &testContext{
httpProvider: httpProvider,

View File

@ -10,15 +10,16 @@ import (
"github.com/grafana/grafana-plugin-sdk-go/backend"
"github.com/grafana/grafana-plugin-sdk-go/data"
jsoniter "github.com/json-iterator/go"
"github.com/grafana/grafana/pkg/tsdb/prometheus/models"
"github.com/grafana/grafana/pkg/util/converter"
jsoniter "github.com/json-iterator/go"
)
func (s *QueryData) parseResponse(ctx context.Context, q *models.Query, res *http.Response) (*backend.DataResponse, error) {
defer func() {
if err := res.Body.Close(); err != nil {
s.log.Error("Failed to close response body", "err", err)
s.log.FromContext(ctx).Error("Failed to close response body", "err", err)
}
}()

View File

@ -7,6 +7,7 @@ import (
"net/http"
"github.com/grafana/grafana-plugin-sdk-go/backend"
"github.com/grafana/grafana/pkg/infra/log"
"github.com/grafana/grafana/pkg/tsdb/prometheus/client"
"github.com/grafana/grafana/pkg/tsdb/prometheus/utils"
@ -70,7 +71,7 @@ func (r *Resource) Execute(ctx context.Context, req *backend.CallResourceRequest
delHopHeaders(req.Headers)
delStopHeaders(req.Headers)
r.log.Debug("Sending resource query", "URL", req.URL)
r.log.FromContext(ctx).Debug("Sending resource query", "URL", req.URL)
resp, err := r.promClient.QueryResource(ctx, req)
if err != nil {
return nil, fmt.Errorf("error querying resource: %v", err)