package models

import (
	"crypto/tls"
	"crypto/x509"
	"errors"
	"fmt"
	"net"
	"net/http"
	"sync"
	"time"

	"github.com/grafana/grafana/pkg/setting"
	"github.com/prometheus/client_golang/prometheus"
	"github.com/prometheus/client_golang/prometheus/promhttp"
)
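
// datasourceRequestCounter and the collectors below instrument outgoing
// datasource requests: total count, duration, response size and the number of
// requests currently in flight.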
var datasourceRequestCounter = prometheus.NewCounterVec(
	prometheus.CounterOpts{
		Namespace: "grafana",
		Name:      "datasource_request_total",
		Help:      "A counter for outgoing requests for a datasource",
	},
	[]string{"datasource", "code", "method"},
)

var datasourceRequestSummary = prometheus.NewSummaryVec(
	prometheus.SummaryOpts{
		Namespace:  "grafana",
		Name:       "datasource_request_duration_seconds",
		Help:       "summary of outgoing datasource requests sent from Grafana",
		Objectives: map[float64]float64{0.5: 0.05, 0.9: 0.01, 0.99: 0.001},
	}, []string{"datasource", "code", "method"},
)

var datasourceResponseSummary = prometheus.NewSummaryVec(
	prometheus.SummaryOpts{
		Namespace:  "grafana",
		Name:       "datasource_response_size_bytes",
		Help:       "summary of datasource response sizes returned to Grafana",
		Objectives: map[float64]float64{0.5: 0.05, 0.9: 0.01, 0.99: 0.001},
	}, []string{"datasource"},
)

var datasourceRequestsInFlight = prometheus.NewGaugeVec(
	prometheus.GaugeOpts{
		Namespace: "grafana",
		Name:      "datasource_request_in_flight",
		Help:      "A gauge of outgoing datasource requests currently being sent by Grafana",
	},
	[]string{"datasource"},
)

func init() {
	prometheus.MustRegister(datasourceRequestSummary,
		datasourceRequestCounter,
		datasourceRequestsInFlight,
		datasourceResponseSummary)
}
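
// proxyTransportCache caches one dataSourceTransport per datasource ID so the
// underlying connection pool can be reused across requests.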
type proxyTransportCache struct {
	cache map[int64]cachedTransport
	sync.Mutex
}

// dataSourceTransport implements http.RoundTripper (https://golang.org/pkg/net/http/#RoundTripper)
type dataSourceTransport struct {
	datasourceName string
	headers        map[string]string
	transport      *http.Transport
}
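
// instrumentRoundtrip wraps next with Prometheus instrumentation for request
// duration, request count and in-flight requests, and observes the response
// size, all labelled with the given datasource name.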
func instrumentRoundtrip(datasourceName string, next http.RoundTripper) promhttp.RoundTripperFunc {
	return promhttp.RoundTripperFunc(func(r *http.Request) (*http.Response, error) {
		datasourceLabel := prometheus.Labels{"datasource": datasourceName}

		requestCounter := datasourceRequestCounter.MustCurryWith(datasourceLabel)
		requestSummary := datasourceRequestSummary.MustCurryWith(datasourceLabel)
		requestInFlight := datasourceRequestsInFlight.With(datasourceLabel)
		responseSizeSummary := datasourceResponseSummary.With(datasourceLabel)

		res, err := promhttp.InstrumentRoundTripperDuration(requestSummary,
			promhttp.InstrumentRoundTripperCounter(requestCounter,
				promhttp.InstrumentRoundTripperInFlight(requestInFlight, next))).
			RoundTrip(r)

		// Only observe the response size when ContentLength is positive; a
		// negative ContentLength indicates the content size is unknown.
		// https://godoc.org/github.com/badu/http#Response
		if res != nil && res.ContentLength > 0 {
			responseSizeSummary.Observe(float64(res.ContentLength))
		}

		return res, err
	})
}

// RoundTrip executes a single HTTP transaction, returning a Response for the provided Request.
func (d *dataSourceTransport) RoundTrip(req *http.Request) (*http.Response, error) {
	for key, value := range d.headers {
		req.Header.Set(key, value)
	}

	return instrumentRoundtrip(d.datasourceName, d.transport).RoundTrip(req)
}
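
// cachedTransport pairs a dataSourceTransport with the time the datasource was
// last updated, so stale transports can be detected and rebuilt.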
type cachedTransport struct {
	updated time.Time

	*dataSourceTransport
}
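
// ptc is the package-level transport cache, keyed by datasource ID.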
var ptc = proxyTransportCache{
	cache: make(map[int64]cachedTransport),
}
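
// GetHttpClient returns an *http.Client that uses the datasource's cached
// transport and the global data proxy timeout.
//
// A minimal usage sketch, assuming ds is an initialized *DataSource:
//
//	client, err := ds.GetHttpClient()
//	if err != nil {
//		return err
//	}
//	resp, err := client.Get(ds.Url)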
func (ds *DataSource) GetHttpClient() (*http.Client, error) {
	transport, err := ds.GetHttpTransport()
	if err != nil {
		return nil, err
	}

	return &http.Client{
		Timeout:   time.Duration(setting.DataProxyTimeout) * time.Second,
		Transport: transport,
	}, nil
}
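
// GetHttpTransport returns the datasource's dataSourceTransport, building and
// caching a new one if the datasource has been updated since the cached
// transport was created.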
func (ds *DataSource) GetHttpTransport() (*dataSourceTransport, error) {
	ptc.Lock()
	defer ptc.Unlock()
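
	// Reuse the cached transport as long as the datasource has not been
	// updated since the cached transport was built.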
	if t, present := ptc.cache[ds.Id]; present && ds.Updated.Equal(t.updated) {
		return t.dataSourceTransport, nil
	}

	tlsConfig, err := ds.GetTLSConfig()
	if err != nil {
		return nil, err
	}

	tlsConfig.Renegotiation = tls.RenegotiateFreelyAsClient

	// Create a transport that adds all custom headers to outgoing datasource requests.
	customHeaders := ds.getCustomHeaders()
	transport := &http.Transport{
		TLSClientConfig: tlsConfig,
		Proxy:           http.ProxyFromEnvironment,
		Dial: (&net.Dialer{
			Timeout:   time.Duration(setting.DataProxyTimeout) * time.Second,
			KeepAlive: 30 * time.Second,
		}).Dial,
		TLSHandshakeTimeout:   10 * time.Second,
		ExpectContinueTimeout: 1 * time.Second,
		MaxIdleConns:          100,
		IdleConnTimeout:       90 * time.Second,
	}

	dsTransport := &dataSourceTransport{
		headers:        customHeaders,
		transport:      transport,
		datasourceName: ds.Name,
	}

	ptc.cache[ds.Id] = cachedTransport{
		dataSourceTransport: dsTransport,
		updated:             ds.Updated,
	}

	return dsTransport, nil
}
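
// GetTLSConfig builds a *tls.Config from the datasource's JSON settings:
// optional skip-verify, a custom CA certificate and/or a client certificate pair.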
func (ds *DataSource) GetTLSConfig() (*tls.Config, error) {
	var tlsSkipVerify, tlsClientAuth, tlsAuthWithCACert bool
	if ds.JsonData != nil {
		tlsClientAuth = ds.JsonData.Get("tlsAuth").MustBool(false)
		tlsAuthWithCACert = ds.JsonData.Get("tlsAuthWithCACert").MustBool(false)
		tlsSkipVerify = ds.JsonData.Get("tlsSkipVerify").MustBool(false)
	}

	tlsConfig := &tls.Config{
		InsecureSkipVerify: tlsSkipVerify,
	}

	if tlsClientAuth || tlsAuthWithCACert {
		decrypted := ds.SecureJsonData.Decrypt()
		if tlsAuthWithCACert && len(decrypted["tlsCACert"]) > 0 {
			caPool := x509.NewCertPool()
			ok := caPool.AppendCertsFromPEM([]byte(decrypted["tlsCACert"]))
			if !ok {
				return nil, errors.New("Failed to parse TLS CA PEM certificate")
			}
			tlsConfig.RootCAs = caPool
		}

		if tlsClientAuth {
			cert, err := tls.X509KeyPair([]byte(decrypted["tlsClientCert"]), []byte(decrypted["tlsClientKey"]))
			if err != nil {
				return nil, err
			}
			tlsConfig.Certificates = []tls.Certificate{cert}
		}
	}

	return tlsConfig, nil
}

// getCustomHeaders returns a map of all custom headers to be set on outgoing
// requests. Each map key is a header name and each map value is that header's value.
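// Header names are read from jsonData ("httpHeaderName1", "httpHeaderName2", ...)
// and their values from the decrypted secureJsonData ("httpHeaderValue1", ...),
// stopping at the first missing name.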
func (ds *DataSource) getCustomHeaders() map[string]string {
	headers := make(map[string]string)
	if ds.JsonData == nil {
		return headers
	}

	decrypted := ds.SecureJsonData.Decrypt()
	index := 1
	for {
		headerNameSuffix := fmt.Sprintf("httpHeaderName%d", index)
		headerValueSuffix := fmt.Sprintf("httpHeaderValue%d", index)

		key := ds.JsonData.Get(headerNameSuffix).MustString()
		if key == "" {
			// No (more) header values are available
			break
		}

		if val, ok := decrypted[headerValueSuffix]; ok {
			headers[key] = val
		}
		index++
	}

	return headers
}
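
// cachedDecryptedJSON holds decrypted secureJsonData values together with the
// datasource's last update time, used to invalidate the cache entry on change.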
type cachedDecryptedJSON struct {
	updated time.Time
	json    map[string]string
}
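
// secureJSONDecryptionCache caches decrypted secureJsonData values per datasource ID.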
type secureJSONDecryptionCache struct {
	cache map[int64]cachedDecryptedJSON
	sync.Mutex
}

var dsDecryptionCache = secureJSONDecryptionCache{
	cache: make(map[int64]cachedDecryptedJSON),
}

// DecryptedValues returns cached decrypted values from secureJsonData.
func (ds *DataSource) DecryptedValues() map[string]string {
	dsDecryptionCache.Lock()
	defer dsDecryptionCache.Unlock()

	if item, present := dsDecryptionCache.cache[ds.Id]; present && ds.Updated.Equal(item.updated) {
		return item.json
	}

	json := ds.SecureJsonData.Decrypt()
	dsDecryptionCache.cache[ds.Id] = cachedDecryptedJSON{
		updated: ds.Updated,
		json:    json,
	}

	return json
}

// DecryptedValue returns the decrypted value for key from the cached
// secureJsonData, and whether the key exists.
func (ds *DataSource) DecryptedValue(key string) (string, bool) {
	value, exists := ds.DecryptedValues()[key]
	return value, exists
}

// ClearDSDecryptionCache clears the datasource decryption cache.
func ClearDSDecryptionCache() {
	dsDecryptionCache.Lock()
	defer dsDecryptionCache.Unlock()

	dsDecryptionCache.cache = make(map[int64]cachedDecryptedJSON)
}