Mirror of https://github.com/grafana/grafana.git (synced 2025-02-25 18:55:37 -06:00)

commit 28fc27c4ae: Merge branch 'master' of github.com:grafana/grafana into dashboard-react-page
@@ -2,6 +2,7 @@
 
 ### Minor
 * **Pushover**: Adds support for images in pushover notifier [#10780](https://github.com/grafana/grafana/issues/10780), thx [@jpenalbae](https://github.com/jpenalbae)
+* **Cloudwatch**: Add AWS/Neptune metrics [#14231](https://github.com/grafana/grafana/issues/14231), thx [@tcpatterson](https://github.com/tcpatterson)
 
 # 6.0.0-beta1 (2019-01-30)
 
devenv/docker/blocks/loki/config.yaml (new file, 27 lines)
@@ -0,0 +1,27 @@
+server:
+  http_listen_port: 9080
+  grpc_listen_port: 0
+
+positions:
+  filename: /tmp/positions.yaml
+
+client:
+  url: http://loki:3100/api/prom/push
+
+scrape_configs:
+- job_name: system
+  entry_parser: raw
+  static_configs:
+  - targets:
+      - localhost
+    labels:
+      job: varlogs
+      __path__: /var/log/*log
+- job_name: grafana
+  entry_parser: raw
+  static_configs:
+  - targets:
+      - localhost
+    labels:
+      job: grafana
+      __path__: /var/log/grafana/*log
@@ -1,22 +1,14 @@
-version: "3"
-
-networks:
-  loki:
-
-services:
   loki:
     image: grafana/loki:master
     ports:
       - "3100:3100"
     command: -config.file=/etc/loki/local-config.yaml
-    networks:
-      - loki
 
   promtail:
     image: grafana/promtail:master
     volumes:
+      - ./docker/blocks/loki/config.yaml:/etc/promtail/docker-config.yaml
      - /var/log:/var/log
+      - ../data/log:/var/log/grafana
     command:
       -config.file=/etc/promtail/docker-config.yaml
-    networks:
-      - loki
@@ -49,7 +49,7 @@ export default class SelectOptionGroup extends PureComponent<ExtendedGroupProps,
     return (
       <div className="gf-form-select-box__option-group">
         <div className="gf-form-select-box__option-group__header" onClick={this.onToggleChildren}>
-          <span className="flex-grow">{label}</span>
+          <span className="flex-grow-1">{label}</span>
           <i className={`fa ${expanded ? 'fa-caret-left' : 'fa-caret-down'}`} />{' '}
         </div>
         {expanded && children}
(new file)
@@ -0,0 +1,10 @@
+import React from 'react';
+import { storiesOf } from '@storybook/react';
+import { action } from '@storybook/addon-actions';
+import { ValueMappingsEditor } from './ValueMappingsEditor';
+
+const ValueMappingsEditorStories = storiesOf('UI/ValueMappingsEditor', module);
+
+ValueMappingsEditorStories.add('default', () => {
+  return <ValueMappingsEditor valueMappings={[]} onChange={action('Mapping changed')} />;
+});
@@ -108,8 +108,8 @@ func (hs *HTTPServer) registerRoutes() {
 	r.Get("/api/snapshots-delete/:deleteKey", Wrap(DeleteDashboardSnapshotByDeleteKey))
 	r.Delete("/api/snapshots/:key", reqEditorRole, Wrap(DeleteDashboardSnapshot))
 
-	// api renew session based on remember cookie
-	r.Get("/api/login/ping", quota("session"), hs.LoginAPIPing)
+	// api renew session based on cookie
+	r.Get("/api/login/ping", quota("session"), Wrap(hs.LoginAPIPing))
 
 	// authed api
 	r.Group("/api", func(apiRoute routing.RouteRegister) {
@@ -149,4 +149,4 @@ func (s *fakeUserAuthTokenService) UserAuthenticatedHook(user *m.User, c *m.ReqC
 	return nil
 }
 
-func (s *fakeUserAuthTokenService) UserSignedOutHook(c *m.ReqContext) {}
+func (s *fakeUserAuthTokenService) SignOutUser(c *m.ReqContext) error { return nil }
@@ -136,7 +136,7 @@ func (hs *HTTPServer) loginUserWithUser(user *m.User, c *m.ReqContext) {
 }
 
 func (hs *HTTPServer) Logout(c *m.ReqContext) {
-	hs.AuthTokenService.UserSignedOutHook(c)
+	hs.AuthTokenService.SignOutUser(c)
 
 	if setting.SignoutRedirectUrl != "" {
 		c.Redirect(setting.SignoutRedirectUrl)
@@ -602,4 +602,4 @@ func (s *fakeUserAuthTokenService) UserAuthenticatedHook(user *m.User, c *m.ReqC
 	return nil
 }
 
-func (s *fakeUserAuthTokenService) UserSignedOutHook(c *m.ReqContext) {}
+func (s *fakeUserAuthTokenService) SignOutUser(c *m.ReqContext) error { return nil }
@@ -3,6 +3,7 @@ package auth
 import (
 	"crypto/sha256"
 	"encoding/hex"
+	"errors"
 	"net/http"
 	"net/url"
 	"time"
@@ -31,7 +32,7 @@ var (
 type UserAuthTokenService interface {
 	InitContextWithToken(ctx *models.ReqContext, orgID int64) bool
 	UserAuthenticatedHook(user *models.User, c *models.ReqContext) error
-	UserSignedOutHook(c *models.ReqContext)
+	SignOutUser(c *models.ReqContext) error
 }
 
 type UserAuthTokenServiceImpl struct {
@@ -85,7 +86,7 @@ func (s *UserAuthTokenServiceImpl) InitContextWithToken(ctx *models.ReqContext,
 
 func (s *UserAuthTokenServiceImpl) writeSessionCookie(ctx *models.ReqContext, value string, maxAge int) {
 	if setting.Env == setting.DEV {
-		ctx.Logger.Info("new token", "unhashed token", value)
+		ctx.Logger.Debug("new token", "unhashed token", value)
 	}
 
 	ctx.Resp.Header().Del("Set-Cookie")
@@ -112,8 +113,19 @@ func (s *UserAuthTokenServiceImpl) UserAuthenticatedHook(user *models.User, c *m
 	return nil
 }
 
-func (s *UserAuthTokenServiceImpl) UserSignedOutHook(c *models.ReqContext) {
+func (s *UserAuthTokenServiceImpl) SignOutUser(c *models.ReqContext) error {
+	unhashedToken := c.GetCookie(s.Cfg.LoginCookieName)
+	if unhashedToken == "" {
+		return errors.New("cannot logout without session token")
+	}
+
+	hashedToken := hashToken(unhashedToken)
+
+	sql := `DELETE FROM user_auth_token WHERE auth_token = ?`
+	_, err := s.SQLStore.NewSession().Exec(sql, hashedToken)
+
 	s.writeSessionCookie(c, "", -1)
+	return err
 }
 
 func (s *UserAuthTokenServiceImpl) CreateToken(userId int64, clientIP, userAgent string) (*userAuthToken, error) {
@@ -1,10 +1,15 @@
 package auth
 
 import (
+	"fmt"
+	"net/http"
+	"net/http/httptest"
 	"testing"
 	"time"
 
+	"github.com/grafana/grafana/pkg/models"
 	"github.com/grafana/grafana/pkg/setting"
+	macaron "gopkg.in/macaron.v1"
 
 	"github.com/grafana/grafana/pkg/log"
 	"github.com/grafana/grafana/pkg/services/sqlstore"
@@ -46,6 +51,40 @@ func TestUserAuthToken(t *testing.T) {
 			So(err, ShouldEqual, ErrAuthTokenNotFound)
 			So(LookupToken, ShouldBeNil)
 		})
 
+		Convey("signing out should delete token and cookie if present", func() {
+			httpreq := &http.Request{Header: make(http.Header)}
+			httpreq.AddCookie(&http.Cookie{Name: userAuthTokenService.Cfg.LoginCookieName, Value: token.UnhashedToken})
+
+			ctx := &models.ReqContext{Context: &macaron.Context{
+				Req:  macaron.Request{Request: httpreq},
+				Resp: macaron.NewResponseWriter("POST", httptest.NewRecorder()),
+			},
+				Logger: log.New("fakelogger"),
+			}
+
+			err = userAuthTokenService.SignOutUser(ctx)
+			So(err, ShouldBeNil)
+
+			// makes sure we tell the browser to overwrite the cookie
+			cookieHeader := fmt.Sprintf("%s=; Path=/; Max-Age=0; HttpOnly", userAuthTokenService.Cfg.LoginCookieName)
+			So(ctx.Resp.Header().Get("Set-Cookie"), ShouldEqual, cookieHeader)
+		})
+
+		Convey("signing out an none existing session should return an error", func() {
+			httpreq := &http.Request{Header: make(http.Header)}
+			httpreq.AddCookie(&http.Cookie{Name: userAuthTokenService.Cfg.LoginCookieName, Value: ""})
+
+			ctx := &models.ReqContext{Context: &macaron.Context{
+				Req:  macaron.Request{Request: httpreq},
+				Resp: macaron.NewResponseWriter("POST", httptest.NewRecorder()),
+			},
+				Logger: log.New("fakelogger"),
+			}
+
+			err = userAuthTokenService.SignOutUser(ctx)
+			So(err, ShouldNotBeNil)
+		})
 	})
 
 	Convey("expires correctly", func() {
@@ -242,10 +242,7 @@ func (ss *SqlStore) buildConnectionString() (string, error) {
 
 		cnnstr += ss.buildExtraConnectionString('&')
 	case migrator.POSTGRES:
-		host, port, err := util.SplitIPPort(ss.dbCfg.Host, "5432")
-		if err != nil {
-			return "", err
-		}
+		host, port := util.SplitHostPortDefault(ss.dbCfg.Host, "127.0.0.1", "5432")
 		if ss.dbCfg.Pwd == "" {
 			ss.dbCfg.Pwd = "''"
 		}
@@ -95,6 +95,7 @@ func init() {
 	"AWS/Logs": {"IncomingBytes", "IncomingLogEvents", "ForwardedBytes", "ForwardedLogEvents", "DeliveryErrors", "DeliveryThrottling"},
 	"AWS/ML": {"PredictCount", "PredictFailureCount"},
 	"AWS/NATGateway": {"PacketsOutToDestination", "PacketsOutToSource", "PacketsInFromSource", "PacketsInFromDestination", "BytesOutToDestination", "BytesOutToSource", "BytesInFromSource", "BytesInFromDestination", "ErrorPortAllocation", "ActiveConnectionCount", "ConnectionAttemptCount", "ConnectionEstablishedCount", "IdleTimeoutCount", "PacketsDropCount"},
+	"AWS/Neptune": {"CPUUtilization", "ClusterReplicaLag", "ClusterReplicaLagMaximum", "ClusterReplicaLagMinimum", "EngineUptime", "FreeableMemory", "FreeLocalStorage", "GremlinHttp1xx", "GremlinHttp2xx", "GremlinHttp4xx", "GremlinHttp5xx", "GremlinErrors", "GremlinRequests", "GremlinRequestsPerSec", "GremlinWebSocketSuccess", "GremlinWebSocketClientErrors", "GremlinWebSocketServerErrors", "GremlinWebSocketAvailableConnections", "Http1xx", "Http2xx", "Http4xx", "Http5xx", "Http100", "Http101", "Http200", "Http400", "Http403", "Http405", "Http413", "Http429", "Http500", "Http501", "LoaderErrors", "LoaderRequests", "NetworkReceiveThroughput", "NetworkThroughput", "NetworkTransmitThroughput", "SparqlHttp1xx", "SparqlHttp2xx", "SparqlHttp4xx", "SparqlHttp5xx", "SparqlErrors", "SparqlRequests", "SparqlRequestsPerSec", "StatusErrors", "StatusRequests", "VolumeBytesUsed", "VolumeReadIOPs", "VolumeWriteIOPs"},
 	"AWS/NetworkELB": {"ActiveFlowCount", "ConsumedLCUs", "HealthyHostCount", "NewFlowCount", "ProcessedBytes", "TCP_Client_Reset_Count", "TCP_ELB_Reset_Count", "TCP_Target_Reset_Count", "UnHealthyHostCount"},
 	"AWS/OpsWorks": {"cpu_idle", "cpu_nice", "cpu_system", "cpu_user", "cpu_waitio", "load_1", "load_5", "load_15", "memory_buffers", "memory_cached", "memory_free", "memory_swap", "memory_total", "memory_used", "procs"},
 	"AWS/Redshift": {"CPUUtilization", "DatabaseConnections", "HealthStatus", "MaintenanceMode", "NetworkReceiveThroughput", "NetworkTransmitThroughput", "PercentageDiskSpaceUsed", "QueriesCompletedPerSecond", "QueryDuration", "QueryRuntimeBreakdown", "ReadIOPS", "ReadLatency", "ReadThroughput", "WLMQueriesCompletedPerSecond", "WLMQueryDuration", "WLMQueueLength", "WriteIOPS", "WriteLatency", "WriteThroughput"},
@@ -149,6 +150,7 @@ func init() {
 	"AWS/Logs": {"LogGroupName", "DestinationType", "FilterName"},
 	"AWS/ML": {"MLModelId", "RequestMode"},
 	"AWS/NATGateway": {"NatGatewayId"},
+	"AWS/Neptune": {"DBClusterIdentifier", "Role", "DatabaseClass", "EngineName"},
 	"AWS/NetworkELB": {"LoadBalancer", "TargetGroup", "AvailabilityZone"},
 	"AWS/OpsWorks": {"StackId", "LayerId", "InstanceId"},
 	"AWS/Redshift": {"NodeID", "ClusterIdentifier", "latency", "service class", "wmlid"},
@@ -49,10 +49,7 @@ func generateConnectionString(datasource *models.DataSource) (string, error) {
 		}
 	}
 
-	server, port, err := util.SplitIPPort(datasource.Url, "1433")
-	if err != nil {
-		return "", err
-	}
+	server, port := util.SplitHostPortDefault(datasource.Url, "localhost", "1433")
 
 	encrypt := datasource.JsonData.Get("encrypt").MustString("false")
 	connStr := fmt.Sprintf("server=%s;port=%s;database=%s;user id=%s;password=%s;",
(deleted file)
@@ -1,25 +0,0 @@
-package util
-
-import (
-	"net"
-)
-
-// SplitIPPort splits the ip string and port.
-func SplitIPPort(ipStr string, portDefault string) (ip string, port string, err error) {
-	ipAddr := net.ParseIP(ipStr)
-
-	if ipAddr == nil {
-		// Port was included
-		ip, port, err = net.SplitHostPort(ipStr)
-
-		if err != nil {
-			return "", "", err
-		}
-	} else {
-		// No port was included
-		ip = ipAddr.String()
-		port = portDefault
-	}
-
-	return ip, port, nil
-}
@@ -7,19 +7,13 @@ import (
 
 // ParseIPAddress parses an IP address and removes port and/or IPV6 format
 func ParseIPAddress(input string) string {
-	s := input
-	lastIndex := strings.LastIndex(input, ":")
+	host, _ := SplitHostPort(input)
 
-	if lastIndex != -1 {
-		if lastIndex > 0 && input[lastIndex-1:lastIndex] != ":" {
-			s = input[:lastIndex]
-		}
-	}
+	ip := net.ParseIP(host)
 
-	s = strings.Replace(s, "[", "", -1)
-	s = strings.Replace(s, "]", "", -1)
-
-	ip := net.ParseIP(s)
+	if ip == nil {
+		return host
+	}
 
 	if ip.IsLoopback() {
 		return "127.0.0.1"
@@ -27,3 +21,34 @@ func ParseIPAddress(input string) string {
 
 	return ip.String()
 }
+
+// SplitHostPortDefault splits ip address/hostname string by host and port. Defaults used if no match found
+func SplitHostPortDefault(input, defaultHost, defaultPort string) (host string, port string) {
+	port = defaultPort
+	s := input
+	lastIndex := strings.LastIndex(input, ":")
+
+	if lastIndex != -1 {
+		if lastIndex > 0 && input[lastIndex-1:lastIndex] != ":" {
+			s = input[:lastIndex]
+			port = input[lastIndex+1:]
+		} else if lastIndex == 0 {
+			s = defaultHost
+			port = input[lastIndex+1:]
+		}
+	} else {
+		port = defaultPort
+	}
+
+	s = strings.Replace(s, "[", "", -1)
+	s = strings.Replace(s, "]", "", -1)
+	port = strings.Replace(port, "[", "", -1)
+	port = strings.Replace(port, "]", "", -1)
+
+	return s, port
+}
+
+// SplitHostPort splits ip address/hostname string by host and port
+func SplitHostPort(input string) (host string, port string) {
+	return SplitHostPortDefault(input, "", "")
+}
@@ -9,8 +9,90 @@ import (
 func TestParseIPAddress(t *testing.T) {
 	Convey("Test parse ip address", t, func() {
 		So(ParseIPAddress("192.168.0.140:456"), ShouldEqual, "192.168.0.140")
+		So(ParseIPAddress("192.168.0.140"), ShouldEqual, "192.168.0.140")
 		So(ParseIPAddress("[::1:456]"), ShouldEqual, "127.0.0.1")
 		So(ParseIPAddress("[::1]"), ShouldEqual, "127.0.0.1")
-		So(ParseIPAddress("192.168.0.140"), ShouldEqual, "192.168.0.140")
+		So(ParseIPAddress("::1"), ShouldEqual, "127.0.0.1")
+		So(ParseIPAddress("::1:123"), ShouldEqual, "127.0.0.1")
+	})
+}
+
+func TestSplitHostPortDefault(t *testing.T) {
+	Convey("Test split ip address to host and port", t, func() {
+		host, port := SplitHostPortDefault("192.168.0.140:456", "", "")
+		So(host, ShouldEqual, "192.168.0.140")
+		So(port, ShouldEqual, "456")
+
+		host, port = SplitHostPortDefault("192.168.0.140", "", "123")
+		So(host, ShouldEqual, "192.168.0.140")
+		So(port, ShouldEqual, "123")
+
+		host, port = SplitHostPortDefault("[::1:456]", "", "")
+		So(host, ShouldEqual, "::1")
+		So(port, ShouldEqual, "456")
+
+		host, port = SplitHostPortDefault("[::1]", "", "123")
+		So(host, ShouldEqual, "::1")
+		So(port, ShouldEqual, "123")
+
+		host, port = SplitHostPortDefault("::1:123", "", "")
+		So(host, ShouldEqual, "::1")
+		So(port, ShouldEqual, "123")
+
+		host, port = SplitHostPortDefault("::1", "", "123")
+		So(host, ShouldEqual, "::1")
+		So(port, ShouldEqual, "123")
+
+		host, port = SplitHostPortDefault(":456", "1.2.3.4", "")
+		So(host, ShouldEqual, "1.2.3.4")
+		So(port, ShouldEqual, "456")
+
+		host, port = SplitHostPortDefault("xyz.rds.amazonaws.com", "", "123")
+		So(host, ShouldEqual, "xyz.rds.amazonaws.com")
+		So(port, ShouldEqual, "123")
+
+		host, port = SplitHostPortDefault("xyz.rds.amazonaws.com:123", "", "")
+		So(host, ShouldEqual, "xyz.rds.amazonaws.com")
+		So(port, ShouldEqual, "123")
+	})
+}
+
+func TestSplitHostPort(t *testing.T) {
+	Convey("Test split ip address to host and port", t, func() {
+		host, port := SplitHostPort("192.168.0.140:456")
+		So(host, ShouldEqual, "192.168.0.140")
+		So(port, ShouldEqual, "456")
+
+		host, port = SplitHostPort("192.168.0.140")
+		So(host, ShouldEqual, "192.168.0.140")
+		So(port, ShouldEqual, "")
+
+		host, port = SplitHostPort("[::1:456]")
+		So(host, ShouldEqual, "::1")
+		So(port, ShouldEqual, "456")
+
+		host, port = SplitHostPort("[::1]")
+		So(host, ShouldEqual, "::1")
+		So(port, ShouldEqual, "")
+
+		host, port = SplitHostPort("::1:123")
+		So(host, ShouldEqual, "::1")
+		So(port, ShouldEqual, "123")
+
+		host, port = SplitHostPort("::1")
+		So(host, ShouldEqual, "::1")
+		So(port, ShouldEqual, "")
+
+		host, port = SplitHostPort(":456")
+		So(host, ShouldEqual, "")
+		So(port, ShouldEqual, "456")
+
+		host, port = SplitHostPort("xyz.rds.amazonaws.com")
+		So(host, ShouldEqual, "xyz.rds.amazonaws.com")
+		So(port, ShouldEqual, "")
+
+		host, port = SplitHostPort("xyz.rds.amazonaws.com:123")
+		So(host, ShouldEqual, "xyz.rds.amazonaws.com")
+		So(port, ShouldEqual, "123")
 	})
 }
(deleted file)
@@ -1,43 +0,0 @@
-package util
-
-import (
-	"testing"
-
-	. "github.com/smartystreets/goconvey/convey"
-)
-
-func TestSplitIPPort(t *testing.T) {
-
-	Convey("When parsing an IPv4 without explicit port", t, func() {
-		ip, port, err := SplitIPPort("1.2.3.4", "5678")
-
-		So(err, ShouldEqual, nil)
-		So(ip, ShouldEqual, "1.2.3.4")
-		So(port, ShouldEqual, "5678")
-	})
-
-	Convey("When parsing an IPv6 without explicit port", t, func() {
-		ip, port, err := SplitIPPort("::1", "5678")
-
-		So(err, ShouldEqual, nil)
-		So(ip, ShouldEqual, "::1")
-		So(port, ShouldEqual, "5678")
-	})
-
-	Convey("When parsing an IPv4 with explicit port", t, func() {
-		ip, port, err := SplitIPPort("1.2.3.4:56", "78")
-
-		So(err, ShouldEqual, nil)
-		So(ip, ShouldEqual, "1.2.3.4")
-		So(port, ShouldEqual, "56")
-	})
-
-	Convey("When parsing an IPv6 with explicit port", t, func() {
-		ip, port, err := SplitIPPort("[::1]:56", "78")
-
-		So(err, ShouldEqual, nil)
-		So(ip, ShouldEqual, "::1")
-		So(port, ShouldEqual, "56")
-	})
-
-}
@@ -1,6 +1,7 @@
 import _ from 'lodash';
 import coreModule from 'app/core/core_module';
 import appEvents from 'app/core/app_events';
+import config from 'app/core/config';
 import { DashboardModel } from 'app/features/dashboard/state/DashboardModel';
 
 export class BackendSrv {
@@ -103,9 +104,16 @@ export class BackendSrv {
       err => {
         // handle unauthorized
         if (err.status === 401 && this.contextSrv.user.isSignedIn && firstAttempt) {
-          return this.loginPing().then(() => {
-            options.retry = 1;
-            return this.request(options);
-          });
+          return this.loginPing()
+            .then(() => {
+              options.retry = 1;
+              return this.request(options);
+            })
+            .catch(err => {
+              if (err.status === 401) {
+                window.location.href = config.appSubUrl + '/logout';
+                throw err;
+              }
+            });
         }
 
@@ -184,12 +192,19 @@ export class BackendSrv {
 
         // handle unauthorized for backend requests
         if (requestIsLocal && firstAttempt && err.status === 401) {
-          return this.loginPing().then(() => {
-            options.retry = 1;
-            if (canceler) {
-              canceler.resolve();
-            }
-            return this.datasourceRequest(options);
-          });
+          return this.loginPing()
+            .then(() => {
+              options.retry = 1;
+              if (canceler) {
+                canceler.resolve();
+              }
+              return this.datasourceRequest(options);
+            })
+            .catch(err => {
+              if (err.status === 401) {
+                window.location.href = config.appSubUrl + '/logout';
+                throw err;
+              }
+            });
         }
 
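Both BackendSrv hunks above implement the same behavior: on a 401 from a Grafana backend call, first call loginPing() to renew the session and retry the request once; if the ping itself also fails with 401, the browser is sent to /logout. A rough, framework-free TypeScript sketch of that control flow (requestWithSessionRetry, redirectToLogout and the err.status shape are illustrative stand-ins, not BackendSrv's actual API):

    // Hedged sketch of the retry-then-logout flow; not code from the commit.
    async function requestWithSessionRetry<T>(
      request: () => Promise<T>,
      loginPing: () => Promise<void>,
      redirectToLogout: () => void
    ): Promise<T> {
      try {
        return await request();
      } catch (err: any) {
        if (err.status !== 401) {
          throw err; // only 401s are retried
        }
        try {
          await loginPing(); // attempt to renew the session once
        } catch (pingErr: any) {
          if (pingErr.status === 401) {
            redirectToLogout(); // session really is gone
          }
          throw pingErr;
        }
        return request(); // session renewed: retry the original call once
      }
    }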
@@ -13,6 +13,11 @@ const DEFAULT_EXPLORE_STATE: ExploreUrlState = {
   datasource: null,
   queries: [],
   range: DEFAULT_RANGE,
+  ui: {
+    showingGraph: true,
+    showingTable: true,
+    showingLogs: true,
+  },
 };
 
 describe('state functions', () => {
@@ -69,9 +74,11 @@ describe('state functions', () => {
         to: 'now',
       },
     };
 
     expect(serializeStateToUrlParam(state)).toBe(
       '{"datasource":"foo","queries":[{"expr":"metric{test=\\"a/b\\"}"},' +
-        '{"expr":"super{foo=\\"x/z\\"}"}],"range":{"from":"now-5h","to":"now"}}'
+        '{"expr":"super{foo=\\"x/z\\"}"}],"range":{"from":"now-5h","to":"now"},' +
+        '"ui":{"showingGraph":true,"showingTable":true,"showingLogs":true}}'
     );
   });
 
@@ -93,7 +100,7 @@ describe('state functions', () => {
       },
     };
     expect(serializeStateToUrlParam(state, true)).toBe(
-      '["now-5h","now","foo",{"expr":"metric{test=\\"a/b\\"}"},{"expr":"super{foo=\\"x/z\\"}"}]'
+      '["now-5h","now","foo",{"expr":"metric{test=\\"a/b\\"}"},{"expr":"super{foo=\\"x/z\\"}"},{"ui":[true,true,true]}]'
     );
   });
 });
@@ -118,7 +125,28 @@ describe('state functions', () => {
     };
     const serialized = serializeStateToUrlParam(state);
     const parsed = parseUrlState(serialized);
+    expect(state).toMatchObject(parsed);
+  });
+
+  it('can parse the compact serialized state into the original state', () => {
+    const state = {
+      ...DEFAULT_EXPLORE_STATE,
+      datasource: 'foo',
+      queries: [
+        {
+          expr: 'metric{test="a/b"}',
+        },
+        {
+          expr: 'super{foo="x/z"}',
+        },
+      ],
+      range: {
+        from: 'now - 5h',
+        to: 'now',
+      },
+    };
+    const serialized = serializeStateToUrlParam(state, true);
+    const parsed = parseUrlState(serialized);
     expect(state).toMatchObject(parsed);
   });
 });
@@ -27,6 +27,12 @@ export const DEFAULT_RANGE = {
   to: 'now',
 };
 
+export const DEFAULT_UI_STATE = {
+  showingTable: true,
+  showingGraph: true,
+  showingLogs: true,
+};
+
 const MAX_HISTORY_ITEMS = 100;
 
 export const LAST_USED_DATASOURCE_KEY = 'grafana.explore.datasource';
@@ -147,7 +153,12 @@ export function buildQueryTransaction(
 
 export const clearQueryKeys: ((query: DataQuery) => object) = ({ key, refId, ...rest }) => rest;
 
+const isMetricSegment = (segment: { [key: string]: string }) => segment.hasOwnProperty('expr');
+const isUISegment = (segment: { [key: string]: string }) => segment.hasOwnProperty('ui');
+
 export function parseUrlState(initial: string | undefined): ExploreUrlState {
+  let uiState = DEFAULT_UI_STATE;
+
   if (initial) {
     try {
       const parsed = JSON.parse(decodeURI(initial));
@@ -160,20 +171,41 @@ export function parseUrlState(initial: string | undefined): ExploreUrlState {
           to: parsed[1],
         };
         const datasource = parsed[2];
-        const queries = parsed.slice(3);
-        return { datasource, queries, range };
+        let queries = [];
+
+        parsed.slice(3).forEach(segment => {
+          if (isMetricSegment(segment)) {
+            queries = [...queries, segment];
+          }
+
+          if (isUISegment(segment)) {
+            uiState = {
+              showingGraph: segment.ui[0],
+              showingLogs: segment.ui[1],
+              showingTable: segment.ui[2],
+            };
+          }
+        });
+
+        return { datasource, queries, range, ui: uiState };
       }
       return parsed;
     } catch (e) {
      console.error(e);
     }
   }
-  return { datasource: null, queries: [], range: DEFAULT_RANGE };
+  return { datasource: null, queries: [], range: DEFAULT_RANGE, ui: uiState };
 }
 
 export function serializeStateToUrlParam(urlState: ExploreUrlState, compact?: boolean): string {
   if (compact) {
-    return JSON.stringify([urlState.range.from, urlState.range.to, urlState.datasource, ...urlState.queries]);
+    return JSON.stringify([
+      urlState.range.from,
+      urlState.range.to,
+      urlState.datasource,
+      ...urlState.queries,
+      { ui: [!!urlState.ui.showingGraph, !!urlState.ui.showingLogs, !!urlState.ui.showingTable] },
+    ]);
   }
   return JSON.stringify(urlState);
 }
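Taken together, these hunks define the new compact URL format: [from, to, datasource, ...query segments, { ui: [showingGraph, showingLogs, showingTable] }]. A hedged round-trip illustration in TypeScript (the sample state object is made up for the example; only the array shape comes from the code and tests above):

    // Illustrative only; mirrors serializeStateToUrlParam(state, true) / parseUrlState.
    const state = {
      datasource: 'foo',
      queries: [{ expr: 'metric{test="a/b"}' }],
      range: { from: 'now-5h', to: 'now' },
      ui: { showingGraph: true, showingLogs: false, showingTable: true },
    };

    // Compact form: [from, to, datasource, ...queries, { ui: [graph, logs, table] }]
    const compact = JSON.stringify([
      state.range.from,
      state.range.to,
      state.datasource,
      ...state.queries,
      { ui: [state.ui.showingGraph, state.ui.showingLogs, state.ui.showingTable] },
    ]);
    // => ["now-5h","now","foo",{"expr":"metric{test=\"a/b\"}"},{"ui":[true,false,true]}]

    // parseUrlState walks the segments after index 2: segments with an `expr` key
    // become queries, and a `ui` segment maps back as
    // ui[0] -> showingGraph, ui[1] -> showingLogs, ui[2] -> showingTable.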
@@ -1,2 +1,3 @@
 export { SaveDashboardAsModalCtrl } from './SaveDashboardAsModalCtrl';
 export { SaveDashboardModalCtrl } from './SaveDashboardModalCtrl';
+export { SaveProvisionedDashboardModalCtrl } from './SaveProvisionedDashboardModalCtrl';
@@ -133,7 +133,7 @@ export class QueriesTab extends PureComponent<Props, State> {
     return (
       <>
         <DataSourcePicker datasources={this.datasources} onChange={this.onChangeDataSource} current={currentDS} />
-        <div className="flex-grow" />
+        <div className="flex-grow-1" />
         {!isAddingMixed && (
           <button className="btn navbar-button navbar-button--primary" onClick={this.onAddQueryClick}>
             Add Query
@@ -32,7 +32,7 @@ import {
 import { RawTimeRange, TimeRange, DataQuery } from '@grafana/ui';
 import { ExploreItemState, ExploreUrlState, RangeScanner, ExploreId } from 'app/types/explore';
 import { StoreState } from 'app/types';
-import { LAST_USED_DATASOURCE_KEY, ensureQueries, DEFAULT_RANGE } from 'app/core/utils/explore';
+import { LAST_USED_DATASOURCE_KEY, ensureQueries, DEFAULT_RANGE, DEFAULT_UI_STATE } from 'app/core/utils/explore';
 import { Emitter } from 'app/core/utils/emitter';
 import { ExploreToolbar } from './ExploreToolbar';
 
@@ -61,7 +61,7 @@ interface ExploreProps {
   supportsGraph: boolean | null;
   supportsLogs: boolean | null;
   supportsTable: boolean | null;
-  urlState: ExploreUrlState;
+  urlState?: ExploreUrlState;
 }
 
 /**
@@ -107,18 +107,20 @@ export class Explore extends React.PureComponent<ExploreProps> {
     // Don't initialize on split, but need to initialize urlparameters when present
     if (!initialized) {
       // Load URL state and parse range
-      const { datasource, queries, range = DEFAULT_RANGE } = (urlState || {}) as ExploreUrlState;
+      const { datasource, queries, range = DEFAULT_RANGE, ui = DEFAULT_UI_STATE } = (urlState || {}) as ExploreUrlState;
       const initialDatasource = datasource || store.get(LAST_USED_DATASOURCE_KEY);
       const initialQueries: DataQuery[] = ensureQueries(queries);
       const initialRange = { from: parseTime(range.from), to: parseTime(range.to) };
       const width = this.el ? this.el.offsetWidth : 0;
 
       this.props.initializeExplore(
         exploreId,
         initialDatasource,
         initialQueries,
         initialRange,
         width,
-        this.exploreEvents
+        this.exploreEvents,
+        ui
       );
     }
   }
@@ -111,7 +111,7 @@ export class QueryRow extends PureComponent<QueryRowProps> {
         <div className="query-row-status">
           <QueryTransactionStatus transactions={transactions} />
         </div>
-        <div className="query-row-field">
+        <div className="query-row-field flex-shrink-1">
           {QueryField ? (
             <QueryField
               datasource={datasourceInstance}
@@ -135,7 +135,7 @@ export class QueryRow extends PureComponent<QueryRowProps> {
             />
           )}
         </div>
-        <div className="gf-form-inline">
+        <div className="gf-form-inline flex-shrink-0">
           <div className="gf-form">
             <button className="gf-form-label gf-form-label--btn" onClick={this.onClickClearButton}>
               <i className="fa fa-times" />
@@ -8,6 +8,7 @@ import {
   RangeScanner,
   ResultType,
   QueryTransaction,
+  ExploreUIState,
 } from 'app/types/explore';
 
 export enum ActionTypes {
@@ -106,6 +107,7 @@ export interface InitializeExploreAction {
     exploreDatasources: DataSourceSelectItem[];
     queries: DataQuery[];
     range: RawTimeRange;
+    ui: ExploreUIState;
   };
 }
 
@@ -38,6 +38,7 @@ import {
   ResultType,
   QueryOptions,
   QueryTransaction,
+  ExploreUIState,
 } from 'app/types/explore';
 
 import {
@@ -78,7 +79,15 @@ export function changeDatasource(exploreId: ExploreId, datasource: string): Thun
     await dispatch(importQueries(exploreId, modifiedQueries, currentDataSourceInstance, newDataSourceInstance));
 
     dispatch(updateDatasourceInstance(exploreId, newDataSourceInstance));
-    dispatch(loadDatasource(exploreId, newDataSourceInstance));
+
+    try {
+      await dispatch(loadDatasource(exploreId, newDataSourceInstance));
+    } catch (error) {
+      console.error(error);
+      return;
+    }
+
+    dispatch(runQueries(exploreId));
   };
 }
 
@@ -154,7 +163,8 @@ export function initializeExplore(
   queries: DataQuery[],
   range: RawTimeRange,
   containerWidth: number,
-  eventBridge: Emitter
+  eventBridge: Emitter,
+  ui: ExploreUIState
 ): ThunkResult<void> {
   return async dispatch => {
     const exploreDatasources: DataSourceSelectItem[] = getDatasourceSrv()
@@ -175,6 +185,7 @@ export function initializeExplore(
         exploreDatasources,
         queries,
         range,
+        ui,
       },
     });
 
@@ -194,7 +205,14 @@ export function initializeExplore(
       }
 
       dispatch(updateDatasourceInstance(exploreId, instance));
-      dispatch(loadDatasource(exploreId, instance));
+
+      try {
+        await dispatch(loadDatasource(exploreId, instance));
+      } catch (error) {
+        console.error(error);
+        return;
+      }
+      dispatch(runQueries(exploreId, true));
     } else {
       dispatch(loadDatasourceMissing(exploreId));
     }
@@ -258,10 +276,7 @@ export const queriesImported = (exploreId: ExploreId, queries: DataQuery[]): Que
  * run datasource-specific code. Existing queries are imported to the new datasource if an importer exists,
  * e.g., Prometheus -> Loki queries.
  */
-export const loadDatasourceSuccess = (
-  exploreId: ExploreId,
-  instance: any,
-): LoadDatasourceSuccessAction => {
+export const loadDatasourceSuccess = (exploreId: ExploreId, instance: any): LoadDatasourceSuccessAction => {
   // Capabilities
   const supportsGraph = instance.meta.metrics;
   const supportsLogs = instance.meta.logs;
@@ -343,8 +358,8 @@ export function loadDatasource(exploreId: ExploreId, instance: DataSourceApi): T
 
     // Keep ID to track selection
     dispatch(loadDatasourcePending(exploreId, datasourceName));
 
     let datasourceError = null;
 
     try {
       const testResult = await instance.testDatasource();
       datasourceError = testResult.status === 'success' ? null : testResult.message;
@@ -354,7 +369,7 @@ export function loadDatasource(exploreId: ExploreId, instance: DataSourceApi): T
 
     if (datasourceError) {
       dispatch(loadDatasourceFailure(exploreId, datasourceError));
-      return;
+      return Promise.reject(`${datasourceName} loading failed`);
     }
 
     if (datasourceName !== getState().explore[exploreId].requestedDatasourceName) {
@@ -372,7 +387,7 @@ export function loadDatasource(exploreId: ExploreId, instance: DataSourceApi): T
     }
 
     dispatch(loadDatasourceSuccess(exploreId, instance));
-    dispatch(runQueries(exploreId));
+    return Promise.resolve();
   };
 }
 
@@ -572,7 +587,7 @@ export function removeQueryRow(exploreId: ExploreId, index: number): ThunkResult
 /**
  * Main action to run queries and dispatches sub-actions based on which result viewers are active
  */
-export function runQueries(exploreId: ExploreId) {
+export function runQueries(exploreId: ExploreId, ignoreUIState = false) {
   return (dispatch, getState) => {
     const {
       datasourceInstance,
@@ -596,7 +611,7 @@ export function runQueries(exploreId: ExploreId) {
     const interval = datasourceInstance.interval;
 
     // Keep table queries first since they need to return quickly
-    if (showingTable && supportsTable) {
+    if ((ignoreUIState || showingTable) && supportsTable) {
       dispatch(
         runQueriesForType(
           exploreId,
@@ -611,7 +626,7 @@ export function runQueries(exploreId: ExploreId) {
         )
       );
     }
-    if (showingGraph && supportsGraph) {
+    if ((ignoreUIState || showingGraph) && supportsGraph) {
       dispatch(
         runQueriesForType(
           exploreId,
@@ -625,9 +640,10 @@ export function runQueries(exploreId: ExploreId) {
         )
       );
     }
-    if (showingLogs && supportsLogs) {
+    if ((ignoreUIState || showingLogs) && supportsLogs) {
       dispatch(runQueriesForType(exploreId, 'Logs', { interval, format: 'logs' }));
     }
+
     dispatch(stateSave());
   };
 }
@@ -766,6 +782,11 @@ export function stateSave() {
       datasource: left.datasourceInstance.name,
       queries: left.modifiedQueries.map(clearQueryKeys),
       range: left.range,
+      ui: {
+        showingGraph: left.showingGraph,
+        showingLogs: left.showingLogs,
+        showingTable: left.showingTable,
+      },
     };
     urlStates.left = serializeStateToUrlParam(leftUrlState, true);
     if (split) {
@@ -773,48 +794,64 @@ export function stateSave() {
         datasource: right.datasourceInstance.name,
         queries: right.modifiedQueries.map(clearQueryKeys),
         range: right.range,
+        ui: {
+          showingGraph: right.showingGraph,
+          showingLogs: right.showingLogs,
+          showingTable: right.showingTable,
+        },
       };
 
       urlStates.right = serializeStateToUrlParam(rightUrlState, true);
     }
 
     dispatch(updateLocation({ query: urlStates }));
   };
 }
 
 /**
- * Expand/collapse the graph result viewer. When collapsed, graph queries won't be run.
+ * Creates action to collapse graph/logs/table panel. When panel is collapsed,
+ * queries won't be run
  */
-export function toggleGraph(exploreId: ExploreId): ThunkResult<void> {
+const togglePanelActionCreator = (type: ActionTypes.ToggleGraph | ActionTypes.ToggleTable | ActionTypes.ToggleLogs) => (
+  exploreId: ExploreId
+) => {
   return (dispatch, getState) => {
-    dispatch({ type: ActionTypes.ToggleGraph, payload: { exploreId } });
-    if (getState().explore[exploreId].showingGraph) {
+    let shouldRunQueries;
+    dispatch({ type, payload: { exploreId } });
+    dispatch(stateSave());
+
+    switch (type) {
+      case ActionTypes.ToggleGraph:
+        shouldRunQueries = getState().explore[exploreId].showingGraph;
+        break;
+      case ActionTypes.ToggleLogs:
+        shouldRunQueries = getState().explore[exploreId].showingLogs;
+        break;
+      case ActionTypes.ToggleTable:
+        shouldRunQueries = getState().explore[exploreId].showingTable;
+        break;
+    }
+
+    if (shouldRunQueries) {
       dispatch(runQueries(exploreId));
     }
   };
-}
+};
+
+/**
+ * Expand/collapse the graph result viewer. When collapsed, graph queries won't be run.
+ */
+export const toggleGraph = togglePanelActionCreator(ActionTypes.ToggleGraph);
 
 /**
  * Expand/collapse the logs result viewer. When collapsed, log queries won't be run.
  */
-export function toggleLogs(exploreId: ExploreId): ThunkResult<void> {
-  return (dispatch, getState) => {
-    dispatch({ type: ActionTypes.ToggleLogs, payload: { exploreId } });
-    if (getState().explore[exploreId].showingLogs) {
-      dispatch(runQueries(exploreId));
-    }
-  };
-}
+export const toggleLogs = togglePanelActionCreator(ActionTypes.ToggleLogs);
 
 /**
  * Expand/collapse the table result viewer. When collapsed, table queries won't be run.
  */
-export function toggleTable(exploreId: ExploreId): ThunkResult<void> {
-  return (dispatch, getState) => {
-    dispatch({ type: ActionTypes.ToggleTable, payload: { exploreId } });
-    if (getState().explore[exploreId].showingTable) {
-      dispatch(runQueries(exploreId));
-    }
-  };
-}
+export const toggleTable = togglePanelActionCreator(ActionTypes.ToggleTable);
 
 /**
  * Resets state for explore.
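The refactor above folds three nearly identical thunks into one factory. A standalone TypeScript sketch of the same pattern, with local stand-in names (PanelFlag, the string action types and the state shape are illustrative, not Grafana's real ones):

    // Hedged, self-contained illustration of the togglePanelActionCreator pattern.
    type PanelFlag = 'showingGraph' | 'showingLogs' | 'showingTable';
    type Thunk = (dispatch: (action: any) => void, getState: () => any) => void;

    const togglePanel = (actionType: string, flag: PanelFlag) => (exploreId: string): Thunk => {
      return (dispatch, getState) => {
        dispatch({ type: actionType, payload: { exploreId } }); // reducer flips the flag
        // The real code also dispatches stateSave() here to sync the URL.
        if (getState().explore[exploreId][flag]) {
          // Panel just became visible: trigger its queries (runQueries in the real code).
          dispatch({ type: 'RUN_QUERIES', payload: { exploreId } });
        }
      };
    };

    // One factory yields all three user-facing toggles:
    const toggleGraph = togglePanel('TOGGLE_GRAPH', 'showingGraph');
    const toggleLogs = togglePanel('TOGGLE_LOGS', 'showingLogs');
    const toggleTable = togglePanel('TOGGLE_TABLE', 'showingTable');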
@@ -163,7 +163,7 @@ export const itemReducer = (state, action: Action): ExploreItemState => {
     }
 
     case ActionTypes.InitializeExplore: {
-      const { containerWidth, eventBridge, exploreDatasources, queries, range } = action.payload;
+      const { containerWidth, eventBridge, exploreDatasources, queries, range, ui } = action.payload;
       return {
         ...state,
         containerWidth,
@@ -173,6 +173,7 @@ export const itemReducer = (state, action: Action): ExploreItemState => {
         initialQueries: queries,
         initialized: true,
         modifiedQueries: queries.slice(),
+        ...ui,
       };
     }
 
@@ -249,15 +249,15 @@ class PromQueryField extends React.PureComponent<PromQueryFieldProps, PromQueryF
 
     return (
       <>
-        <div className="gf-form-inline">
-          <div className="gf-form">
+        <div className="gf-form-inline gf-form-inline--nowrap">
+          <div className="gf-form flex-shrink-0">
             <Cascader options={metricsOptions} onChange={this.onChangeMetrics}>
               <button className="gf-form-label gf-form-label--btn" disabled={!syntaxLoaded}>
                 {chooserText} <i className="fa fa-caret-down" />
               </button>
             </Cascader>
           </div>
-          <div className="gf-form gf-form--grow">
+          <div className="gf-form gf-form--grow flex-shrink-1">
             <QueryField
               additionalPlugins={this.plugins}
               cleanText={cleanText}
@@ -231,10 +231,17 @@ export interface ExploreItemState {
   tableResult?: TableModel;
 }
 
+export interface ExploreUIState {
+  showingTable: boolean;
+  showingGraph: boolean;
+  showingLogs: boolean;
+}
+
 export interface ExploreUrlState {
   datasource: string;
   queries: any[]; // Should be a DataQuery, but we're going to strip refIds, so typing makes less sense
   range: RawTimeRange;
+  ui: ExploreUIState;
 }
 
 export interface HistoryItem<TQuery extends DataQuery = DataQuery> {
@@ -7,7 +7,7 @@
   &.ace_editor {
     @include font-family-monospace();
     font-size: 1rem;
-    min-height: 2.6rem;
+    min-height: 3.6rem; // Include space for horizontal scrollbar
 
     @include border-radius($input-border-radius-sm);
     border: $input-btn-border-width solid $input-border-color;
@@ -84,6 +84,10 @@ $input-border: 1px solid $input-border-color;
   .gf-form + .gf-form {
     margin-left: $gf-form-margin;
   }
+
+  &--nowrap {
+    flex-wrap: nowrap;
+  }
 }
 
 .gf-form-button-row {
@@ -4,7 +4,6 @@
   align-items: center;
   padding: 3px 20px 3px 20px;
   position: relative;
-  z-index: 1;
   flex: 0 0 auto;
   background: $toolbar-bg;
   border-radius: 3px;
@@ -83,10 +83,18 @@ button.close {
   position: absolute;
 }
 
-.flex-grow {
+.flex-grow-1 {
   flex-grow: 1;
 }
 
+.flex-shrink-1 {
+  flex-shrink: 1;
+}
+
+.flex-shrink-0 {
+  flex-shrink: 0;
+}
+
 .center-vh {
   display: flex;
   align-items: center;