Merge with master
@@ -2,6 +2,7 @@
### Minor

* **Pushover**: Adds support for images in pushover notifier [#10780](https://github.com/grafana/grafana/issues/10780), thx [@jpenalbae](https://github.com/jpenalbae)
+* **Cloudwatch**: Add AWS/Neptune metrics [#14231](https://github.com/grafana/grafana/issues/14231), thx [@tcpatterson](https://github.com/tcpatterson)

# 6.0.0-beta1 (2019-01-30)
@@ -242,10 +242,7 @@ func (ss *SqlStore) buildConnectionString() (string, error) {
		cnnstr += ss.buildExtraConnectionString('&')
	case migrator.POSTGRES:
-		host, port, err := util.SplitIPPort(ss.dbCfg.Host, "5432")
-		if err != nil {
-			return "", err
-		}
+		host, port := util.SplitHostPortDefault(ss.dbCfg.Host, "127.0.0.1", "5432")
		if ss.dbCfg.Pwd == "" {
			ss.dbCfg.Pwd = "''"
		}
@@ -95,6 +95,7 @@ func init() {
	"AWS/Logs": {"IncomingBytes", "IncomingLogEvents", "ForwardedBytes", "ForwardedLogEvents", "DeliveryErrors", "DeliveryThrottling"},
	"AWS/ML": {"PredictCount", "PredictFailureCount"},
	"AWS/NATGateway": {"PacketsOutToDestination", "PacketsOutToSource", "PacketsInFromSource", "PacketsInFromDestination", "BytesOutToDestination", "BytesOutToSource", "BytesInFromSource", "BytesInFromDestination", "ErrorPortAllocation", "ActiveConnectionCount", "ConnectionAttemptCount", "ConnectionEstablishedCount", "IdleTimeoutCount", "PacketsDropCount"},
+	"AWS/Neptune": {"CPUUtilization", "ClusterReplicaLag", "ClusterReplicaLagMaximum", "ClusterReplicaLagMinimum", "EngineUptime", "FreeableMemory", "FreeLocalStorage", "GremlinHttp1xx", "GremlinHttp2xx", "GremlinHttp4xx", "GremlinHttp5xx", "GremlinErrors", "GremlinRequests", "GremlinRequestsPerSec", "GremlinWebSocketSuccess", "GremlinWebSocketClientErrors", "GremlinWebSocketServerErrors", "GremlinWebSocketAvailableConnections", "Http1xx", "Http2xx", "Http4xx", "Http5xx", "Http100", "Http101", "Http200", "Http400", "Http403", "Http405", "Http413", "Http429", "Http500", "Http501", "LoaderErrors", "LoaderRequests", "NetworkReceiveThroughput", "NetworkThroughput", "NetworkTransmitThroughput", "SparqlHttp1xx", "SparqlHttp2xx", "SparqlHttp4xx", "SparqlHttp5xx", "SparqlErrors", "SparqlRequests", "SparqlRequestsPerSec", "StatusErrors", "StatusRequests", "VolumeBytesUsed", "VolumeReadIOPs", "VolumeWriteIOPs"},
	"AWS/NetworkELB": {"ActiveFlowCount", "ConsumedLCUs", "HealthyHostCount", "NewFlowCount", "ProcessedBytes", "TCP_Client_Reset_Count", "TCP_ELB_Reset_Count", "TCP_Target_Reset_Count", "UnHealthyHostCount"},
	"AWS/OpsWorks": {"cpu_idle", "cpu_nice", "cpu_system", "cpu_user", "cpu_waitio", "load_1", "load_5", "load_15", "memory_buffers", "memory_cached", "memory_free", "memory_swap", "memory_total", "memory_used", "procs"},
	"AWS/Redshift": {"CPUUtilization", "DatabaseConnections", "HealthStatus", "MaintenanceMode", "NetworkReceiveThroughput", "NetworkTransmitThroughput", "PercentageDiskSpaceUsed", "QueriesCompletedPerSecond", "QueryDuration", "QueryRuntimeBreakdown", "ReadIOPS", "ReadLatency", "ReadThroughput", "WLMQueriesCompletedPerSecond", "WLMQueryDuration", "WLMQueueLength", "WriteIOPS", "WriteLatency", "WriteThroughput"},

@@ -149,6 +150,7 @@ func init() {
	"AWS/Logs": {"LogGroupName", "DestinationType", "FilterName"},
	"AWS/ML": {"MLModelId", "RequestMode"},
	"AWS/NATGateway": {"NatGatewayId"},
+	"AWS/Neptune": {"DBClusterIdentifier", "Role", "DatabaseClass", "EngineName"},
	"AWS/NetworkELB": {"LoadBalancer", "TargetGroup", "AvailabilityZone"},
	"AWS/OpsWorks": {"StackId", "LayerId", "InstanceId"},
	"AWS/Redshift": {"NodeID", "ClusterIdentifier", "latency", "service class", "wmlid"},
@@ -49,10 +49,7 @@ func generateConnectionString(datasource *models.DataSource) (string, error) {
		}
	}

-	server, port, err := util.SplitIPPort(datasource.Url, "1433")
-	if err != nil {
-		return "", err
-	}
+	server, port := util.SplitHostPortDefault(datasource.Url, "localhost", "1433")

	encrypt := datasource.JsonData.Get("encrypt").MustString("false")
	connStr := fmt.Sprintf("server=%s;port=%s;database=%s;user id=%s;password=%s;",
@@ -1,25 +0,0 @@
|
||||
package util
|
||||
|
||||
import (
|
||||
"net"
|
||||
)
|
||||
|
||||
// SplitIPPort splits the ip string and port.
|
||||
func SplitIPPort(ipStr string, portDefault string) (ip string, port string, err error) {
|
||||
ipAddr := net.ParseIP(ipStr)
|
||||
|
||||
if ipAddr == nil {
|
||||
// Port was included
|
||||
ip, port, err = net.SplitHostPort(ipStr)
|
||||
|
||||
if err != nil {
|
||||
return "", "", err
|
||||
}
|
||||
} else {
|
||||
// No port was included
|
||||
ip = ipAddr.String()
|
||||
port = portDefault
|
||||
}
|
||||
|
||||
return ip, port, nil
|
||||
}
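For context on why this helper is replaced throughout the diff: the deleted SplitIPPort only handles IP literals, so a database host configured as a DNS name (such as the xyz.rds.amazonaws.com cases exercised by the new tests further down) falls through to net.SplitHostPort and fails whenever no explicit port is given. A minimal, hypothetical sketch of that failure mode (splitIPPort below is a local stand-in for the deleted helper, not the Grafana code itself):

package main

import (
	"fmt"
	"net"
)

// splitIPPort mirrors the shape of the removed util.SplitIPPort: if the input
// is not a parseable IP literal, it is handed to net.SplitHostPort, which
// requires a "host:port" form.
func splitIPPort(ipStr, portDefault string) (string, string, error) {
	if ipAddr := net.ParseIP(ipStr); ipAddr != nil {
		// Plain IP literal: keep it and fall back to the default port.
		return ipAddr.String(), portDefault, nil
	}
	return net.SplitHostPort(ipStr)
}

func main() {
	// Works: an IPv4 literal takes the ParseIP branch.
	fmt.Println(splitIPPort("1.2.3.4", "5432"))
	// Fails: a hostname is not an IP and carries no port, so
	// net.SplitHostPort returns a "missing port in address" error.
	fmt.Println(splitIPPort("xyz.rds.amazonaws.com", "5432"))
}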
@@ -7,23 +7,48 @@ import (

// ParseIPAddress parses an IP address and removes port and/or IPV6 format
func ParseIPAddress(input string) string {
-	s := input
-	lastIndex := strings.LastIndex(input, ":")
+	host, _ := SplitHostPort(input)

-	if lastIndex != -1 {
-		if lastIndex > 0 && input[lastIndex-1:lastIndex] != ":" {
-			s = input[:lastIndex]
-		}
+	ip := net.ParseIP(host)
+
+	if ip == nil {
+		return host
+	}

-	s = strings.Replace(s, "[", "", -1)
-	s = strings.Replace(s, "]", "", -1)
-
-	ip := net.ParseIP(s)

	if ip.IsLoopback() {
		return "127.0.0.1"
	}

	return ip.String()
}

+// SplitHostPortDefault splits ip address/hostname string by host and port. Defaults used if no match found
+func SplitHostPortDefault(input, defaultHost, defaultPort string) (host string, port string) {
+	port = defaultPort
+	s := input
+	lastIndex := strings.LastIndex(input, ":")
+
+	if lastIndex != -1 {
+		if lastIndex > 0 && input[lastIndex-1:lastIndex] != ":" {
+			s = input[:lastIndex]
+			port = input[lastIndex+1:]
+		} else if lastIndex == 0 {
+			s = defaultHost
+			port = input[lastIndex+1:]
+		}
+	} else {
+		port = defaultPort
+	}
+
+	s = strings.Replace(s, "[", "", -1)
+	s = strings.Replace(s, "]", "", -1)
+	port = strings.Replace(port, "[", "", -1)
+	port = strings.Replace(port, "]", "", -1)
+
+	return s, port
+}
+
+// SplitHostPort splits ip address/hostname string by host and port
+func SplitHostPort(input string) (host string, port string) {
+	return SplitHostPortDefault(input, "", "")
+}
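For reference, a small hypothetical sketch of how the new helpers behave; it is not part of this change, it assumes the util package above is importable (here via the grafana pkg/util path used elsewhere in this diff), and the expected outputs mirror the test cases that follow:

package main

import (
	"fmt"

	"github.com/grafana/grafana/pkg/util"
)

func main() {
	// A hostname without a port keeps the host and takes the default port.
	fmt.Println(util.SplitHostPortDefault("xyz.rds.amazonaws.com", "", "123")) // xyz.rds.amazonaws.com 123

	// Bracketed IPv6 literals have the brackets stripped from the host part.
	fmt.Println(util.SplitHostPortDefault("[::1]:456", "", "5432")) // ::1 456

	// SplitHostPort is the same split with empty defaults.
	fmt.Println(util.SplitHostPort(":456")) // "" 456

	// ParseIPAddress strips the port and keeps the address.
	fmt.Println(util.ParseIPAddress("192.168.0.140:456")) // 192.168.0.140
}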
@@ -9,8 +9,90 @@ import (
func TestParseIPAddress(t *testing.T) {
	Convey("Test parse ip address", t, func() {
		So(ParseIPAddress("192.168.0.140:456"), ShouldEqual, "192.168.0.140")
		So(ParseIPAddress("192.168.0.140"), ShouldEqual, "192.168.0.140")
		So(ParseIPAddress("[::1:456]"), ShouldEqual, "127.0.0.1")
		So(ParseIPAddress("[::1]"), ShouldEqual, "127.0.0.1")
+		So(ParseIPAddress("192.168.0.140"), ShouldEqual, "192.168.0.140")
+		So(ParseIPAddress("::1"), ShouldEqual, "127.0.0.1")
+		So(ParseIPAddress("::1:123"), ShouldEqual, "127.0.0.1")
	})
}

+func TestSplitHostPortDefault(t *testing.T) {
+	Convey("Test split ip address to host and port", t, func() {
+		host, port := SplitHostPortDefault("192.168.0.140:456", "", "")
+		So(host, ShouldEqual, "192.168.0.140")
+		So(port, ShouldEqual, "456")
+
+		host, port = SplitHostPortDefault("192.168.0.140", "", "123")
+		So(host, ShouldEqual, "192.168.0.140")
+		So(port, ShouldEqual, "123")
+
+		host, port = SplitHostPortDefault("[::1:456]", "", "")
+		So(host, ShouldEqual, "::1")
+		So(port, ShouldEqual, "456")
+
+		host, port = SplitHostPortDefault("[::1]", "", "123")
+		So(host, ShouldEqual, "::1")
+		So(port, ShouldEqual, "123")
+
+		host, port = SplitHostPortDefault("::1:123", "", "")
+		So(host, ShouldEqual, "::1")
+		So(port, ShouldEqual, "123")
+
+		host, port = SplitHostPortDefault("::1", "", "123")
+		So(host, ShouldEqual, "::1")
+		So(port, ShouldEqual, "123")
+
+		host, port = SplitHostPortDefault(":456", "1.2.3.4", "")
+		So(host, ShouldEqual, "1.2.3.4")
+		So(port, ShouldEqual, "456")
+
+		host, port = SplitHostPortDefault("xyz.rds.amazonaws.com", "", "123")
+		So(host, ShouldEqual, "xyz.rds.amazonaws.com")
+		So(port, ShouldEqual, "123")
+
+		host, port = SplitHostPortDefault("xyz.rds.amazonaws.com:123", "", "")
+		So(host, ShouldEqual, "xyz.rds.amazonaws.com")
+		So(port, ShouldEqual, "123")
+	})
+}
+
+func TestSplitHostPort(t *testing.T) {
+	Convey("Test split ip address to host and port", t, func() {
+		host, port := SplitHostPort("192.168.0.140:456")
+		So(host, ShouldEqual, "192.168.0.140")
+		So(port, ShouldEqual, "456")
+
+		host, port = SplitHostPort("192.168.0.140")
+		So(host, ShouldEqual, "192.168.0.140")
+		So(port, ShouldEqual, "")
+
+		host, port = SplitHostPort("[::1:456]")
+		So(host, ShouldEqual, "::1")
+		So(port, ShouldEqual, "456")
+
+		host, port = SplitHostPort("[::1]")
+		So(host, ShouldEqual, "::1")
+		So(port, ShouldEqual, "")
+
+		host, port = SplitHostPort("::1:123")
+		So(host, ShouldEqual, "::1")
+		So(port, ShouldEqual, "123")
+
+		host, port = SplitHostPort("::1")
+		So(host, ShouldEqual, "::1")
+		So(port, ShouldEqual, "")
+
+		host, port = SplitHostPort(":456")
+		So(host, ShouldEqual, "")
+		So(port, ShouldEqual, "456")
+
+		host, port = SplitHostPort("xyz.rds.amazonaws.com")
+		So(host, ShouldEqual, "xyz.rds.amazonaws.com")
+		So(port, ShouldEqual, "")
+
+		host, port = SplitHostPort("xyz.rds.amazonaws.com:123")
+		So(host, ShouldEqual, "xyz.rds.amazonaws.com")
+		So(port, ShouldEqual, "123")
+	})
+}
@@ -1,43 +0,0 @@
-package util
-
-import (
-	"testing"
-
-	. "github.com/smartystreets/goconvey/convey"
-)
-
-func TestSplitIPPort(t *testing.T) {
-
-	Convey("When parsing an IPv4 without explicit port", t, func() {
-		ip, port, err := SplitIPPort("1.2.3.4", "5678")
-
-		So(err, ShouldEqual, nil)
-		So(ip, ShouldEqual, "1.2.3.4")
-		So(port, ShouldEqual, "5678")
-	})
-
-	Convey("When parsing an IPv6 without explicit port", t, func() {
-		ip, port, err := SplitIPPort("::1", "5678")
-
-		So(err, ShouldEqual, nil)
-		So(ip, ShouldEqual, "::1")
-		So(port, ShouldEqual, "5678")
-	})
-
-	Convey("When parsing an IPv4 with explicit port", t, func() {
-		ip, port, err := SplitIPPort("1.2.3.4:56", "78")
-
-		So(err, ShouldEqual, nil)
-		So(ip, ShouldEqual, "1.2.3.4")
-		So(port, ShouldEqual, "56")
-	})
-
-	Convey("When parsing an IPv6 with explicit port", t, func() {
-		ip, port, err := SplitIPPort("[::1]:56", "78")
-
-		So(err, ShouldEqual, nil)
-		So(ip, ShouldEqual, "::1")
-		So(port, ShouldEqual, "56")
-	})
-
-}
@@ -13,6 +13,11 @@ const DEFAULT_EXPLORE_STATE: ExploreUrlState = {
  datasource: null,
  queries: [],
  range: DEFAULT_RANGE,
+  ui: {
+    showingGraph: true,
+    showingTable: true,
+    showingLogs: true,
+  }
};

describe('state functions', () => {

@@ -69,9 +74,11 @@ describe('state functions', () => {
        to: 'now',
      },
    };

    expect(serializeStateToUrlParam(state)).toBe(
      '{"datasource":"foo","queries":[{"expr":"metric{test=\\"a/b\\"}"},' +
-        '{"expr":"super{foo=\\"x/z\\"}"}],"range":{"from":"now-5h","to":"now"}}'
+        '{"expr":"super{foo=\\"x/z\\"}"}],"range":{"from":"now-5h","to":"now"},' +
+        '"ui":{"showingGraph":true,"showingTable":true,"showingLogs":true}}'
    );
  });

@@ -93,7 +100,7 @@ describe('state functions', () => {
      },
    };
    expect(serializeStateToUrlParam(state, true)).toBe(
-      '["now-5h","now","foo",{"expr":"metric{test=\\"a/b\\"}"},{"expr":"super{foo=\\"x/z\\"}"}]'
+      '["now-5h","now","foo",{"expr":"metric{test=\\"a/b\\"}"},{"expr":"super{foo=\\"x/z\\"}"},{"ui":[true,true,true]}]'
    );
  });
});

@@ -118,7 +125,28 @@ describe('state functions', () => {
    };
    const serialized = serializeStateToUrlParam(state);
    const parsed = parseUrlState(serialized);
    expect(state).toMatchObject(parsed);
  });

+  it('can parse the compact serialized state into the original state', () => {
+    const state = {
+      ...DEFAULT_EXPLORE_STATE,
+      datasource: 'foo',
+      queries: [
+        {
+          expr: 'metric{test="a/b"}',
+        },
+        {
+          expr: 'super{foo="x/z"}',
+        },
+      ],
+      range: {
+        from: 'now - 5h',
+        to: 'now',
+      },
+    };
+    const serialized = serializeStateToUrlParam(state, true);
+    const parsed = parseUrlState(serialized);
+    expect(state).toMatchObject(parsed);
+  });
});
@@ -27,6 +27,12 @@ export const DEFAULT_RANGE = {
  to: 'now',
};

+export const DEFAULT_UI_STATE = {
+  showingTable: true,
+  showingGraph: true,
+  showingLogs: true,
+};
+
const MAX_HISTORY_ITEMS = 100;

export const LAST_USED_DATASOURCE_KEY = 'grafana.explore.datasource';

@@ -147,7 +153,12 @@ export function buildQueryTransaction(

export const clearQueryKeys: ((query: DataQuery) => object) = ({ key, refId, ...rest }) => rest;

+const isMetricSegment = (segment: { [key: string]: string }) => segment.hasOwnProperty('expr');
+const isUISegment = (segment: { [key: string]: string }) => segment.hasOwnProperty('ui');
+
export function parseUrlState(initial: string | undefined): ExploreUrlState {
+  let uiState = DEFAULT_UI_STATE;
+
  if (initial) {
    try {
      const parsed = JSON.parse(decodeURI(initial));

@@ -160,20 +171,41 @@ export function parseUrlState(initial: string | undefined): ExploreUrlState {
          to: parsed[1],
        };
        const datasource = parsed[2];
-        const queries = parsed.slice(3);
-        return { datasource, queries, range };
+        let queries = [];
+
+        parsed.slice(3).forEach(segment => {
+          if (isMetricSegment(segment)) {
+            queries = [...queries, segment];
+          }
+
+          if (isUISegment(segment)) {
+            uiState = {
+              showingGraph: segment.ui[0],
+              showingLogs: segment.ui[1],
+              showingTable: segment.ui[2],
+            };
+          }
+        });
+
+        return { datasource, queries, range, ui: uiState };
      }
      return parsed;
    } catch (e) {
      console.error(e);
    }
  }
-  return { datasource: null, queries: [], range: DEFAULT_RANGE };
+  return { datasource: null, queries: [], range: DEFAULT_RANGE, ui: uiState };
}

export function serializeStateToUrlParam(urlState: ExploreUrlState, compact?: boolean): string {
  if (compact) {
-    return JSON.stringify([urlState.range.from, urlState.range.to, urlState.datasource, ...urlState.queries]);
+    return JSON.stringify([
+      urlState.range.from,
+      urlState.range.to,
+      urlState.datasource,
+      ...urlState.queries,
+      { ui: [!!urlState.ui.showingGraph, !!urlState.ui.showingLogs, !!urlState.ui.showingTable] },
+    ]);
  }
  return JSON.stringify(urlState);
}
@@ -1,2 +1,3 @@
export { SaveDashboardAsModalCtrl } from './SaveDashboardAsModalCtrl';
export { SaveDashboardModalCtrl } from './SaveDashboardModalCtrl';
+export { SaveProvisionedDashboardModalCtrl } from './SaveProvisionedDashboardModalCtrl';
@@ -24,7 +24,7 @@ import { changeSize, changeTime, initializeExplore, modifyQueries, scanStart, se
import { RawTimeRange, TimeRange, DataQuery, ExploreStartPageProps } from '@grafana/ui';
import { ExploreItemState, ExploreUrlState, RangeScanner, ExploreId } from 'app/types/explore';
import { StoreState } from 'app/types';
-import { LAST_USED_DATASOURCE_KEY, ensureQueries, DEFAULT_RANGE } from 'app/core/utils/explore';
+import { LAST_USED_DATASOURCE_KEY, ensureQueries, DEFAULT_RANGE, DEFAULT_UI_STATE } from 'app/core/utils/explore';
import { Emitter } from 'app/core/utils/emitter';
import { ExploreToolbar } from './ExploreToolbar';
import { scanStopAction } from './state/actionTypes';

@@ -100,18 +100,20 @@ export class Explore extends React.PureComponent<ExploreProps> {
    // Don't initialize on split, but need to initialize urlparameters when present
    if (!initialized) {
      // Load URL state and parse range
-      const { datasource, queries, range = DEFAULT_RANGE } = (urlState || {}) as ExploreUrlState;
+      const { datasource, queries, range = DEFAULT_RANGE, ui = DEFAULT_UI_STATE } = (urlState || {}) as ExploreUrlState;
      const initialDatasource = datasource || store.get(LAST_USED_DATASOURCE_KEY);
      const initialQueries: DataQuery[] = ensureQueries(queries);
      const initialRange = { from: parseTime(range.from), to: parseTime(range.to) };
      const width = this.el ? this.el.offsetWidth : 0;

      this.props.initializeExplore(
        exploreId,
        initialDatasource,
        initialQueries,
        initialRange,
        width,
-        this.exploreEvents
+        this.exploreEvents,
+        ui
      );
    }
  }
@@ -15,6 +15,7 @@ import {
  RangeScanner,
  ResultType,
  QueryTransaction,
+  ExploreUIState,
} from 'app/types/explore';
import { actionCreatorFactory, noPayloadActionCreatorFactory, ActionOf } from 'app/core/redux/actionCreatorFactory';

@@ -93,6 +94,7 @@ export interface InitializeExplorePayload {
  exploreDatasources: DataSourceSelectItem[];
  queries: DataQuery[];
  range: RawTimeRange;
+  ui: ExploreUIState;
}

export interface LoadDatasourceFailurePayload {
@@ -32,7 +32,7 @@ import {
  QueryHint,
  QueryFixAction,
} from '@grafana/ui/src/types';
-import { ExploreId, ExploreUrlState, RangeScanner, ResultType, QueryOptions } from 'app/types/explore';
+import { ExploreId, ExploreUrlState, RangeScanner, ResultType, QueryOptions, ExploreUIState } from 'app/types/explore';
import {
  Action,
  updateDatasourceInstanceAction,

@@ -59,13 +59,16 @@ import {
  setQueriesAction,
  splitCloseAction,
  splitOpenAction,
-  addQueryRowAction,
-  AddQueryRowPayload,
  toggleGraphAction,
  toggleLogsAction,
  toggleTableAction,
+  addQueryRowAction,
+  AddQueryRowPayload,
+  ToggleGraphPayload,
+  ToggleLogsPayload,
+  ToggleTablePayload,
} from './actionTypes';
-import { ActionOf } from 'app/core/redux/actionCreatorFactory';
+import { ActionOf, ActionCreator } from 'app/core/redux/actionCreatorFactory';

type ThunkResult<R> = ThunkAction<R, StoreState, undefined, Action>;

@@ -89,7 +92,15 @@ export function changeDatasource(exploreId: ExploreId, datasource: string): Thun
    await dispatch(importQueries(exploreId, queries, currentDataSourceInstance, newDataSourceInstance));

    dispatch(updateDatasourceInstanceAction({ exploreId, datasourceInstance: newDataSourceInstance }));
-    dispatch(loadDatasource(exploreId, newDataSourceInstance));
+
+    try {
+      await dispatch(loadDatasource(exploreId, newDataSourceInstance));
+    } catch (error) {
+      console.error(error);
+      return;
+    }
+
    dispatch(runQueries(exploreId));
  };
}
@@ -158,7 +169,8 @@ export function initializeExplore(
  queries: DataQuery[],
  range: RawTimeRange,
  containerWidth: number,
-  eventBridge: Emitter
+  eventBridge: Emitter,
+  ui: ExploreUIState
): ThunkResult<void> {
  return async dispatch => {
    const exploreDatasources: DataSourceSelectItem[] = getDatasourceSrv()

@@ -177,6 +189,7 @@ export function initializeExplore(
        exploreDatasources,
        queries,
        range,
+        ui,
      })
    );

@@ -196,7 +209,14 @@ export function initializeExplore(
      }

      dispatch(updateDatasourceInstanceAction({ exploreId, datasourceInstance: instance }));
-      dispatch(loadDatasource(exploreId, instance));
+
+      try {
+        await dispatch(loadDatasource(exploreId, instance));
+      } catch (error) {
+        console.error(error);
+        return;
+      }
+      dispatch(runQueries(exploreId, true));
    } else {
      dispatch(loadDatasourceMissingAction({ exploreId }));
    }
@@ -271,8 +291,8 @@ export function loadDatasource(exploreId: ExploreId, instance: DataSourceApi): T

    // Keep ID to track selection
    dispatch(loadDatasourcePendingAction({ exploreId, requestedDatasourceName: datasourceName }));

    let datasourceError = null;

    try {
      const testResult = await instance.testDatasource();
      datasourceError = testResult.status === 'success' ? null : testResult.message;

@@ -282,7 +302,7 @@ export function loadDatasource(exploreId: ExploreId, instance: DataSourceApi): T

    if (datasourceError) {
      dispatch(loadDatasourceFailureAction({ exploreId, error: datasourceError }));
-      return;
+      return Promise.reject(`${datasourceName} loading failed`);
    }

    if (datasourceName !== getState().explore[exploreId].requestedDatasourceName) {

@@ -300,7 +320,7 @@ export function loadDatasource(exploreId: ExploreId, instance: DataSourceApi): T
    }

    dispatch(loadDatasourceSuccess(exploreId, instance));
-    dispatch(runQueries(exploreId));
+    return Promise.resolve();
  };
}

@@ -470,7 +490,7 @@ export function queryTransactionSuccess(
/**
 * Main action to run queries and dispatches sub-actions based on which result viewers are active
 */
-export function runQueries(exploreId: ExploreId) {
+export function runQueries(exploreId: ExploreId, ignoreUIState = false) {
  return (dispatch, getState) => {
    const {
      datasourceInstance,

@@ -494,7 +514,7 @@ export function runQueries(exploreId: ExploreId) {
    const interval = datasourceInstance.interval;

    // Keep table queries first since they need to return quickly
-    if (showingTable && supportsTable) {
+    if ((ignoreUIState || showingTable) && supportsTable) {
      dispatch(
        runQueriesForType(
          exploreId,

@@ -509,7 +529,7 @@ export function runQueries(exploreId: ExploreId) {
        )
      );
    }
-    if (showingGraph && supportsGraph) {
+    if ((ignoreUIState || showingGraph) && supportsGraph) {
      dispatch(
        runQueriesForType(
          exploreId,

@@ -523,9 +543,10 @@ export function runQueries(exploreId: ExploreId) {
        )
      );
    }
-    if (showingLogs && supportsLogs) {
+    if ((ignoreUIState || showingLogs) && supportsLogs) {
      dispatch(runQueriesForType(exploreId, 'Logs', { interval, format: 'logs' }));
    }

    dispatch(stateSave());
  };
}
@@ -651,6 +672,11 @@ export function stateSave() {
      datasource: left.datasourceInstance.name,
      queries: left.initialQueries.map(clearQueryKeys),
      range: left.range,
+      ui: {
+        showingGraph: left.showingGraph,
+        showingLogs: left.showingLogs,
+        showingTable: left.showingTable,
+      },
    };
    urlStates.left = serializeStateToUrlParam(leftUrlState, true);
    if (split) {

@@ -658,45 +684,64 @@ export function stateSave() {
        datasource: right.datasourceInstance.name,
        queries: right.initialQueries.map(clearQueryKeys),
        range: right.range,
+        ui: {
+          showingGraph: right.showingGraph,
+          showingLogs: right.showingLogs,
+          showingTable: right.showingTable,
+        },
      };

      urlStates.right = serializeStateToUrlParam(rightUrlState, true);
    }

    dispatch(updateLocation({ query: urlStates }));
  };
}

/**
- * Expand/collapse the graph result viewer. When collapsed, graph queries won't be run.
+ * Creates action to collapse graph/logs/table panel. When panel is collapsed,
+ * queries won't be run
 */
-export function toggleGraph(exploreId: ExploreId): ThunkResult<void> {
+const togglePanelActionCreator = (
+  actionCreator:
+    | ActionCreator<ToggleGraphPayload>
+    | ActionCreator<ToggleLogsPayload>
+    | ActionCreator<ToggleTablePayload>
+) => (exploreId: ExploreId) => {
  return (dispatch, getState) => {
-    dispatch(toggleGraphAction({ exploreId }));
-    if (getState().explore[exploreId].showingGraph) {
+    let shouldRunQueries;
+    dispatch(actionCreator);
+    dispatch(stateSave());
+
+    switch (actionCreator.type) {
+      case toggleGraphAction.type:
+        shouldRunQueries = getState().explore[exploreId].showingGraph;
+        break;
+      case toggleLogsAction.type:
+        shouldRunQueries = getState().explore[exploreId].showingLogs;
+        break;
+      case toggleTableAction.type:
+        shouldRunQueries = getState().explore[exploreId].showingTable;
+        break;
+    }
+
+    if (shouldRunQueries) {
      dispatch(runQueries(exploreId));
    }
  };
-}
+};

+/**
+ * Expand/collapse the graph result viewer. When collapsed, graph queries won't be run.
+ */
+export const toggleGraph = togglePanelActionCreator(toggleGraphAction);

/**
 * Expand/collapse the logs result viewer. When collapsed, log queries won't be run.
 */
-export function toggleLogs(exploreId: ExploreId): ThunkResult<void> {
-  return (dispatch, getState) => {
-    dispatch(toggleLogsAction({ exploreId }));
-    if (getState().explore[exploreId].showingLogs) {
-      dispatch(runQueries(exploreId));
-    }
-  };
-}
+export const toggleLogs = togglePanelActionCreator(toggleLogsAction);

/**
 * Expand/collapse the table result viewer. When collapsed, table queries won't be run.
 */
-export function toggleTable(exploreId: ExploreId): ThunkResult<void> {
-  return (dispatch, getState) => {
-    dispatch(toggleTableAction({ exploreId }));
-    if (getState().explore[exploreId].showingTable) {
-      dispatch(runQueries(exploreId));
-    }
-  };
-}
+export const toggleTable = togglePanelActionCreator(toggleTableAction);
@@ -179,7 +179,7 @@ export const itemReducer = reducerFactory<ExploreItemState>({} as ExploreItemSta
  .addMapper({
    filter: initializeExploreAction,
    mapper: (state, action): ExploreItemState => {
-      const { containerWidth, eventBridge, exploreDatasources, queries, range } = action.payload;
+      const { containerWidth, eventBridge, exploreDatasources, queries, range, ui } = action.payload;
      return {
        ...state,
        containerWidth,

@@ -189,6 +189,7 @@ export const itemReducer = reducerFactory<ExploreItemState>({} as ExploreItemSta
        initialQueries: queries,
        initialized: true,
        queryKeys: getQueryKeys(queries, state.datasourceInstance),
+        ...ui,
      };
    },
  })
@@ -239,10 +239,17 @@ export interface ExploreItemState {
  queryKeys: string[];
}

+export interface ExploreUIState {
+  showingTable: boolean;
+  showingGraph: boolean;
+  showingLogs: boolean;
+}
+
export interface ExploreUrlState {
  datasource: string;
  queries: any[]; // Should be a DataQuery, but we're going to strip refIds, so typing makes less sense
  range: RawTimeRange;
+  ui: ExploreUIState;
}

export interface HistoryItem<TQuery extends DataQuery = DataQuery> {