diff --git a/CHANGELOG.md b/CHANGELOG.md
index 055aa9316fe..3e6049d5dc2 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -5,6 +5,7 @@
 * **SingleStat**: Add seriename as option in singlestat panel, closes [#4740](https://github.com/grafana/grafana/issues/4740)
 * **Localization**: Week start day now dependant on browser locale setting, closes [#3003](https://github.com/grafana/grafana/issues/3003)
 * **Templating**: Update panel repeats for variables that change on time refresh, closes [#5021](https://github.com/grafana/grafana/issues/5021)
+* **Templating**: Add support for numeric and alphabetical sorting of variable values, closes [#2839](https://github.com/grafana/grafana/issues/2839)
 * **Elasticsearch**: Support to set Precision Threshold for Unique Count metric, closes [#4689](https://github.com/grafana/grafana/issues/4689)
 * **Navigation**: Add search to org swithcer, closes [#2609](https://github.com/grafana/grafana/issues/2609)
 * **Database**: Allow database config using one propertie, closes [#5456](https://github.com/grafana/grafana/pull/5456)
@@ -15,6 +16,9 @@
 ### Breaking changes
 * **SystemD**: Change systemd description, closes [#5971](https://github.com/grafana/grafana/pull/5971)
 
+### Bugfixes
+* **Table Panel**: Fixed problem when switching to Mixed datasource in metrics tab, fixes [#5999](https://github.com/grafana/grafana/pull/5999)
+
 # 3.1.2 (unreleased)
 * **Templating**: Fixed issue when combining row & panel repeats, fixes [#5790](https://github.com/grafana/grafana/issues/5790)
 * **Drag&Drop**: Fixed issue with drag and drop in latest Chrome(51+), fixes [#5767](https://github.com/grafana/grafana/issues/5767)
diff --git a/conf/ldap.toml b/conf/ldap.toml
index 395179e219f..812cd582c91 100644
--- a/conf/ldap.toml
+++ b/conf/ldap.toml
@@ -8,6 +8,8 @@ host = "127.0.0.1"
 port = 389
 # Set to true if ldap server supports TLS
 use_ssl = false
+# Set to true to connect to the LDAP server with STARTTLS (create an insecure connection first, then upgrade it to TLS)
+start_tls = false
 # set to true if you want to skip ssl cert validation
 ssl_skip_verify = false
 # set to the path to your root CA certificate or leave unset to use system defaults
diff --git a/docs/mkdocs.yml b/docs/mkdocs.yml
index e4a528dcdbd..e8a397328a1 100644
--- a/docs/mkdocs.yml
+++ b/docs/mkdocs.yml
@@ -42,6 +42,7 @@ pages:
 - ['installation/performance.md', 'Installation', 'Performance Tips']
 - ['installation/troubleshooting.md', 'Installation', 'Troubleshooting']
 - ['installation/migrating_to2.md', 'Installation', 'Migrating from v1.x to v2.x']
+- ['installation/behind_proxy.md', 'Installation', 'Grafana behind reverse proxy']
 
 - ['guides/basic_concepts.md', 'User Guides', 'Basic Concepts']
 - ['guides/gettingstarted.md', 'User Guides', 'Getting Started']
diff --git a/docs/sources/installation/behind_proxy.md b/docs/sources/installation/behind_proxy.md
new file mode 100644
index 00000000000..0cf2507b404
--- /dev/null
+++ b/docs/sources/installation/behind_proxy.md
@@ -0,0 +1,64 @@
+---
+page_title: Running Grafana behind a reverse proxy
+page_description: Guide for running Grafana behind a reverse proxy
+page_keywords: Grafana, reverse proxy, nginx, haproxy
+---
+
+# Running Grafana behind a reverse proxy
+
+It should be straightforward to get Grafana up and running behind a reverse proxy, but here are some things that you might run into.
+
+Links and redirects will not be rendered correctly unless you set the server.domain setting:
+``` +[server] +domain = foo.bar +``` + +To use sub *path* ex `http://foo.bar/grafana` make sure to include `/grafana` in the end of root_url. +Otherwise Grafana will not behave correctly. See example below. + +# Examples +Here are some example configurations for running Grafana behind a reverse proxy. + +## Grafana configuration (ex http://foo.bar.com) +``` +[server] +domain = foo.bar +``` + +## Nginx configuration +``` +server { + listen 80; + root /usr/share/nginx/www; + index index.html index.htm; + + location / { + proxy_pass http://localhost:3000/; + } +} +``` + +# Examples with **sub path** (ex http://foo.bar.com/grafana) + +## Grafana configuration with sub path +``` +[server] +domain = foo.bar +root_url = %(protocol)s://%(domain)s:/grafana +``` + +## Nginx configuration with sub path +``` +server { + listen 80; + root /usr/share/nginx/www; + index index.html index.htm; + + location /grafana/ { + proxy_pass http://localhost:3000/; + } +} +``` + + diff --git a/docs/sources/installation/ldap.md b/docs/sources/installation/ldap.md index a5311fb4fa5..8002c045d82 100644 --- a/docs/sources/installation/ldap.md +++ b/docs/sources/installation/ldap.md @@ -27,6 +27,8 @@ host = "127.0.0.1" port = 389 # Set to true if ldap server supports TLS use_ssl = false +# Set to true if connect ldap server with STARTTLS pattern (create connection in insecure, then upgrade to secure connection with TLS) +start_tls = false # set to true if you want to skip ssl cert validation ssl_skip_verify = false # set to the path to your root CA certificate or leave unset to use system defaults diff --git a/pkg/api/cloudwatch/metrics.go b/pkg/api/cloudwatch/metrics.go index 4721924cff4..4d5fa1f52db 100644 --- a/pkg/api/cloudwatch/metrics.go +++ b/pkg/api/cloudwatch/metrics.go @@ -67,25 +67,29 @@ func init() { "CoreNodesRunning", "CoreNodesPending", "LiveDataNodes", "MRTotalNodes", "MRActiveNodes", "MRLostNodes", "MRUnhealthyNodes", "MRDecommissionedNodes", "MRRebootedNodes", "S3BytesWritten", "S3BytesRead", "HDFSUtilization", "HDFSBytesRead", "HDFSBytesWritten", "MissingBlocks", "CorruptBlocks", "TotalLoad", "MemoryTotalMB", "MemoryReservedMB", "MemoryAvailableMB", "MemoryAllocatedMB", "PendingDeletionBlocks", "UnderReplicatedBlocks", "DfsPendingReplicationBlocks", "CapacityRemainingGB", "HbaseBackupFailed", "MostRecentBackupDuration", "TimeSinceLastSuccessfulBackup"}, - "AWS/ES": {"ClusterStatus.green", "ClusterStatus.yellow", "ClusterStatus.red", "Nodes", "SearchableDocuments", "DeletedDocuments", "CPUUtilization", "FreeStorageSpace", "JVMMemoryPressure", "AutomatedSnapshotFailure", "MasterCPUUtilization", "MasterFreeStorageSpace", "MasterJVMMemoryPressure", "ReadLatency", "WriteLatency", "ReadThroughput", "WriteThroughput", "DiskQueueLength", "ReadIOPS", "WriteIOPS"}, - "AWS/Events": {"Invocations", "FailedInvocations", "TriggeredRules", "MatchedEvents", "ThrottledRules"}, - "AWS/Kinesis": {"GetRecords.Bytes", "GetRecords.IteratorAge", "GetRecords.IteratorAgeMilliseconds", "GetRecords.Latency", "GetRecords.Records", "GetRecords.Success", "IncomingBytes", "IncomingRecords", "PutRecord.Bytes", "PutRecord.Latency", "PutRecord.Success", "PutRecords.Bytes", "PutRecords.Latency", "PutRecords.Records", "PutRecords.Success", "ReadProvisionedThroughputExceeded", "WriteProvisionedThroughputExceeded", "IteratorAgeMilliseconds", "OutgoingBytes", "OutgoingRecords"}, - "AWS/Lambda": {"Invocations", "Errors", "Duration", "Throttles"}, - "AWS/Logs": {"IncomingBytes", "IncomingLogEvents", "ForwardedBytes", "ForwardedLogEvents", 
"DeliveryErrors", "DeliveryThrottling"}, - "AWS/ML": {"PredictCount", "PredictFailureCount"}, - "AWS/OpsWorks": {"cpu_idle", "cpu_nice", "cpu_system", "cpu_user", "cpu_waitio", "load_1", "load_5", "load_15", "memory_buffers", "memory_cached", "memory_free", "memory_swap", "memory_total", "memory_used", "procs"}, - "AWS/Redshift": {"CPUUtilization", "DatabaseConnections", "HealthStatus", "MaintenanceMode", "NetworkReceiveThroughput", "NetworkTransmitThroughput", "PercentageDiskSpaceUsed", "ReadIOPS", "ReadLatency", "ReadThroughput", "WriteIOPS", "WriteLatency", "WriteThroughput"}, - "AWS/RDS": {"BinLogDiskUsage", "CPUUtilization", "CPUCreditUsage", "CPUCreditBalance", "DatabaseConnections", "DiskQueueDepth", "FreeableMemory", "FreeStorageSpace", "ReplicaLag", "SwapUsage", "ReadIOPS", "WriteIOPS", "ReadLatency", "WriteLatency", "ReadThroughput", "WriteThroughput", "NetworkReceiveThroughput", "NetworkTransmitThroughput"}, - "AWS/Route53": {"HealthCheckStatus", "HealthCheckPercentageHealthy", "ConnectionTime", "SSLHandshakeTime", "TimeToFirstByte"}, - "AWS/S3": {"BucketSizeBytes", "NumberOfObjects"}, - "AWS/SNS": {"NumberOfMessagesPublished", "PublishSize", "NumberOfNotificationsDelivered", "NumberOfNotificationsFailed"}, - "AWS/SQS": {"NumberOfMessagesSent", "SentMessageSize", "NumberOfMessagesReceived", "NumberOfEmptyReceives", "NumberOfMessagesDeleted", "ApproximateNumberOfMessagesDelayed", "ApproximateNumberOfMessagesVisible", "ApproximateNumberOfMessagesNotVisible"}, + "AWS/ES": {"ClusterStatus.green", "ClusterStatus.yellow", "ClusterStatus.red", "Nodes", "SearchableDocuments", "DeletedDocuments", "CPUUtilization", "FreeStorageSpace", "JVMMemoryPressure", "AutomatedSnapshotFailure", "MasterCPUUtilization", "MasterFreeStorageSpace", "MasterJVMMemoryPressure", "ReadLatency", "WriteLatency", "ReadThroughput", "WriteThroughput", "DiskQueueLength", "ReadIOPS", "WriteIOPS"}, + "AWS/Events": {"Invocations", "FailedInvocations", "TriggeredRules", "MatchedEvents", "ThrottledRules"}, + "AWS/Firehose": {"DeliveryToElasticsearch.Bytes", "DeliveryToElasticsearch.Records", "DeliveryToElasticsearch.Success", "DeliveryToRedshift.Bytes", "DeliveryToRedshift.Records", "DeliveryToRedshift.Success", "DeliveryToS3.Bytes", "DeliveryToS3.DataFreshness", "DeliveryToS3.Records", "DeliveryToS3.Success", "IncomingBytes", "IncomingRecords", "DescribeDeliveryStream.Latency", "DescribeDeliveryStream.Requests", "ListDeliveryStreams.Latency", "ListDeliveryStreams.Requests", "PutRecord.Bytes", "PutRecord.Latency", "PutRecord.Requests", "PutRecordBatch.Bytes", "PutRecordBatch.Latency", "PutRecordBatch.Records", "PutRecordBatch.Requests", "UpdateDeliveryStream.Latency", "UpdateDeliveryStream.Requests"}, + "AWS/IoT": {"PublishIn.Success", "PublishOut.Success", "Subscribe.Success", "Ping.Success", "Connect.Success", "GetThingShadow.Accepted"}, + "AWS/Kinesis": {"GetRecords.Bytes", "GetRecords.IteratorAge", "GetRecords.IteratorAgeMilliseconds", "GetRecords.Latency", "GetRecords.Records", "GetRecords.Success", "IncomingBytes", "IncomingRecords", "PutRecord.Bytes", "PutRecord.Latency", "PutRecord.Success", "PutRecords.Bytes", "PutRecords.Latency", "PutRecords.Records", "PutRecords.Success", "ReadProvisionedThroughputExceeded", "WriteProvisionedThroughputExceeded", "IteratorAgeMilliseconds", "OutgoingBytes", "OutgoingRecords"}, + "AWS/KinesisAnalytics": {"Bytes", "MillisBehindLatest", "Records", "Success"}, + "AWS/Lambda": {"Invocations", "Errors", "Duration", "Throttles"}, + "AWS/Logs": {"IncomingBytes", "IncomingLogEvents", 
"ForwardedBytes", "ForwardedLogEvents", "DeliveryErrors", "DeliveryThrottling"}, + "AWS/ML": {"PredictCount", "PredictFailureCount"}, + "AWS/OpsWorks": {"cpu_idle", "cpu_nice", "cpu_system", "cpu_user", "cpu_waitio", "load_1", "load_5", "load_15", "memory_buffers", "memory_cached", "memory_free", "memory_swap", "memory_total", "memory_used", "procs"}, + "AWS/Redshift": {"CPUUtilization", "DatabaseConnections", "HealthStatus", "MaintenanceMode", "NetworkReceiveThroughput", "NetworkTransmitThroughput", "PercentageDiskSpaceUsed", "ReadIOPS", "ReadLatency", "ReadThroughput", "WriteIOPS", "WriteLatency", "WriteThroughput"}, + "AWS/RDS": {"BinLogDiskUsage", "CPUUtilization", "CPUCreditUsage", "CPUCreditBalance", "DatabaseConnections", "DiskQueueDepth", "FreeableMemory", "FreeStorageSpace", "ReplicaLag", "SwapUsage", "ReadIOPS", "WriteIOPS", "ReadLatency", "WriteLatency", "ReadThroughput", "WriteThroughput", "NetworkReceiveThroughput", "NetworkTransmitThroughput"}, + "AWS/Route53": {"HealthCheckStatus", "HealthCheckPercentageHealthy", "ConnectionTime", "SSLHandshakeTime", "TimeToFirstByte"}, + "AWS/S3": {"BucketSizeBytes", "NumberOfObjects"}, + "AWS/SNS": {"NumberOfMessagesPublished", "PublishSize", "NumberOfNotificationsDelivered", "NumberOfNotificationsFailed"}, + "AWS/SQS": {"NumberOfMessagesSent", "SentMessageSize", "NumberOfMessagesReceived", "NumberOfEmptyReceives", "NumberOfMessagesDeleted", "ApproximateNumberOfMessagesDelayed", "ApproximateNumberOfMessagesVisible", "ApproximateNumberOfMessagesNotVisible"}, "AWS/StorageGateway": {"CacheHitPercent", "CachePercentUsed", "CachePercentDirty", "CloudBytesDownloaded", "CloudDownloadLatency", "CloudBytesUploaded", "UploadBufferFree", "UploadBufferPercentUsed", "UploadBufferUsed", "QueuedWrites", "ReadBytes", "ReadTime", "TotalCacheSize", "WriteBytes", "WriteTime", "TimeSinceLastRecoveryPoint", "WorkingStorageFree", "WorkingStoragePercentUsed", "WorkingStorageUsed", "CacheHitPercent", "CachePercentUsed", "CachePercentDirty", "ReadBytes", "ReadTime", "WriteBytes", "WriteTime", "QueuedWrites"}, "AWS/SWF": {"DecisionTaskScheduleToStartTime", "DecisionTaskStartToCloseTime", "DecisionTasksCompleted", "StartedDecisionTasksTimedOutOnClose", "WorkflowStartToCloseTime", "WorkflowsCanceled", "WorkflowsCompleted", "WorkflowsContinuedAsNew", "WorkflowsFailed", "WorkflowsTerminated", "WorkflowsTimedOut", "ActivityTaskScheduleToCloseTime", "ActivityTaskScheduleToStartTime", "ActivityTaskStartToCloseTime", "ActivityTasksCanceled", "ActivityTasksCompleted", "ActivityTasksFailed", "ScheduledActivityTasksTimedOutOnClose", "ScheduledActivityTasksTimedOutOnStart", "StartedActivityTasksTimedOutOnClose", "StartedActivityTasksTimedOutOnHeartbeat"}, "AWS/WAF": {"AllowedRequests", "BlockedRequests", "CountedRequests"}, "AWS/WorkSpaces": {"Available", "Unhealthy", "ConnectionAttempt", "ConnectionSuccess", "ConnectionFailure", "SessionLaunchTime", "InSessionLatency", "SessionDisconnect"}, + "KMS": {"SecondsUntilKeyMaterialExpiration"}, } dimensionsMap = map[string][]string{ "AWS/ApiGateway": {"ApiName", "Method", "Resource", "Stage"}, @@ -106,7 +110,10 @@ func init() { "AWS/ElasticMapReduce": {"ClusterId", "JobFlowId", "JobId"}, "AWS/ES": {"ClientId", "DomainName"}, "AWS/Events": {"RuleName"}, + "AWS/Firehose": {}, + "AWS/IoT": {"Protocol"}, "AWS/Kinesis": {"StreamName", "ShardID"}, + "AWS/KinesisAnalytics": {"Flow", "Id", "Application"}, "AWS/Lambda": {"FunctionName", "Resource", "Version", "Alias"}, "AWS/Logs": {"LogGroupName", "DestinationType", "FilterName"}, "AWS/ML": 
{"MLModelId", "RequestMode"}, @@ -121,6 +128,7 @@ func init() { "AWS/SWF": {"Domain", "WorkflowTypeName", "WorkflowTypeVersion", "ActivityTypeName", "ActivityTypeVersion"}, "AWS/WAF": {"Rule", "WebACL"}, "AWS/WorkSpaces": {"DirectoryId", "WorkspaceId"}, + "KMS": {"KeyId"}, } customMetricsMetricsMap = make(map[string]map[string]map[string]*CustomMetricsCache) diff --git a/pkg/api/common.go b/pkg/api/common.go index 9d3ad90783b..82eed0db5fe 100644 --- a/pkg/api/common.go +++ b/pkg/api/common.go @@ -4,7 +4,6 @@ import ( "encoding/json" "net/http" - "github.com/grafana/grafana/pkg/metrics" "github.com/grafana/grafana/pkg/middleware" "github.com/grafana/grafana/pkg/setting" "gopkg.in/macaron.v1" @@ -88,10 +87,8 @@ func ApiError(status int, message string, err error) *NormalResponse { switch status { case 404: - metrics.M_Api_Status_404.Inc(1) data["message"] = "Not Found" case 500: - metrics.M_Api_Status_500.Inc(1) data["message"] = "Internal Server Error" } diff --git a/pkg/cmd/grafana-server/web.go b/pkg/cmd/grafana-server/web.go index 51975ac5617..4c294a814ca 100644 --- a/pkg/cmd/grafana-server/web.go +++ b/pkg/cmd/grafana-server/web.go @@ -53,6 +53,7 @@ func newMacaron() *macaron.Macaron { m.Use(middleware.GetContextHandler()) m.Use(middleware.Sessioner(&setting.SessionOptions)) + m.Use(middleware.RequestMetrics()) return m } diff --git a/pkg/login/ldap.go b/pkg/login/ldap.go index 4e8188e7a4b..d8c916bb765 100644 --- a/pkg/login/ldap.go +++ b/pkg/login/ldap.go @@ -48,7 +48,16 @@ func (a *ldapAuther) Dial() error { ServerName: host, RootCAs: certPool, } - a.conn, err = ldap.DialTLS("tcp", address, tlsCfg) + if a.server.StartTLS { + a.conn, err = ldap.Dial("tcp", address) + if err == nil { + if err = a.conn.StartTLS(tlsCfg); err == nil { + return nil + } + } + } else { + a.conn, err = ldap.DialTLS("tcp", address, tlsCfg) + } } else { a.conn, err = ldap.Dial("tcp", address) } diff --git a/pkg/login/settings.go b/pkg/login/settings.go index e01c0e50992..e0713302a6d 100644 --- a/pkg/login/settings.go +++ b/pkg/login/settings.go @@ -19,6 +19,7 @@ type LdapServerConf struct { Host string `toml:"host"` Port int `toml:"port"` UseSSL bool `toml:"use_ssl"` + StartTLS bool `toml:"start_tls"` SkipVerifySSL bool `toml:"ssl_skip_verify"` RootCACert string `toml:"root_ca_cert"` BindDN string `toml:"bind_dn"` diff --git a/pkg/metrics/metrics.go b/pkg/metrics/metrics.go index 31dd6f8b201..80d316c022f 100644 --- a/pkg/metrics/metrics.go +++ b/pkg/metrics/metrics.go @@ -13,8 +13,15 @@ var ( M_Page_Status_200 Counter M_Page_Status_500 Counter M_Page_Status_404 Counter - M_Api_Status_500 Counter + M_Page_Status_Unknown Counter + M_Api_Status_200 Counter M_Api_Status_404 Counter + M_Api_Status_500 Counter + M_Api_Status_Unknown Counter + M_Proxy_Status_200 Counter + M_Proxy_Status_404 Counter + M_Proxy_Status_500 Counter + M_Proxy_Status_Unknown Counter M_Api_User_SignUpStarted Counter M_Api_User_SignUpCompleted Counter M_Api_User_SignUpInvite Counter @@ -54,9 +61,17 @@ func initMetricVars(settings *MetricSettings) { M_Page_Status_200 = RegCounter("page.resp_status", "code", "200") M_Page_Status_500 = RegCounter("page.resp_status", "code", "500") M_Page_Status_404 = RegCounter("page.resp_status", "code", "404") + M_Page_Status_Unknown = RegCounter("page.resp_status", "code", "unknown") - M_Api_Status_500 = RegCounter("api.resp_status", "code", "500") + M_Api_Status_200 = RegCounter("api.resp_status", "code", "200") M_Api_Status_404 = RegCounter("api.resp_status", "code", "404") + M_Api_Status_500 = 
RegCounter("api.resp_status", "code", "500") + M_Api_Status_Unknown = RegCounter("api.resp_status", "code", "unknown") + + M_Proxy_Status_200 = RegCounter("proxy.resp_status", "code", "200") + M_Proxy_Status_404 = RegCounter("proxy.resp_status", "code", "404") + M_Proxy_Status_500 = RegCounter("proxy.resp_status", "code", "500") + M_Proxy_Status_Unknown = RegCounter("proxy.resp_status", "code", "unknown") M_Api_User_SignUpStarted = RegCounter("api.user.signup_started") M_Api_User_SignUpCompleted = RegCounter("api.user.signup_completed") diff --git a/pkg/middleware/logger.go b/pkg/middleware/logger.go index c6405ef80f9..9bed7cbe16b 100644 --- a/pkg/middleware/logger.go +++ b/pkg/middleware/logger.go @@ -49,9 +49,9 @@ func Logger() macaron.Handler { if ctx, ok := c.Data["ctx"]; ok { ctxTyped := ctx.(*Context) if status == 500 { - ctxTyped.Logger.Error("Request Completed", "method", req.Method, "path", req.URL.Path, "status", status, "remote_addr", c.RemoteAddr(), "time_ns", timeTakenMs, "size", rw.Size()) + ctxTyped.Logger.Error("Request Completed", "method", req.Method, "path", req.URL.Path, "status", status, "remote_addr", c.RemoteAddr(), "time_ms", timeTakenMs, "size", rw.Size()) } else { - ctxTyped.Logger.Info("Request Completed", "method", req.Method, "path", req.URL.Path, "status", status, "remote_addr", c.RemoteAddr(), "time_ns", timeTakenMs, "size", rw.Size()) + ctxTyped.Logger.Info("Request Completed", "method", req.Method, "path", req.URL.Path, "status", status, "remote_addr", c.RemoteAddr(), "time_ms", timeTakenMs, "size", rw.Size()) } } } diff --git a/pkg/middleware/middleware.go b/pkg/middleware/middleware.go index 5d52c68722e..df1768e1c3a 100644 --- a/pkg/middleware/middleware.go +++ b/pkg/middleware/middleware.go @@ -208,15 +208,6 @@ func (ctx *Context) Handle(status int, title string, err error) { } } - switch status { - case 200: - metrics.M_Page_Status_200.Inc(1) - case 404: - metrics.M_Page_Status_404.Inc(1) - case 500: - metrics.M_Page_Status_500.Inc(1) - } - ctx.Data["Title"] = title ctx.HTML(status, strconv.Itoa(status)) } @@ -243,10 +234,8 @@ func (ctx *Context) JsonApiErr(status int, message string, err error) { switch status { case 404: - metrics.M_Api_Status_404.Inc(1) resp["message"] = "Not Found" case 500: - metrics.M_Api_Status_500.Inc(1) resp["message"] = "Internal Server Error" } diff --git a/pkg/middleware/request_metrics.go b/pkg/middleware/request_metrics.go new file mode 100644 index 00000000000..417a1817d15 --- /dev/null +++ b/pkg/middleware/request_metrics.go @@ -0,0 +1,65 @@ +package middleware + +import ( + "net/http" + "strings" + + "github.com/grafana/grafana/pkg/metrics" + "gopkg.in/macaron.v1" +) + +func RequestMetrics() macaron.Handler { + return func(res http.ResponseWriter, req *http.Request, c *macaron.Context) { + rw := res.(macaron.ResponseWriter) + c.Next() + + status := rw.Status() + + if strings.HasPrefix(req.RequestURI, "/api/datasources/proxy") { + countProxyRequests(status) + } else if strings.HasPrefix(req.RequestURI, "/api/") { + countApiRequests(status) + } else { + countPageRequests(status) + } + } +} + +func countApiRequests(status int) { + switch status { + case 200: + metrics.M_Api_Status_200.Inc(1) + case 404: + metrics.M_Api_Status_404.Inc(1) + case 500: + metrics.M_Api_Status_500.Inc(1) + default: + metrics.M_Api_Status_Unknown.Inc(1) + } +} + +func countPageRequests(status int) { + switch status { + case 200: + metrics.M_Page_Status_200.Inc(1) + case 404: + metrics.M_Page_Status_404.Inc(1) + case 500: + 
metrics.M_Page_Status_500.Inc(1) + default: + metrics.M_Page_Status_Unknown.Inc(1) + } +} + +func countProxyRequests(status int) { + switch status { + case 200: + metrics.M_Proxy_Status_200.Inc(1) + case 404: + metrics.M_Proxy_Status_404.Inc(1) + case 500: + metrics.M_Proxy_Status_500.Inc(1) + default: + metrics.M_Proxy_Status_Unknown.Inc(1) + } +} diff --git a/pkg/models/alert_state.go b/pkg/models/alert_state.go deleted file mode 100644 index 5071efc2171..00000000000 --- a/pkg/models/alert_state.go +++ /dev/null @@ -1,47 +0,0 @@ -package models - -// type AlertState struct { -// Id int64 `json:"-"` -// OrgId int64 `json:"-"` -// AlertId int64 `json:"alertId"` -// State string `json:"state"` -// Created time.Time `json:"created"` -// Info string `json:"info"` -// TriggeredAlerts *simplejson.Json `json:"triggeredAlerts"` -// } -// -// func (this *UpdateAlertStateCommand) IsValidState() bool { -// for _, v := range alertstates.ValidStates { -// if this.State == v { -// return true -// } -// } -// return false -// } -// -// // Commands -// -// type UpdateAlertStateCommand struct { -// AlertId int64 `json:"alertId" binding:"Required"` -// OrgId int64 `json:"orgId" binding:"Required"` -// State string `json:"state" binding:"Required"` -// Info string `json:"info"` -// -// Result *Alert -// } -// -// // Queries -// -// type GetAlertsStateQuery struct { -// OrgId int64 `json:"orgId" binding:"Required"` -// AlertId int64 `json:"alertId" binding:"Required"` -// -// Result *[]AlertState -// } -// -// type GetLastAlertStateQuery struct { -// AlertId int64 -// OrgId int64 -// -// Result *AlertState -// } diff --git a/pkg/services/alerting/conditions/common.go b/pkg/services/alerting/conditions/common.go deleted file mode 100644 index 06702fd1e08..00000000000 --- a/pkg/services/alerting/conditions/common.go +++ /dev/null @@ -1 +0,0 @@ -package conditions diff --git a/pkg/services/alerting/conditions/query.go b/pkg/services/alerting/conditions/query.go index ef51e09685c..dd39453ae42 100644 --- a/pkg/services/alerting/conditions/query.go +++ b/pkg/services/alerting/conditions/query.go @@ -38,6 +38,7 @@ func (c *QueryCondition) Eval(context *alerting.EvalContext) { return } + emptySerieCount := 0 for _, series := range seriesList { reducedValue := c.Reducer.Reduce(series) evalMatch := c.Evaluator.Eval(reducedValue) @@ -55,13 +56,14 @@ func (c *QueryCondition) Eval(context *alerting.EvalContext) { }) } - context.Firing = evalMatch - // handle no data scenario if reducedValue == nil { - context.NoDataFound = true + emptySerieCount++ } } + + context.NoDataFound = emptySerieCount == len(seriesList) + context.Firing = len(context.EvalMatches) > 0 } func (c *QueryCondition) executeQuery(context *alerting.EvalContext) (tsdb.TimeSeriesSlice, error) { diff --git a/pkg/services/alerting/conditions/query_test.go b/pkg/services/alerting/conditions/query_test.go index 88891c83096..f78ac17db2b 100644 --- a/pkg/services/alerting/conditions/query_test.go +++ b/pkg/services/alerting/conditions/query_test.go @@ -59,6 +59,45 @@ func TestQueryCondition(t *testing.T) { So(ctx.result.Error, ShouldBeNil) So(ctx.result.Firing, ShouldBeFalse) }) + + Convey("Should fire if only first serie matches", func() { + one := float64(120) + two := float64(0) + ctx.series = tsdb.TimeSeriesSlice{ + tsdb.NewTimeSeries("test1", [][2]*float64{{&one, &two}}), + tsdb.NewTimeSeries("test2", [][2]*float64{{&two, &two}}), + } + ctx.exec() + + So(ctx.result.Error, ShouldBeNil) + So(ctx.result.Firing, ShouldBeTrue) + }) + + Convey("Empty series", 
func() { + Convey("Should set NoDataFound both series are empty", func() { + ctx.series = tsdb.TimeSeriesSlice{ + tsdb.NewTimeSeries("test1", [][2]*float64{}), + tsdb.NewTimeSeries("test2", [][2]*float64{}), + } + ctx.exec() + + So(ctx.result.Error, ShouldBeNil) + So(ctx.result.NoDataFound, ShouldBeTrue) + }) + + Convey("Should not set NoDataFound if one serie is empty", func() { + one := float64(120) + two := float64(0) + ctx.series = tsdb.TimeSeriesSlice{ + tsdb.NewTimeSeries("test1", [][2]*float64{}), + tsdb.NewTimeSeries("test2", [][2]*float64{{&one, &two}}), + } + ctx.exec() + + So(ctx.result.Error, ShouldBeNil) + So(ctx.result.NoDataFound, ShouldBeFalse) + }) + }) }) }) } diff --git a/pkg/services/alerting/eval_handler.go b/pkg/services/alerting/eval_handler.go index 9f47252968b..ab4c377197b 100644 --- a/pkg/services/alerting/eval_handler.go +++ b/pkg/services/alerting/eval_handler.go @@ -20,7 +20,7 @@ type DefaultEvalHandler struct { func NewEvalHandler() *DefaultEvalHandler { return &DefaultEvalHandler{ log: log.New("alerting.evalHandler"), - alertJobTimeout: time.Second * 5, + alertJobTimeout: time.Second * 10, } } @@ -29,9 +29,9 @@ func (e *DefaultEvalHandler) Eval(context *EvalContext) { select { case <-time.After(e.alertJobTimeout): - context.Error = fmt.Errorf("Timeout") + context.Error = fmt.Errorf("Execution timed out after %v", e.alertJobTimeout) context.EndTime = time.Now() - e.log.Debug("Job Execution timeout", "alertId", context.Rule.Id) + e.log.Debug("Job Execution timeout", "alertId", context.Rule.Id, "timeout setting", e.alertJobTimeout) e.retry(context) case <-context.DoneChan: e.log.Debug("Job Execution done", "timeMs", context.GetDurationMs(), "alertId", context.Rule.Id, "firing", context.Firing) @@ -45,10 +45,10 @@ func (e *DefaultEvalHandler) Eval(context *EvalContext) { func (e *DefaultEvalHandler) retry(context *EvalContext) { e.log.Debug("Retrying eval exeuction", "alertId", context.Rule.Id) - context.RetryCount++ - if context.RetryCount > MaxRetries { + if context.RetryCount < MaxRetries { context.DoneChan = make(chan bool, 1) context.CancelChan = make(chan bool, 1) + context.RetryCount++ e.Eval(context) } } diff --git a/pkg/services/sqlstore/datasource.go b/pkg/services/sqlstore/datasource.go index c028fd0fccc..2f1b40b2d61 100644 --- a/pkg/services/sqlstore/datasource.go +++ b/pkg/services/sqlstore/datasource.go @@ -43,7 +43,7 @@ func GetDataSourceByName(query *m.GetDataSourceByNameQuery) error { } func GetDataSources(query *m.GetDataSourcesQuery) error { - sess := x.Limit(100, 0).Where("org_id=?", query.OrgId).Asc("name") + sess := x.Limit(1000, 0).Where("org_id=?", query.OrgId).Asc("name") query.Result = make([]*m.DataSource, 0) return sess.Find(&query.Result) diff --git a/pkg/tsdb/graphite/graphite.go b/pkg/tsdb/graphite/graphite.go index 6e554dbe810..4042702378c 100644 --- a/pkg/tsdb/graphite/graphite.go +++ b/pkg/tsdb/graphite/graphite.go @@ -1,6 +1,7 @@ package graphite import ( + "crypto/tls" "encoding/json" "fmt" "io/ioutil" @@ -15,10 +16,6 @@ import ( "github.com/grafana/grafana/pkg/tsdb" ) -var ( - HttpClient = http.Client{Timeout: time.Duration(10 * time.Second)} -) - type GraphiteExecutor struct { *tsdb.DataSourceInfo } @@ -27,11 +24,23 @@ func NewGraphiteExecutor(dsInfo *tsdb.DataSourceInfo) tsdb.Executor { return &GraphiteExecutor{dsInfo} } -var glog log.Logger +var ( + glog log.Logger + HttpClient http.Client +) func init() { glog = log.New("tsdb.graphite") tsdb.RegisterExecutor("graphite", NewGraphiteExecutor) + + tr := &http.Transport{ + 
TLSClientConfig: &tls.Config{InsecureSkipVerify: true}, + } + + HttpClient = http.Client{ + Timeout: time.Duration(10 * time.Second), + Transport: tr, + } } func (e *GraphiteExecutor) Execute(queries tsdb.QuerySlice, context *tsdb.QueryContext) *tsdb.BatchResult { diff --git a/public/app/features/panel/metrics_panel_ctrl.ts b/public/app/features/panel/metrics_panel_ctrl.ts index 65625e6fd70..62cece44acf 100644 --- a/public/app/features/panel/metrics_panel_ctrl.ts +++ b/public/app/features/panel/metrics_panel_ctrl.ts @@ -246,7 +246,7 @@ class MetricsPanelCtrl extends PanelCtrl { if (datasource.meta.mixed) { _.each(this.panel.targets, target => { target.datasource = this.panel.datasource; - if (target.datasource === null) { + if (!target.datasource) { target.datasource = config.defaultDatasource; } }); diff --git a/public/app/features/panel/panel_ctrl.ts b/public/app/features/panel/panel_ctrl.ts index e58994974cd..3bcca4f3f72 100644 --- a/public/app/features/panel/panel_ctrl.ts +++ b/public/app/features/panel/panel_ctrl.ts @@ -242,7 +242,7 @@ export class PanelCtrl { var modalScope = this.$scope.$new(); modalScope.panel = this.panel; modalScope.dashboard = this.dashboard; - modalScope.inspector = angular.copy(this.inspector); + modalScope.inspector = $.extend(true, {}, this.inspector); this.publishAppEvent('show-modal', { src: 'public/app/partials/inspector.html', diff --git a/public/app/features/templating/editorCtrl.js b/public/app/features/templating/editorCtrl.js index 5efebc21e30..95f3bd2e9dd 100644 --- a/public/app/features/templating/editorCtrl.js +++ b/public/app/features/templating/editorCtrl.js @@ -13,6 +13,7 @@ function (angular, _) { type: 'query', datasource: null, refresh: 0, + sort: 1, name: '', hide: 0, options: [], @@ -34,6 +35,14 @@ function (angular, _) { {value: 2, text: "On Time Range Change"}, ]; + $scope.sortOptions = [ + {value: 0, text: "Without Sort"}, + {value: 1, text: "Alphabetical (asc)"}, + {value: 2, text: "Alphabetical (desc)"}, + {value: 3, text: "Numerical (asc)"}, + {value: 4, text: "Numerical (desc)"}, + ]; + $scope.hideOptions = [ {value: 0, text: ""}, {value: 1, text: "Label"}, @@ -114,6 +123,7 @@ function (angular, _) { $scope.currentIsNew = false; $scope.mode = 'edit'; + $scope.current.sort = $scope.current.sort || replacementDefaults.sort; if ($scope.current.datasource === void 0) { $scope.current.datasource = null; $scope.current.type = 'query'; diff --git a/public/app/features/templating/partials/editor.html b/public/app/features/templating/partials/editor.html index 247945c2342..80ea870dfc4 100644 --- a/public/app/features/templating/partials/editor.html +++ b/public/app/features/templating/partials/editor.html @@ -181,6 +181,17 @@ +
+
+    Sort
+
+    How to sort the values of this variable.
+
+
+
+
+
Query diff --git a/public/app/features/templating/templateValuesSrv.js b/public/app/features/templating/templateValuesSrv.js index 44904b07fad..6125a4b44a9 100644 --- a/public/app/features/templating/templateValuesSrv.js +++ b/public/app/features/templating/templateValuesSrv.js @@ -342,7 +342,7 @@ function (angular, _, $, kbn) { this.metricNamesToVariableValues = function(variable, metricNames) { var regex, options, i, matches; - options = {}; // use object hash to remove duplicates + options = []; if (variable.regex) { regex = kbn.stringToJsRegex(templateSrv.replace(variable.regex)); @@ -370,16 +370,43 @@ function (angular, _, $, kbn) { } } - options[value] = {text: text, value: value}; + options.push({text: text, value: value}); } + options = _.uniq(options, 'value'); - return _.sortBy(options, 'text'); + return this.sortVariableValues(options, variable.sort); }; this.addAllOption = function(variable) { variable.options.unshift({text: 'All', value: "$__all"}); }; + this.sortVariableValues = function(options, sortOrder) { + if (sortOrder === 0) { + return options; + } + + var sortType = Math.ceil(sortOrder / 2); + var reverseSort = (sortOrder % 2 === 0); + if (sortType === 1) { + options = _.sortBy(options, 'text'); + } else if (sortType === 2) { + options = _.sortBy(options, function(opt) { + var matches = opt.text.match(/.*?(\d+).*/); + if (!matches) { + return 0; + } else { + return parseInt(matches[1], 10); + } + }); + } + if (reverseSort) { + options = options.reverse(); + } + + return options; + }; + }); }); diff --git a/public/app/partials/inspector.html b/public/app/partials/inspector.html index 1b1510090ad..228dfc7041e 100644 --- a/public/app/partials/inspector.html +++ b/public/app/partials/inspector.html @@ -68,9 +68,9 @@ -
-			{{stack_trace}}
-		
+
+{{stack_trace}}
+
diff --git a/public/app/plugins/datasource/influxdb/datasource.ts b/public/app/plugins/datasource/influxdb/datasource.ts index 9a5ebbef859..08af9e71a91 100644 --- a/public/app/plugins/datasource/influxdb/datasource.ts +++ b/public/app/plugins/datasource/influxdb/datasource.ts @@ -16,6 +16,7 @@ export default class InfluxDatasource { name: string; database: any; basicAuth: any; + withCredentials: any; interval: any; supportAnnotations: boolean; supportMetrics: boolean; @@ -33,6 +34,7 @@ export default class InfluxDatasource { this.name = instanceSettings.name; this.database = instanceSettings.database; this.basicAuth = instanceSettings.basicAuth; + this.withCredentials = instanceSettings.withCredentials; this.interval = (instanceSettings.jsonData || {}).timeInterval; this.supportAnnotations = true; this.supportMetrics = true; @@ -187,6 +189,9 @@ export default class InfluxDatasource { }; options.headers = options.headers || {}; + if (this.basicAuth || this.withCredentials) { + options.withCredentials = true; + } if (self.basicAuth) { options.headers.Authorization = self.basicAuth; } diff --git a/public/app/plugins/datasource/prometheus/datasource.ts b/public/app/plugins/datasource/prometheus/datasource.ts index a763b2f92ea..8f9fceeba3e 100644 --- a/public/app/plugins/datasource/prometheus/datasource.ts +++ b/public/app/plugins/datasource/prometheus/datasource.ts @@ -257,7 +257,7 @@ export function PrometheusDatasource(instanceSettings, $q, backendSrv, templateS return this.getOriginalMetricName(labelData); } - return this.renderTemplate(options.legendFormat, labelData) || '{}'; + return this.renderTemplate(templateSrv.replace(options.legendFormat), labelData) || '{}'; }; this.renderTemplate = function(aliasPattern, aliasData) { diff --git a/public/app/plugins/panel/graph/graph.js b/public/app/plugins/panel/graph/graph.js index f58b92e57c5..50c4877a606 100755 --- a/public/app/plugins/panel/graph/graph.js +++ b/public/app/plugins/panel/graph/graph.js @@ -265,7 +265,7 @@ function (angular, $, moment, _, kbn, GraphTooltip, thresholdManExports) { console.log('flotcharts error', e); ctrl.error = e.message || "Render Error"; ctrl.renderError = true; - ctrl.inspector = {error: ctrl.error}; + ctrl.inspector = {error: e}; } if (incrementRenderCounter) { diff --git a/public/test/specs/templateValuesSrv-specs.js b/public/test/specs/templateValuesSrv-specs.js index 7a8d8c1fccb..1742f5739c3 100644 --- a/public/test/specs/templateValuesSrv-specs.js +++ b/public/test/specs/templateValuesSrv-specs.js @@ -386,5 +386,69 @@ define([ }); }); + describeUpdateVariable('without sort', function(scenario) { + scenario.setup(function() { + scenario.variable = {type: 'query', query: 'apps.*', name: 'test', sort: 0}; + scenario.queryResult = [{text: 'bbb2'}, {text: 'aaa10'}, { text: 'ccc3'}]; + }); + + it('should return options without sort', function() { + expect(scenario.variable.options[0].text).to.be('bbb2'); + expect(scenario.variable.options[1].text).to.be('aaa10'); + expect(scenario.variable.options[2].text).to.be('ccc3'); + }); + }); + + describeUpdateVariable('with alphabetical sort (asc)', function(scenario) { + scenario.setup(function() { + scenario.variable = {type: 'query', query: 'apps.*', name: 'test', sort: 1}; + scenario.queryResult = [{text: 'bbb2'}, {text: 'aaa10'}, { text: 'ccc3'}]; + }); + + it('should return options with alphabetical sort', function() { + expect(scenario.variable.options[0].text).to.be('aaa10'); + expect(scenario.variable.options[1].text).to.be('bbb2'); + 
expect(scenario.variable.options[2].text).to.be('ccc3'); + }); + }); + + describeUpdateVariable('with alphabetical sort (desc)', function(scenario) { + scenario.setup(function() { + scenario.variable = {type: 'query', query: 'apps.*', name: 'test', sort: 2}; + scenario.queryResult = [{text: 'bbb2'}, {text: 'aaa10'}, { text: 'ccc3'}]; + }); + + it('should return options with alphabetical sort', function() { + expect(scenario.variable.options[0].text).to.be('ccc3'); + expect(scenario.variable.options[1].text).to.be('bbb2'); + expect(scenario.variable.options[2].text).to.be('aaa10'); + }); + }); + + describeUpdateVariable('with numerical sort (asc)', function(scenario) { + scenario.setup(function() { + scenario.variable = {type: 'query', query: 'apps.*', name: 'test', sort: 3}; + scenario.queryResult = [{text: 'bbb2'}, {text: 'aaa10'}, { text: 'ccc3'}]; + }); + + it('should return options with numerical sort', function() { + expect(scenario.variable.options[0].text).to.be('bbb2'); + expect(scenario.variable.options[1].text).to.be('ccc3'); + expect(scenario.variable.options[2].text).to.be('aaa10'); + }); + }); + + describeUpdateVariable('with numerical sort (desc)', function(scenario) { + scenario.setup(function() { + scenario.variable = {type: 'query', query: 'apps.*', name: 'test', sort: 4}; + scenario.queryResult = [{text: 'bbb2'}, {text: 'aaa10'}, { text: 'ccc3'}]; + }); + + it('should return options with numerical sort', function() { + expect(scenario.variable.options[0].text).to.be('aaa10'); + expect(scenario.variable.options[1].text).to.be('ccc3'); + expect(scenario.variable.options[2].text).to.be('bbb2'); + }); + }); }); });
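For reference, the sort orders exercised by the specs above map onto the new `sortVariableValues` in templateValuesSrv.js: values 1 and 2 sort alphabetically on `text`, values 3 and 4 sort on the first run of digits in `text`, and even values reverse the result. A minimal Go sketch of that ordering (illustrative only; the real implementation is the lodash code above, and the function names here are invented):

```go
package main

import (
	"fmt"
	"regexp"
	"sort"
	"strconv"
)

var digits = regexp.MustCompile(`\d+`)

// firstNumber extracts the first run of digits in s, or 0 when none is found,
// mirroring the /.*?(\d+).*/ match used by sortVariableValues.
func firstNumber(s string) int {
	m := digits.FindString(s)
	if m == "" {
		return 0
	}
	n, _ := strconv.Atoi(m)
	return n
}

func main() {
	opts := []string{"bbb2", "aaa10", "ccc3"}

	alpha := append([]string(nil), opts...)
	sort.Strings(alpha) // sort=1: alphabetical (asc) -> aaa10 bbb2 ccc3

	numeric := append([]string(nil), opts...)
	sort.Slice(numeric, func(i, j int) bool { // sort=3: numerical (asc) -> bbb2 ccc3 aaa10
		return firstNumber(numeric[i]) < firstNumber(numeric[j])
	})

	fmt.Println(alpha, numeric)
}
```

The numeric case is what turns `['bbb2', 'aaa10', 'ccc3']` into `bbb2, ccc3, aaa10`, exactly the expectation in the "numerical sort (asc)" spec.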
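The `Dial` change in pkg/login/ldap.go adds a third connection mode: with `use_ssl = true` and `start_tls = true` it opens a plain TCP connection and then upgrades it with STARTTLS instead of dialing LDAPS directly. A standalone sketch of the same pattern with the go-ldap client; the `gopkg.in/ldap.v2` import path and the omission of the skip-verify and root-CA options are assumptions of this sketch, not taken from the patch:

```go
package ldapconn

import (
	"crypto/tls"
	"fmt"

	"gopkg.in/ldap.v2"
)

// dial mirrors the three connection modes in ldapAuther.Dial: plain TCP,
// LDAPS (DialTLS), and the new STARTTLS path (plain Dial, then upgrade).
// The skip-verify and root-CA handling from the real code is omitted here.
func dial(address, host string, useSSL, startTLS bool) (*ldap.Conn, error) {
	if !useSSL {
		return ldap.Dial("tcp", address)
	}

	tlsCfg := &tls.Config{ServerName: host}

	if startTLS {
		conn, err := ldap.Dial("tcp", address) // insecure connection first...
		if err != nil {
			return nil, err
		}
		if err := conn.StartTLS(tlsCfg); err != nil { // ...then upgrade it to TLS
			conn.Close()
			return nil, fmt.Errorf("STARTTLS failed: %v", err)
		}
		return conn, nil
	}

	return ldap.DialTLS("tcp", address, tlsCfg) // LDAPS: TLS from the first byte
}
```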
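The new `middleware.RequestMetrics()` replaces the scattered counter increments removed from `pkg/api/common.go` and `pkg/middleware/middleware.go`: it lets the rest of the chain run via `c.Next()`, reads the final status from macaron's `ResponseWriter`, and buckets the request by URI prefix (proxy, api, page). The same idea expressed with plain net/http, as a hedged sketch; the `statusRecorder` type and the `count` callback are illustrative, not Grafana APIs:

```go
package main

import (
	"fmt"
	"log"
	"net/http"
	"strings"
)

// statusRecorder remembers the status code written by the downstream handler,
// which is the role macaron.ResponseWriter.Status() plays in the real middleware.
type statusRecorder struct {
	http.ResponseWriter
	status int
}

func (r *statusRecorder) WriteHeader(code int) {
	r.status = code
	r.ResponseWriter.WriteHeader(code)
}

// requestMetrics counts responses by URI prefix after the wrapped handler has run,
// mirroring the proxy/api/page split in pkg/middleware/request_metrics.go.
func requestMetrics(next http.Handler, count func(group string, status int)) http.Handler {
	return http.HandlerFunc(func(w http.ResponseWriter, req *http.Request) {
		rec := &statusRecorder{ResponseWriter: w, status: http.StatusOK}
		next.ServeHTTP(rec, req) // equivalent of c.Next(): run the rest of the chain first

		switch {
		case strings.HasPrefix(req.RequestURI, "/api/datasources/proxy"):
			count("proxy", rec.status)
		case strings.HasPrefix(req.RequestURI, "/api/"):
			count("api", rec.status)
		default:
			count("page", rec.status)
		}
	})
}

func main() {
	mux := http.NewServeMux()
	mux.HandleFunc("/api/health", func(w http.ResponseWriter, r *http.Request) {
		w.WriteHeader(http.StatusOK)
	})

	// Toy counter for the demo; it is not goroutine-safe, whereas the real
	// middleware increments registered metrics counters.
	counts := map[string]int{}
	count := func(group string, status int) {
		counts[fmt.Sprintf("%s.resp_status.%d", group, status)]++
	}

	log.Fatal(http.ListenAndServe(":3000", requestMetrics(mux, count)))
}
```

The key detail is ordering: counting happens only after the downstream handler has written its response, which is why the middleware is registered last in newMacaron() and calls `c.Next()` before looking at the status.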
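The `QueryCondition.Eval` change fixes how per-series results are combined: previously the last series overwrote `Firing` and any empty series set `NoDataFound`; now the rule fires if any series matches, and `NoDataFound` is set only when every series reduced to nil, which is what the new specs assert. A small sketch of that aggregation, where the `evalSeries` helper and the `matches` predicate are invented for illustration:

```go
package main

import "fmt"

// evalSeries combines per-series results the way QueryCondition.Eval now does:
// the condition fires when at least one reduced value matches the evaluator,
// and no-data is reported only when every series reduced to nil.
func evalSeries(reduced []*float64, matches func(float64) bool) (firing, noData bool) {
	emptyCount := 0
	for _, v := range reduced {
		if v == nil {
			emptyCount++
			continue
		}
		if matches(*v) {
			firing = true
		}
	}
	return firing, emptyCount == len(reduced)
}

func main() {
	above100 := func(v float64) bool { return v > 100 }
	a, b := 120.0, 0.0

	fmt.Println(evalSeries([]*float64{&a, &b}, above100))  // true false  -> fires when only the first series matches
	fmt.Println(evalSeries([]*float64{nil, nil}, above100)) // false true  -> NoDataFound when both series are empty
	fmt.Println(evalSeries([]*float64{nil, &b}, above100))  // false false -> one empty series is not "no data"
}
```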
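The eval_handler.go fix inverts the retry bound: the old condition `RetryCount > MaxRetries` gated retries the wrong way round, while the new code retries only while `RetryCount < MaxRetries` and increments the counter per attempt. The same bound written as a plain loop; this is a sketch, where `maxRetries` and the `evaluate` callback are assumptions, and the real handler also recreates its Done and Cancel channels on each attempt:

```go
package main

import "fmt"

const maxRetries = 3

// evalWithRetry keeps retrying a failed evaluation while retryCount < maxRetries,
// matching the corrected condition in DefaultEvalHandler.retry.
func evalWithRetry(evaluate func(attempt int) error) error {
	var err error
	for retryCount := 0; ; retryCount++ {
		if err = evaluate(retryCount); err == nil {
			return nil
		}
		if retryCount >= maxRetries {
			return fmt.Errorf("giving up after %d retries: %v", maxRetries, err)
		}
	}
}

func main() {
	attempts := 0
	err := evalWithRetry(func(attempt int) error {
		attempts++
		return fmt.Errorf("timeout") // always fails -> 1 try plus 3 retries
	})
	fmt.Println(attempts, err) // 4 giving up after 3 retries: timeout
}
```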
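The graphite executor now builds its shared `HttpClient` inside `init()` with a transport that sets `InsecureSkipVerify: true`, so Graphite endpoints with self-signed certificates work, at the cost of disabling certificate validation for every Graphite datasource. The construction restated as a standalone helper, stdlib only, with the helper name invented for this sketch:

```go
package graphite

import (
	"crypto/tls"
	"net/http"
	"time"
)

// newHTTPClient rebuilds the client the executor now registers in init():
// a 10 second timeout and a transport that accepts self-signed certificates.
// InsecureSkipVerify disables certificate and hostname checks entirely, so a
// misdirected or intercepted connection will not be detected.
func newHTTPClient() *http.Client {
	return &http.Client{
		Timeout: 10 * time.Second,
		Transport: &http.Transport{
			TLSClientConfig: &tls.Config{InsecureSkipVerify: true},
		},
	}
}
```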