Merge branch 'master' of github.com:grafana/grafana

Torkel Ödegaard 2016-10-19 13:14:34 +02:00
commit a826591eed
16 changed files with 169 additions and 122 deletions

View File

@@ -25,10 +25,11 @@ install:
build_script:
- go run build.go build
- grunt release
- go run build.go sha1-dist
- cp dist/* .
artifacts:
- path: grafana-*windows-*.zip
- path: grafana-*windows-*.*
name: binzip
deploy:

View File

@@ -98,6 +98,9 @@ func main() {
createDebPackages()
sha1FilesInDist()
case "sha1-dist":
sha1FilesInDist()
case "latest":
makeLatestDistCopies()
sha1FilesInDist()
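
The new standalone "sha1-dist" case lets CI (see the appveyor change above) regenerate checksums for the files in dist without rebuilding packages. A very rough sketch of what a checksum-the-dist-folder step typically looks like; this is illustrative only and not the actual sha1FilesInDist from build.go:

package main

import (
	"crypto/sha1"
	"fmt"
	"io"
	"os"
	"path/filepath"
)

// sha1Dist writes a <file>.sha1 next to every file under the given directory.
// Illustrative only; not the actual sha1FilesInDist from build.go.
func sha1Dist(dir string) error {
	return filepath.Walk(dir, func(path string, info os.FileInfo, err error) error {
		if err != nil || info.IsDir() {
			return err
		}
		f, err := os.Open(path)
		if err != nil {
			return err
		}
		defer f.Close()
		h := sha1.New()
		if _, err := io.Copy(h, f); err != nil {
			return err
		}
		return os.WriteFile(path+".sha1", []byte(fmt.Sprintf("%x\n", h.Sum(nil))), 0644)
	})
}

func main() {
	if err := sha1Dist("dist"); err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}
}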

View File

@@ -1,75 +0,0 @@
bind-address = "0.0.0.0"
[logging]
level = "debug"
file = "/opt/influxdb/shared/data/influxdb.log" # stdout to log to standard out
[admin]
port = 8083 # binding is disabled if the port isn't set
assets = "/opt/influxdb/current/admin"
[api]
port = 8086 # binding is disabled if the port isn't set
read-timeout = "5s"
[input_plugins]
[input_plugins.graphite]
enabled = true
port = 2004
database = "graphite" # store graphite data in this database
[raft]
port = 8090
dir = "/opt/influxdb/shared/data/raft"
[storage]
dir = "/opt/influxdb/shared/data/db"
# How many requests to potentially buffer in memory. If the buffer gets filled then writes
# will still be logged and once the local storage has caught up (or compacted) the writes
# will be replayed from the WAL
write-buffer-size = 10000
default-engine = "rocksdb"
max-open-shards = 0
point-batch-size = 100
write-batch-size = 5000000
retention-sweep-period = "10m"
[storage.engines.rocksdb]
max-open-files = 1000
lru-cache-size = "200m"
[storage.engines.leveldb]
max-open-files = 1000
lru-cache-size = "200m"
[cluster]
protobuf_port = 8099
protobuf_timeout = "2s" # the write timeout on the protobuf conn any duration parseable by time.ParseDuration
protobuf_heartbeat = "200ms" # the heartbeat interval between the servers. must be parseable by time.ParseDuration
protobuf_min_backoff = "1s" # the minimum backoff after a failed heartbeat attempt
protobuf_max_backoff = "10s" # the maximum backoff after a failed heartbeat attempt
write-buffer-size = 10000
max-response-buffer-size = 100000
concurrent-shard-query-limit = 10
[sharding]
replication-factor = 1
[sharding.short-term]
duration = "7d"
split = 1
[sharding.long-term]
duration = "30d"
split = 1
# split-random = "/^Hf.*/"
[wal]
dir = "/opt/influxdb/shared/data/wal"
flush-after = 1000 # the number of writes after which wal will be flushed, 0 for flushing on every write
bookmark-after = 1000 # the number of writes after which a bookmark will be created
index-after = 1000
requests-per-logfile = 10000

View File

@@ -1,11 +1,12 @@
influxdb:
#image: influxdb/influxdb:1.0-alpine
image: influxdb:latest
container_name: influxdb
ports:
- "2004:2004"
- "8083:8083"
- "8086:8086"
volumes:
- ./blocks/influxdb/influxdb.conf:/etc/influxdb/influxdb.conf
fake-influxdb-data:
image: grafana/fake-data-gen

View File

@@ -0,0 +1,92 @@
reporting-disabled = false
[meta]
# Where the metadata/raft database is stored
dir = "/var/lib/influxdb/meta"
retention-autocreate = true
# If log messages are printed for the meta service
logging-enabled = true
pprof-enabled = false
# The default duration for leases.
lease-duration = "1m0s"
[data]
# Controls if this node holds time series data shards in the cluster
enabled = true
dir = "/var/lib/influxdb/data"
# These are the WAL settings for the storage engine >= 0.9.3
wal-dir = "/var/lib/influxdb/wal"
wal-logging-enabled = true
[coordinator]
write-timeout = "10s"
max-concurrent-queries = 0
query-timeout = "0"
log-queries-after = "0"
max-select-point = 0
max-select-series = 0
max-select-buckets = 0
[retention]
enabled = true
check-interval = "30m"
[shard-precreation]
enabled = true
check-interval = "10m"
advance-period = "30m"
[monitor]
store-enabled = true # Whether to record statistics internally.
store-database = "_internal" # The destination database for recorded statistics
store-interval = "10s" # The interval at which to record statistics
[admin]
enabled = true
bind-address = ":8083"
https-enabled = false
https-certificate = "/etc/ssl/influxdb.pem"
[http]
enabled = true
bind-address = ":8086"
auth-enabled = true
log-enabled = true
write-tracing = false
pprof-enabled = false
https-enabled = false
https-certificate = "/etc/ssl/influxdb.pem"
### Use a separate private key location.
# https-private-key = ""
max-row-limit = 10000
realm = "InfluxDB"
unix-socket-enabled = false # enable http service over unix domain socket
# bind-socket = "/var/run/influxdb.sock"
[subscriber]
enabled = true
[[graphite]]
enabled = false
[[collectd]]
enabled = false
[[opentsdb]]
enabled = false
[[udp]]
enabled = false
[continuous_queries]
log-enabled = true
enabled = true
# run-interval = "1s" # interval for how often continuous queries will be checked if they need to run

View File

@@ -252,7 +252,7 @@ func Register(r *macaron.Macaron) {
r.Group("/alerts", func() {
r.Post("/test", bind(dtos.AlertTestCommand{}), wrap(AlertTest))
r.Post("/:alertId/pause", bind(dtos.PauseAlertCommand{}), wrap(PauseAlert))
r.Post("/:alertId/pause", bind(dtos.PauseAlertCommand{}), wrap(PauseAlert), reqEditorRole)
r.Get("/:alertId", ValidateOrgAlert, wrap(GetAlert))
r.Get("/", wrap(GetAlerts))
r.Get("/states-for-dashboard", wrap(GetAlertStatesForDashboard))
@@ -266,7 +266,7 @@ func Register(r *macaron.Macaron) {
r.Put("/:notificationId", bind(m.UpdateAlertNotificationCommand{}), wrap(UpdateAlertNotification))
r.Get("/:notificationId", wrap(GetAlertNotificationById))
r.Delete("/:notificationId", wrap(DeleteAlertNotification))
}, reqOrgAdmin)
}, reqEditorRole)
r.Get("/annotations", wrap(GetAnnotations))
r.Post("/annotations/mass-delete", reqOrgAdmin, bind(dtos.DeleteAnnotationsCmd{}), wrap(DeleteAnnotations))

View File

@ -17,17 +17,19 @@ type SendEmailCommandSync struct {
}
type SendWebhook struct {
Url string
User string
Password string
Body string
Url string
User string
Password string
Body string
HttpMethod string
}
type SendWebhookSync struct {
Url string
User string
Password string
Body string
Url string
User string
Password string
Body string
HttpMethod string
}
type SendResetPasswordEmailCommand struct {

View File

@@ -24,16 +24,18 @@ func NewWebHookNotifier(model *m.AlertNotification) (alerting.Notifier, error) {
Url: url,
User: model.Settings.Get("user").MustString(),
Password: model.Settings.Get("password").MustString(),
HttpMethod: model.Settings.Get("httpMethod").MustString("POST"),
log: log.New("alerting.notifier.webhook"),
}, nil
}
type WebhookNotifier struct {
NotifierBase
Url string
User string
Password string
log log.Logger
Url string
User string
Password string
HttpMethod string
log log.Logger
}
func (this *WebhookNotifier) Notify(evalContext *alerting.EvalContext) error {
@@ -59,10 +61,11 @@ func (this *WebhookNotifier) Notify(evalContext *alerting.EvalContext) error {
body, _ := bodyJSON.MarshalJSON()
cmd := &m.SendWebhookSync{
Url: this.Url,
User: this.User,
Password: this.Password,
Body: string(body),
Url: this.Url,
User: this.User,
Password: this.Password,
Body: string(body),
HttpMethod: this.HttpMethod,
}
if err := bus.DispatchCtx(evalContext.Ctx, cmd); err != nil {

View File

@@ -61,19 +61,21 @@ func Init() error {
func SendWebhookSync(ctx context.Context, cmd *m.SendWebhookSync) error {
return sendWebRequestSync(ctx, &Webhook{
Url: cmd.Url,
User: cmd.User,
Password: cmd.Password,
Body: cmd.Body,
Url: cmd.Url,
User: cmd.User,
Password: cmd.Password,
Body: cmd.Body,
HttpMethod: cmd.HttpMethod,
})
}
func sendWebhook(cmd *m.SendWebhook) error {
addToWebhookQueue(&Webhook{
Url: cmd.Url,
User: cmd.User,
Password: cmd.Password,
Body: cmd.Body,
Url: cmd.Url,
User: cmd.User,
Password: cmd.Password,
Body: cmd.Body,
HttpMethod: cmd.HttpMethod,
})
return nil

View File

@@ -15,10 +15,11 @@ import (
)
type Webhook struct {
Url string
User string
Password string
Body string
Url string
User string
Password string
Body string
HttpMethod string
}
var webhookQueue chan *Webhook
@@ -44,13 +45,17 @@ func processWebhookQueue() {
}
func sendWebRequestSync(ctx context.Context, webhook *Webhook) error {
webhookLog.Debug("Sending webhook", "url", webhook.Url)
webhookLog.Debug("Sending webhook", "url", webhook.Url, "http method", webhook.HttpMethod)
client := &http.Client{
Timeout: time.Duration(10 * time.Second),
}
request, err := http.NewRequest(http.MethodPost, webhook.Url, bytes.NewReader([]byte(webhook.Body)))
if webhook.HttpMethod == "" {
webhook.HttpMethod = http.MethodPost
}
request, err := http.NewRequest(webhook.HttpMethod, webhook.Url, bytes.NewReader([]byte(webhook.Body)))
if webhook.User != "" && webhook.Password != "" {
request.Header.Add("Authorization", util.GetBasicAuthHeader(webhook.User, webhook.Password))
}
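
The new HttpMethod field defaults to POST, so webhook notifications saved before the setting existed keep their old behavior. A rough standalone sketch of that fallback in isolation; effectiveMethod is an illustrative helper, not part of the patch:

package main

import (
	"fmt"
	"net/http"
)

// effectiveMethod mirrors the fallback above: an empty method (older
// notifications) still results in a POST request.
func effectiveMethod(configured string) string {
	if configured == "" {
		return http.MethodPost
	}
	return configured
}

func main() {
	fmt.Println(effectiveMethod(""))    // POST
	fmt.Println(effectiveMethod("PUT")) // PUT
}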

View File

@@ -46,5 +46,6 @@ func (s *SocialGoogle) UserInfo(client *http.Client) (*BasicUserInfo, error) {
return &BasicUserInfo{
Name: data.Name,
Email: data.Email,
Login: data.Email,
}, nil
}

View File

@@ -124,10 +124,15 @@ func (e *InfluxDBExecutor) createRequest(query string) (*http.Request, error) {
req.URL.RawQuery = params.Encode()
req.Header.Set("User-Agent", "Grafana")
if e.BasicAuth {
req.SetBasicAuth(e.BasicAuthUser, e.BasicAuthPassword)
}
if e.User != "" {
req.SetBasicAuth(e.User, e.Password)
}
glog.Debug("Influxdb request", "url", req.URL.String())
return req, nil
}
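
With both SetBasicAuth calls in place, the datasource-level basic auth is written first and then overwritten by the database user/password when one is configured, so the database credentials win. A minimal sketch of that effective precedence; chooseAuth is a hypothetical helper, not part of the patch:

package main

import "fmt"

// chooseAuth mirrors the precedence above: the later SetBasicAuth call wins,
// so database credentials override datasource basic auth when both are set.
func chooseAuth(basicAuth bool, basicUser, basicPass, user, pass string) (string, string, bool) {
	if user != "" {
		return user, pass, true
	}
	if basicAuth {
		return basicUser, basicPass, true
	}
	return "", "", false // no Authorization header is added
}

func main() {
	u, _, _ := chooseAuth(true, "dsUser", "dsPass", "dbUser", "dbPass")
	fmt.Println(u) // dbUser
}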

View File

@@ -84,8 +84,10 @@ func formatLegend(metric pmodel.Metric, query *PrometheusQuery) string {
reg, _ := regexp.Compile(`\{\{\s*(.+?)\s*\}\}`)
result := reg.ReplaceAllFunc([]byte(query.LegendFormat), func(in []byte) []byte {
ind := strings.Replace(strings.Replace(string(in), "{{", "", 1), "}}", "", 1)
if val, exists := metric[pmodel.LabelName(ind)]; exists {
labelName := strings.Replace(string(in), "{{", "", 1)
labelName = strings.Replace(labelName, "}}", "", 1)
labelName = strings.TrimSpace(labelName)
if val, exists := metric[pmodel.LabelName(labelName)]; exists {
return []byte(val)
}
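
The added TrimSpace means a placeholder written as {{ device }} now resolves the same way as {{device}}, while unknown labels are still left untouched. A self-contained sketch of the same replacement logic, without the pmodel types:

package main

import (
	"fmt"
	"regexp"
	"strings"
)

// renderLegend replicates the fixed logic: strip the braces, trim whitespace,
// and look the remaining name up in the label set.
func renderLegend(format string, labels map[string]string) string {
	reg := regexp.MustCompile(`\{\{\s*(.+?)\s*\}\}`)
	return reg.ReplaceAllStringFunc(format, func(in string) string {
		name := strings.Replace(in, "{{", "", 1)
		name = strings.Replace(name, "}}", "", 1)
		name = strings.TrimSpace(name)
		if val, ok := labels[name]; ok {
			return val
		}
		return in // unknown placeholders are kept verbatim
	})
}

func main() {
	labels := map[string]string{"app": "backend", "device": "mobile"}
	fmt.Println(renderLegend("legend {{app}} {{ device }} {{broken}}", labels))
	// Output: legend backend mobile {{broken}}
}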

View File

@@ -17,7 +17,7 @@ func TestPrometheus(t *testing.T) {
}
query := &PrometheusQuery{
LegendFormat: "legend {{app}} {{device}} {{broken}}",
LegendFormat: "legend {{app}} {{ device }} {{broken}}",
}
So(formatLegend(metric, query), ShouldEqual, "legend backend mobile {{broken}}")

View File

@@ -18,7 +18,7 @@ export class AlertNotificationEditCtrl {
this.model = {
type: 'email',
settings: {
severityFilter: 'none'
httpMethod: 'POST'
},
isDefault: false
};

View File

@@ -32,19 +32,24 @@
<div class="gf-form-group" ng-if="ctrl.model.type === 'webhook'">
<h3 class="page-heading">Webhook settings</h3>
<div class="gf-form">
<span class="gf-form-label width-6">Url</span>
<span class="gf-form-label width-10">Url</span>
<input type="text" required class="gf-form-input max-width-26" ng-model="ctrl.model.settings.url"></input>
</div>
<div class="gf-form-inline">
<div class="gf-form">
<span class="gf-form-label width-6">Username</span>
<input type="text" class="gf-form-input max-width-10" ng-model="ctrl.model.settings.username"></input>
</div>
<div class="gf-form">
<span class="gf-form-label width-6">Password</span>
<input type="text" class="gf-form-input max-width-10" ng-model="ctrl.model.settings.password"></input>
<div class="gf-form">
<span class="gf-form-label width-10">Http Method</span>
<div class="gf-form-select-wrapper width-14">
<select class="gf-form-input" ng-model="ctrl.model.settings.httpMethod" ng-options="t for t in ['POST', 'PUT']">
</select>
</div>
</div>
<div class="gf-form">
<span class="gf-form-label width-10">Username</span>
<input type="text" class="gf-form-input max-width-14" ng-model="ctrl.model.settings.username"></input>
</div>
<div class="gf-form">
<span class="gf-form-label width-10">Password</span>
<input type="text" class="gf-form-input max-width-14" ng-model="ctrl.model.settings.password"></input>
</div>
</div>
<div class="gf-form-group" ng-if="ctrl.model.type === 'slack'">