Merge branch 'master' into develop

Torkel Ödegaard
2017-11-17 15:49:17 +01:00
177 changed files with 14293 additions and 1263 deletions

View File

@@ -8,6 +8,7 @@ import (
"github.com/grafana/grafana/pkg/components/simplejson"
"github.com/grafana/grafana/pkg/middleware"
"github.com/grafana/grafana/pkg/services/annotations"
"github.com/grafana/grafana/pkg/util"
)
func GetAnnotations(c *middleware.Context) Response {
@@ -75,9 +76,11 @@ func PostAnnotation(c *middleware.Context, cmd dtos.PostAnnotationsCmd) Response
return ApiError(500, "Failed to save annotation", err)
}
startID := item.Id
// handle regions
if cmd.IsRegion {
item.RegionId = item.Id
item.RegionId = startID
if item.Data == nil {
item.Data = simplejson.New()
@@ -93,9 +96,18 @@ func PostAnnotation(c *middleware.Context, cmd dtos.PostAnnotationsCmd) Response
if err := repo.Save(&item); err != nil {
return ApiError(500, "Failed to save annotation for region end time", err)
}
return Json(200, util.DynMap{
"message": "Annotation added",
"id": startID,
"endId": item.Id,
})
}
return ApiSuccess("Annotation added")
return Json(200, util.DynMap{
"message": "Annotation added",
"id": startID,
})
}
func formatGraphiteAnnotation(what string, data string) string {
@@ -154,7 +166,10 @@ func PostGraphiteAnnotation(c *middleware.Context, cmd dtos.PostGraphiteAnnotati
return ApiError(500, "Failed to save Graphite annotation", err)
}
return ApiSuccess("Graphite annotation added")
return Json(200, util.DynMap{
"message": "Graphite annotation added",
"id": item.Id,
})
}
func UpdateAnnotation(c *middleware.Context, cmd dtos.UpdateAnnotationsCmd) Response {
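
Note: PostAnnotation and PostGraphiteAnnotation now return the created IDs instead of a bare success message. A minimal sketch of the new response shape as a client-side struct (the field names mirror the util.DynMap keys above; the struct itself is illustrative, not part of this change):

// Illustrative only: a client-side view of the new response body.
type postAnnotationResponse struct {
	Message string `json:"message"`
	ID      int64  `json:"id"`              // id of the (start) annotation
	EndID   int64  `json:"endId,omitempty"` // set only when cmd.IsRegion is true
}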

View File

@@ -212,10 +212,10 @@ func (hs *HttpServer) registerRoutes() {
// Data sources
apiRoute.Group("/datasources", func(datasourceRoute RouteRegister) {
datasourceRoute.Get("/", wrap(GetDataSources))
datasourceRoute.Post("/", quota("data_source"), bind(m.AddDataSourceCommand{}), AddDataSource)
datasourceRoute.Post("/", quota("data_source"), bind(m.AddDataSourceCommand{}), wrap(AddDataSource))
datasourceRoute.Put("/:id", bind(m.UpdateDataSourceCommand{}), wrap(UpdateDataSource))
datasourceRoute.Delete("/:id", DeleteDataSourceById)
datasourceRoute.Delete("/name/:name", DeleteDataSourceByName)
datasourceRoute.Delete("/:id", wrap(DeleteDataSourceById))
datasourceRoute.Delete("/name/:name", wrap(DeleteDataSourceByName))
datasourceRoute.Get("/:id", wrap(GetDataSourceById))
datasourceRoute.Get("/name/:name", wrap(GetDataSourceByName))
}, reqOrgAdmin)
@@ -340,8 +340,8 @@ func (hs *HttpServer) registerRoutes() {
r.Any("/api/gnet/*", reqSignedIn, ProxyGnetRequest)
// Gravatar service.
avt := avatar.CacheServer()
r.Get("/avatar/:hash", avt.ServeHTTP)
avatarCacheServer := avatar.NewCacheServer()
r.Get("/avatar/:hash", avatarCacheServer.Handler)
// Websocket
r.Any("/ws", hs.streamManager.Serve)
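
Note: AddDataSource and the delete handlers now return a Response and are registered through wrap. A minimal sketch of what such an adapter does, assuming Response exposes a WriteTo method (the real helper lives elsewhere in pkg/api and may differ):

// Sketch of a wrap-style adapter; the signatures here are assumptions.
func wrapHandler(action func(c *middleware.Context) Response) macaron.Handler {
	return func(c *middleware.Context) {
		if res := action(c); res != nil {
			res.WriteTo(c.Resp) // assumed: Response knows how to render itself
		}
	}
}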

View File

@@ -24,6 +24,7 @@ import (
"github.com/grafana/grafana/pkg/log"
"github.com/grafana/grafana/pkg/setting"
"gopkg.in/macaron.v1"
)
var gravatarSource string
@@ -89,12 +90,12 @@ func (this *Avatar) Update() (err error) {
return err
}
type service struct {
type CacheServer struct {
notFound *Avatar
cache map[string]*Avatar
}
func (this *service) mustInt(r *http.Request, defaultValue int, keys ...string) (v int) {
func (this *CacheServer) mustInt(r *http.Request, defaultValue int, keys ...string) (v int) {
for _, k := range keys {
if _, err := fmt.Sscanf(r.FormValue(k), "%d", &v); err == nil {
defaultValue = v
@@ -103,8 +104,8 @@ func (this *service) mustInt(r *http.Request, defaultValue int, keys ...string)
return defaultValue
}
func (this *service) ServeHTTP(w http.ResponseWriter, r *http.Request) {
urlPath := r.URL.Path
func (this *CacheServer) Handler(ctx *macaron.Context) {
urlPath := ctx.Req.URL.Path
hash := urlPath[strings.LastIndex(urlPath, "/")+1:]
var avatar *Avatar
@@ -126,20 +127,24 @@ func (this *service) ServeHTTP(w http.ResponseWriter, r *http.Request) {
this.cache[hash] = avatar
}
w.Header().Set("Content-Type", "image/jpeg")
w.Header().Set("Content-Length", strconv.Itoa(len(avatar.data.Bytes())))
w.Header().Set("Cache-Control", "private, max-age=3600")
ctx.Resp.Header().Add("Content-Type", "image/jpeg")
if err := avatar.Encode(w); err != nil {
if !setting.EnableGzip {
ctx.Resp.Header().Add("Content-Length", strconv.Itoa(len(avatar.data.Bytes())))
}
ctx.Resp.Header().Add("Cache-Control", "private, max-age=3600")
if err := avatar.Encode(ctx.Resp); err != nil {
log.Warn("avatar encode error: %v", err)
w.WriteHeader(500)
ctx.WriteHeader(500)
}
}
func CacheServer() http.Handler {
func NewCacheServer() *CacheServer {
UpdateGravatarSource()
return &service{
return &CacheServer{
notFound: newNotFound(),
cache: make(map[string]*Avatar),
}

View File

@@ -33,6 +33,7 @@ func GetDataSources(c *middleware.Context) Response {
BasicAuth: ds.BasicAuth,
IsDefault: ds.IsDefault,
JsonData: ds.JsonData,
ReadOnly: ds.ReadOnly,
}
if plugin, exists := plugins.DataSources[ds.Type]; exists {
@@ -68,59 +69,70 @@ func GetDataSourceById(c *middleware.Context) Response {
return Json(200, &dtos)
}
func DeleteDataSourceById(c *middleware.Context) {
func DeleteDataSourceById(c *middleware.Context) Response {
id := c.ParamsInt64(":id")
if id <= 0 {
c.JsonApiErr(400, "Missing valid datasource id", nil)
return
return ApiError(400, "Missing valid datasource id", nil)
}
ds, err := getRawDataSourceById(id, c.OrgId)
if err != nil {
return ApiError(400, "Failed to delete datasource", nil)
}
if ds.ReadOnly {
return ApiError(403, "Cannot delete read-only data source", nil)
}
cmd := &m.DeleteDataSourceByIdCommand{Id: id, OrgId: c.OrgId}
err := bus.Dispatch(cmd)
err = bus.Dispatch(cmd)
if err != nil {
c.JsonApiErr(500, "Failed to delete datasource", err)
return
return ApiError(500, "Failed to delete datasource", err)
}
c.JsonOK("Data source deleted")
return ApiSuccess("Data source deleted")
}
func DeleteDataSourceByName(c *middleware.Context) {
func DeleteDataSourceByName(c *middleware.Context) Response {
name := c.Params(":name")
if name == "" {
c.JsonApiErr(400, "Missing valid datasource name", nil)
return
return ApiError(400, "Missing valid datasource name", nil)
}
getCmd := &m.GetDataSourceByNameQuery{Name: name, OrgId: c.OrgId}
if err := bus.Dispatch(getCmd); err != nil {
return ApiError(500, "Failed to delete datasource", err)
}
if getCmd.Result.ReadOnly {
return ApiError(403, "Cannot delete read-only data source", nil)
}
cmd := &m.DeleteDataSourceByNameCommand{Name: name, OrgId: c.OrgId}
err := bus.Dispatch(cmd)
if err != nil {
c.JsonApiErr(500, "Failed to delete datasource", err)
return
return ApiError(500, "Failed to delete datasource", err)
}
c.JsonOK("Data source deleted")
return ApiSuccess("Data source deleted")
}
func AddDataSource(c *middleware.Context, cmd m.AddDataSourceCommand) {
func AddDataSource(c *middleware.Context, cmd m.AddDataSourceCommand) Response {
cmd.OrgId = c.OrgId
if err := bus.Dispatch(&cmd); err != nil {
if err == m.ErrDataSourceNameExists {
c.JsonApiErr(409, err.Error(), err)
return
return ApiError(409, err.Error(), err)
}
c.JsonApiErr(500, "Failed to add datasource", err)
return
return ApiError(500, "Failed to add datasource", err)
}
ds := convertModelToDtos(cmd.Result)
c.JSON(200, util.DynMap{
return Json(200, util.DynMap{
"message": "Datasource added",
"id": cmd.Result.Id,
"name": cmd.Result.Name,
@@ -160,11 +172,14 @@ func fillWithSecureJsonData(cmd *m.UpdateDataSourceCommand) error {
}
ds, err := getRawDataSourceById(cmd.Id, cmd.OrgId)
if err != nil {
return err
}
if ds.ReadOnly {
return m.ErrDatasourceIsReadOnly
}
secureJsonData := ds.SecureJsonData.Decrypt()
for k, v := range secureJsonData {
@@ -201,6 +216,7 @@ func GetDataSourceByName(c *middleware.Context) Response {
}
dtos := convertModelToDtos(query.Result)
dtos.ReadOnly = true
return Json(200, &dtos)
}
@@ -242,6 +258,7 @@ func convertModelToDtos(ds *m.DataSource) dtos.DataSource {
JsonData: ds.JsonData,
SecureJsonFields: map[string]bool{},
Version: ds.Version,
ReadOnly: ds.ReadOnly,
}
for k, v := range ds.SecureJsonData {
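
Note: both delete paths now apply the same read-only check before dispatching the delete command. Factored out as a sketch (ensureDeletable is hypothetical; the handlers above inline the check):

// Hypothetical helper illustrating the guard both handlers apply.
func ensureDeletable(ds *m.DataSource) Response {
	if ds.ReadOnly {
		return ApiError(403, "Cannot delete read-only data source", nil)
	}
	return nil // nil means the caller may proceed with the delete
}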

View File

@@ -26,6 +26,7 @@ type DataSource struct {
JsonData *simplejson.Json `json:"jsonData,omitempty"`
SecureJsonFields map[string]bool `json:"secureJsonFields"`
Version int `json:"version"`
ReadOnly bool `json:"readOnly"`
}
type DataSourceListItemDTO struct {
@@ -42,6 +43,7 @@ type DataSourceListItemDTO struct {
BasicAuth bool `json:"basicAuth"`
IsDefault bool `json:"isDefault"`
JsonData *simplejson.Json `json:"jsonData,omitempty"`
ReadOnly bool `json:"readOnly"`
}
type DataSourceList []DataSourceListItemDTO

View File

@@ -146,12 +146,13 @@ func (hs *HttpServer) newMacaron() *macaron.Macaron {
m := macaron.New()
m.Use(middleware.Logger())
m.Use(middleware.Recovery())
if setting.EnableGzip {
m.Use(middleware.Gziper())
}
m.Use(middleware.Recovery())
for _, route := range plugins.StaticRoutes {
pluginRoute := path.Join("/public/plugins/", route.PluginId)
hs.log.Debug("Plugins: Adding route", "route", pluginRoute, "dir", route.Directory)

View File

@@ -81,8 +81,6 @@ func (rr *routeRegister) Register(router Router) *macaron.Router {
}
func (rr *routeRegister) route(pattern, method string, handlers ...macaron.Handler) {
//inject tracing
h := make([]macaron.Handler, 0)
for _, fn := range rr.namedMiddleware {
h = append(h, fn(pattern))

View File

@@ -16,7 +16,6 @@ import (
"github.com/grafana/grafana/pkg/metrics"
"github.com/grafana/grafana/pkg/models"
"github.com/grafana/grafana/pkg/services/sqlstore"
"github.com/grafana/grafana/pkg/setting"
_ "github.com/grafana/grafana/pkg/services/alerting/conditions"
@@ -88,11 +87,6 @@ func main() {
server.Start()
}
func initSql() {
sqlstore.NewEngine()
sqlstore.EnsureAdminUser()
}
func listenToSystemSignals(server models.GrafanaServer) {
signalChan := make(chan os.Signal, 1)
ignoreChan := make(chan os.Signal, 1)

View File

@@ -9,6 +9,9 @@ import (
"strconv"
"time"
"github.com/grafana/grafana/pkg/cmd/grafana-cli/logger"
"github.com/grafana/grafana/pkg/services/provisioning"
"golang.org/x/sync/errgroup"
"github.com/grafana/grafana/pkg/api"
@@ -21,7 +24,9 @@ import (
"github.com/grafana/grafana/pkg/services/cleanup"
"github.com/grafana/grafana/pkg/services/notifications"
"github.com/grafana/grafana/pkg/services/search"
"github.com/grafana/grafana/pkg/services/sqlstore"
"github.com/grafana/grafana/pkg/setting"
"github.com/grafana/grafana/pkg/social"
"github.com/grafana/grafana/pkg/tracing"
)
@@ -54,12 +59,19 @@ func (g *GrafanaServerImpl) Start() {
g.writePIDFile()
initSql()
metrics.Init(setting.Cfg)
search.Init()
login.Init()
social.NewOAuthService()
plugins.Init()
if err := provisioning.StartUp(setting.DatasourcesPath); err != nil {
logger.Error("Failed to provision Grafana from config", "error", err)
g.Shutdown(1, "Startup failed")
return
}
closer, err := tracing.Init(setting.Cfg)
if err != nil {
g.log.Error("Tracing settings are not valid", "error", err)
@@ -87,6 +99,11 @@ func (g *GrafanaServerImpl) Start() {
g.startHttpServer()
}
func initSql() {
sqlstore.NewEngine()
sqlstore.EnsureAdminUser()
}
func (g *GrafanaServerImpl) initLogging() {
err := setting.NewConfigContext(&setting.CommandLineArgs{
Config: *configFile,

View File

@@ -363,6 +363,7 @@ type scenarioContext struct {
respJson map[string]interface{}
handlerFunc handlerFunc
defaultHandler macaron.Handler
url string
req *http.Request
}

View File

@@ -123,23 +123,22 @@ func Recovery() macaron.Handler {
c.Data["ErrorMsg"] = string(stack)
}
c.HTML(500, "500")
ctx, ok := c.Data["ctx"].(*Context)
// // Lookup the current responsewriter
// val := c.GetVal(inject.InterfaceOf((*http.ResponseWriter)(nil)))
// res := val.Interface().(http.ResponseWriter)
//
// // respond with panic message while in development mode
// var body []byte
// if setting.Env == setting.DEV {
// res.Header().Set("Content-Type", "text/html")
// body = []byte(fmt.Sprintf(panicHtml, err, err, stack))
// }
//
// res.WriteHeader(http.StatusInternalServerError)
// if nil != body {
// res.Write(body)
// }
if ok && ctx.IsApiRequest() {
resp := make(map[string]interface{})
resp["message"] = "Internal Server Error - Check the Grafana server logs for the detailed error message."
if c.Data["ErrorMsg"] != nil {
resp["error"] = fmt.Sprintf("%v - %v", c.Data["Title"], c.Data["ErrorMsg"])
} else {
resp["error"] = c.Data["Title"]
}
c.JSON(500, resp)
} else {
c.HTML(500, "500")
}
}
}()
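
Note: the recovery middleware now branches on ctx.IsApiRequest so API clients get a JSON error body instead of the HTML error page. A plausible implementation of that check, assuming it is a simple path-prefix test:

// Assumption: IsApiRequest keys off the /api path prefix.
func (ctx *Context) IsApiRequest() bool {
	return strings.HasPrefix(ctx.Req.URL.Path, "/api")
}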

View File

@@ -0,0 +1,79 @@
package middleware
import (
"path/filepath"
"testing"
"github.com/go-macaron/session"
"github.com/grafana/grafana/pkg/bus"
. "github.com/smartystreets/goconvey/convey"
"gopkg.in/macaron.v1"
)
func TestRecoveryMiddleware(t *testing.T) {
Convey("Given an api route that panics", t, func() {
apiUrl := "/api/whatever"
recoveryScenario("recovery middleware should return json", apiUrl, func(sc *scenarioContext) {
sc.handlerFunc = PanicHandler
sc.fakeReq("GET", apiUrl).exec()
sc.req.Header.Add("content-type", "application/json")
So(sc.resp.Code, ShouldEqual, 500)
So(sc.respJson["message"], ShouldStartWith, "Internal Server Error - Check the Grafana server logs for the detailed error message.")
So(sc.respJson["error"], ShouldStartWith, "Server Error")
})
})
Convey("Given a non-api route that panics", t, func() {
apiUrl := "/whatever"
recoveryScenario("recovery middleware should return html", apiUrl, func(sc *scenarioContext) {
sc.handlerFunc = PanicHandler
sc.fakeReq("GET", apiUrl).exec()
So(sc.resp.Code, ShouldEqual, 500)
So(sc.resp.Header().Get("content-type"), ShouldEqual, "text/html; charset=UTF-8")
So(sc.resp.Body.String(), ShouldContainSubstring, "<title>Grafana - Error</title>")
})
})
}
func PanicHandler(c *Context) {
panic("Handler has panicked")
}
func recoveryScenario(desc string, url string, fn scenarioFunc) {
Convey(desc, func() {
defer bus.ClearBusHandlers()
sc := &scenarioContext{
url: url,
}
viewsPath, _ := filepath.Abs("../../public/views")
sc.m = macaron.New()
sc.m.Use(Recovery())
sc.m.Use(macaron.Renderer(macaron.RenderOptions{
Directory: viewsPath,
Delims: macaron.Delims{Left: "[[", Right: "]]"},
}))
sc.m.Use(GetContextHandler())
// mock out gc goroutine
startSessionGC = func() {}
sc.m.Use(Sessioner(&session.Options{}))
sc.m.Use(OrgRedirect())
sc.m.Use(AddDefaultResponseHeaders())
sc.defaultHandler = func(c *Context) {
sc.context = c
if sc.handlerFunc != nil {
sc.handlerFunc(sc.context)
}
}
sc.m.Get(url, sc.defaultHandler)
fn(sc)
})
}

View File

@@ -69,3 +69,10 @@ type GetDashboardVersionsQuery struct {
Result []*DashboardVersionDTO
}
//
// Commands
//
type DeleteExpiredVersionsCommand struct {
}

View File

@@ -27,6 +27,7 @@ var (
ErrDataSourceNotFound = errors.New("Data source not found")
ErrDataSourceNameExists = errors.New("Data source with same name already exists")
ErrDataSourceUpdatingOldVersion = errors.New("Trying to update old version of datasource")
ErrDatasourceIsReadOnly = errors.New("Data source is readonly. Can only be updated from configuration.")
)
type DsAccess string
@@ -50,6 +51,7 @@ type DataSource struct {
IsDefault bool
JsonData *simplejson.Json
SecureJsonData securejsondata.SecureJsonData
ReadOnly bool
Created time.Time
Updated time.Time
@@ -109,6 +111,7 @@ type AddDataSourceCommand struct {
IsDefault bool `json:"isDefault"`
JsonData *simplejson.Json `json:"jsonData"`
SecureJsonData map[string]string `json:"secureJsonData"`
ReadOnly bool `json:"readOnly"`
OrgId int64 `json:"-"`
@@ -132,6 +135,7 @@ type UpdateDataSourceCommand struct {
JsonData *simplejson.Json `json:"jsonData"`
SecureJsonData map[string]string `json:"secureJsonData"`
Version int `json:"version"`
ReadOnly bool `json:"readOnly"`
OrgId int64 `json:"-"`
Id int64 `json:"-"`
@@ -142,11 +146,15 @@ type UpdateDataSourceCommand struct {
type DeleteDataSourceByIdCommand struct {
Id int64
OrgId int64
DeletedDatasourcesCount int64
}
type DeleteDataSourceByNameCommand struct {
Name string
OrgId int64
DeletedDatasourcesCount int64
}
// ---------------------
@@ -157,6 +165,10 @@ type GetDataSourcesQuery struct {
Result []*DataSource
}
type GetAllDataSourcesQuery struct {
Result []*DataSource
}
type GetDataSourceByIdQuery struct {
Id int64
OrgId int64

View File

@@ -0,0 +1,122 @@
package notifiers
import (
"encoding/json"
"github.com/grafana/grafana/pkg/bus"
"github.com/grafana/grafana/pkg/log"
m "github.com/grafana/grafana/pkg/models"
"github.com/grafana/grafana/pkg/services/alerting"
)
func init() {
alerting.RegisterNotifier(&alerting.NotifierPlugin{
Type: "teams",
Name: "Microsoft Teams",
Description: "Sends notifications using an Incoming Webhook connector to Microsoft Teams",
Factory: NewTeamsNotifier,
OptionsTemplate: `
<h3 class="page-heading">Teams settings</h3>
<div class="gf-form max-width-30">
<span class="gf-form-label width-6">Url</span>
<input type="text" required class="gf-form-input max-width-30" ng-model="ctrl.model.settings.url" placeholder="Teams incoming webhook url"></input>
</div>
`,
})
}
func NewTeamsNotifier(model *m.AlertNotification) (alerting.Notifier, error) {
url := model.Settings.Get("url").MustString()
if url == "" {
return nil, alerting.ValidationError{Reason: "Could not find url property in settings"}
}
return &TeamsNotifier{
NotifierBase: NewNotifierBase(model.Id, model.IsDefault, model.Name, model.Type, model.Settings),
Url: url,
log: log.New("alerting.notifier.teams"),
}, nil
}
type TeamsNotifier struct {
NotifierBase
Url string
Recipient string
Mention string
log log.Logger
}
func (this *TeamsNotifier) Notify(evalContext *alerting.EvalContext) error {
this.log.Info("Executing teams notification", "ruleId", evalContext.Rule.Id, "notification", this.Name)
ruleUrl, err := evalContext.GetRuleUrl()
if err != nil {
this.log.Error("Failed to get rule link", "error", err)
return err
}
fields := make([]map[string]interface{}, 0)
fieldLimitCount := 4
for index, evt := range evalContext.EvalMatches {
fields = append(fields, map[string]interface{}{
"name": evt.Metric,
"value": evt.Value,
})
if index > fieldLimitCount {
break
}
}
if evalContext.Error != nil {
fields = append(fields, map[string]interface{}{
"name": "Error message",
"value": evalContext.Error.Error(),
})
}
message := this.Mention
if evalContext.Rule.State != m.AlertStateOK { // don't add the message when going back to alert state OK
message += " " + evalContext.Rule.Message
}
body := map[string]interface{}{
"@type": "MessageCard",
"@context": "http://schema.org/extensions",
"summary": message,
"title": evalContext.GetNotificationTitle(),
"themeColor": evalContext.GetStateModel().Color,
"sections": []map[string]interface{}{
{
"title": "Details",
"facts": fields,
"images": []map[string]interface{}{
{
"image": evalContext.ImagePublicUrl,
},
},
"text": message,
"potentialAction": []map[string]interface{}{
{
"@context": "http://schema.org",
"@type": "ViewAction",
"name": "View Rule",
"target": []string{
ruleUrl,
},
},
},
},
},
}
data, _ := json.Marshal(&body)
cmd := &m.SendWebhookSync{Url: this.Url, Body: string(data)}
if err := bus.DispatchCtx(evalContext.Ctx, cmd); err != nil {
this.log.Error("Failed to send teams notification", "error", err, "webhook", this.Name)
return err
}
return nil
}
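
Note: the body built above is an Office 365 connector MessageCard. Trimmed to its essentials, the JSON sent to the webhook looks roughly like this (all values are illustrative; real ones come from the eval context):

// Illustrative payload only.
const exampleMessageCard = `{
  "@type": "MessageCard",
  "@context": "http://schema.org/extensions",
  "summary": "[Alerting] CPU alert",
  "title": "[Alerting] CPU alert",
  "themeColor": "#D63232",
  "sections": [{
    "title": "Details",
    "facts": [{"name": "cpu", "value": 95.2}],
    "text": "CPU usage is above 90%"
  }]
}`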

View File

@@ -0,0 +1,75 @@
package notifiers
import (
"testing"
"github.com/grafana/grafana/pkg/components/simplejson"
m "github.com/grafana/grafana/pkg/models"
. "github.com/smartystreets/goconvey/convey"
)
func TestTeamsNotifier(t *testing.T) {
Convey("Teams notifier tests", t, func() {
Convey("Parsing alert notification from settings", func() {
Convey("empty settings should return error", func() {
json := `{ }`
settingsJSON, _ := simplejson.NewJson([]byte(json))
model := &m.AlertNotification{
Name: "ops",
Type: "teams",
Settings: settingsJSON,
}
_, err := NewTeamsNotifier(model)
So(err, ShouldNotBeNil)
})
Convey("from settings", func() {
json := `
{
"url": "http://google.com"
}`
settingsJSON, _ := simplejson.NewJson([]byte(json))
model := &m.AlertNotification{
Name: "ops",
Type: "teams",
Settings: settingsJSON,
}
not, err := NewTeamsNotifier(model)
teamsNotifier := not.(*TeamsNotifier)
So(err, ShouldBeNil)
So(teamsNotifier.Name, ShouldEqual, "ops")
So(teamsNotifier.Type, ShouldEqual, "teams")
So(teamsNotifier.Url, ShouldEqual, "http://google.com")
})
Convey("from settings with Recipient and Mention", func() {
json := `
{
"url": "http://google.com"
}`
settingsJSON, _ := simplejson.NewJson([]byte(json))
model := &m.AlertNotification{
Name: "ops",
Type: "teams",
Settings: settingsJSON,
}
not, err := NewTeamsNotifier(model)
teamsNotifier := not.(*TeamsNotifier)
So(err, ShouldBeNil)
So(teamsNotifier.Name, ShouldEqual, "ops")
So(teamsNotifier.Type, ShouldEqual, "teams")
So(teamsNotifier.Url, ShouldEqual, "http://google.com")
})
})
})
}

View File

@@ -39,12 +39,13 @@ func (service *CleanUpService) Run(ctx context.Context) error {
func (service *CleanUpService) start(ctx context.Context) error {
service.cleanUpTmpFiles()
ticker := time.NewTicker(time.Hour * 1)
ticker := time.NewTicker(time.Minute * 10)
for {
select {
case <-ticker.C:
service.cleanUpTmpFiles()
service.deleteExpiredSnapshots()
service.deleteExpiredDashboardVersions()
case <-ctx.Done():
return ctx.Err()
}
@@ -83,3 +84,7 @@ func (service *CleanUpService) cleanUpTmpFiles() {
func (service *CleanUpService) deleteExpiredSnapshots() {
bus.Dispatch(&m.DeleteExpiredSnapshotsCommand{})
}
func (service *CleanUpService) deleteExpiredDashboardVersions() {
bus.Dispatch(&m.DeleteExpiredVersionsCommand{})
}

View File

@@ -0,0 +1,148 @@
package datasources
import (
"errors"
"io/ioutil"
"path/filepath"
"strings"
"github.com/grafana/grafana/pkg/bus"
"github.com/grafana/grafana/pkg/log"
"github.com/grafana/grafana/pkg/models"
yaml "gopkg.in/yaml.v2"
)
var (
ErrInvalidConfigToManyDefault = errors.New("datasource.yaml config is invalid. Only one datasource can be marked as default")
)
func Provision(configDirectory string) error {
dc := newDatasourceProvisioner(log.New("provisioning.datasources"))
return dc.applyChanges(configDirectory)
}
type DatasourceProvisioner struct {
log log.Logger
cfgProvider configReader
}
func newDatasourceProvisioner(log log.Logger) DatasourceProvisioner {
return DatasourceProvisioner{
log: log,
cfgProvider: configReader{},
}
}
func (dc *DatasourceProvisioner) apply(cfg *DatasourcesAsConfig) error {
if err := dc.deleteDatasources(cfg.DeleteDatasources); err != nil {
return err
}
for _, ds := range cfg.Datasources {
cmd := &models.GetDataSourceByNameQuery{OrgId: ds.OrgId, Name: ds.Name}
err := bus.Dispatch(cmd)
if err != nil && err != models.ErrDataSourceNotFound {
return err
}
if err == models.ErrDataSourceNotFound {
dc.log.Info("inserting datasource from configuration", "name", ds.Name)
insertCmd := createInsertCommand(ds)
if err := bus.Dispatch(insertCmd); err != nil {
return err
}
} else {
dc.log.Debug("updating datasource from configuration", "name", ds.Name)
updateCmd := createUpdateCommand(ds, cmd.Result.Id)
if err := bus.Dispatch(updateCmd); err != nil {
return err
}
}
}
return nil
}
func (dc *DatasourceProvisioner) applyChanges(configPath string) error {
configs, err := dc.cfgProvider.readConfig(configPath)
if err != nil {
return err
}
for _, cfg := range configs {
if err := dc.apply(cfg); err != nil {
return err
}
}
return nil
}
func (dc *DatasourceProvisioner) deleteDatasources(dsToDelete []*DeleteDatasourceConfig) error {
for _, ds := range dsToDelete {
cmd := &models.DeleteDataSourceByNameCommand{OrgId: ds.OrgId, Name: ds.Name}
if err := bus.Dispatch(cmd); err != nil {
return err
}
if cmd.DeletedDatasourcesCount > 0 {
dc.log.Info("deleted datasource based on configuration", "name", ds.Name)
}
}
return nil
}
type configReader struct{}
func (configReader) readConfig(path string) ([]*DatasourcesAsConfig, error) {
files, err := ioutil.ReadDir(path)
if err != nil {
return nil, err
}
var datasources []*DatasourcesAsConfig
for _, file := range files {
if strings.HasSuffix(file.Name(), ".yaml") || strings.HasSuffix(file.Name(), ".yml") {
filename, _ := filepath.Abs(filepath.Join(path, file.Name()))
yamlFile, err := ioutil.ReadFile(filename)
if err != nil {
return nil, err
}
var datasource *DatasourcesAsConfig
err = yaml.Unmarshal(yamlFile, &datasource)
if err != nil {
return nil, err
}
datasources = append(datasources, datasource)
}
}
defaultCount := 0
for _, cfg := range datasources {
for _, ds := range cfg.Datasources {
if ds.OrgId == 0 {
ds.OrgId = 1
}
if ds.IsDefault {
defaultCount++
if defaultCount > 1 {
return nil, ErrInvalidConfigToManyDefault
}
}
}
for _, ds := range cfg.DeleteDatasources {
if ds.OrgId == 0 {
ds.OrgId = 1
}
}
}
return datasources, nil
}
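
Note: the entry point is Provision, called once at startup (see the grafana-server change earlier in this diff). A minimal usage sketch; the error handling here is illustrative:

// Usage sketch; the directory is whatever [paths] datasources resolves to.
if err := datasources.Provision(setting.DatasourcesPath); err != nil {
	logger.Error("Datasource provisioning failed", "error", err)
	// the real server shuts down on this error rather than continuing
}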

View File

@@ -0,0 +1,202 @@
package datasources
import (
"testing"
"github.com/grafana/grafana/pkg/bus"
"github.com/grafana/grafana/pkg/log"
"github.com/grafana/grafana/pkg/models"
. "github.com/smartystreets/goconvey/convey"
)
var (
logger log.Logger = log.New("fake.logger")
oneDatasourcesConfig string = ""
twoDatasourcesConfig string = "./test-configs/two-datasources"
twoDatasourcesConfigPurgeOthers string = "./test-configs/insert-two-delete-two"
doubleDatasourcesConfig string = "./test-configs/double-default"
allProperties string = "./test-configs/all-properties"
brokenYaml string = "./test-configs/broken-yaml"
fakeRepo *fakeRepository
)
func TestDatasourceAsConfig(t *testing.T) {
Convey("Testing datasource as configuration", t, func() {
fakeRepo = &fakeRepository{}
bus.ClearBusHandlers()
bus.AddHandler("test", mockDelete)
bus.AddHandler("test", mockInsert)
bus.AddHandler("test", mockUpdate)
bus.AddHandler("test", mockGet)
bus.AddHandler("test", mockGetAll)
Convey("Two configured datasources", func() {
Convey("no datasource in database", func() {
dc := newDatasourceProvisioner(logger)
err := dc.applyChanges(twoDatasourcesConfig)
if err != nil {
t.Fatalf("applyChanges returned an error %v", err)
}
So(len(fakeRepo.deleted), ShouldEqual, 0)
So(len(fakeRepo.inserted), ShouldEqual, 2)
So(len(fakeRepo.updated), ShouldEqual, 0)
})
Convey("One datasource in database with same name", func() {
fakeRepo.loadAll = []*models.DataSource{
{Name: "Graphite", OrgId: 1, Id: 1},
}
Convey("should update one datasource", func() {
dc := newDatasourceProvisioner(logger)
err := dc.applyChanges(twoDatasourcesConfig)
if err != nil {
t.Fatalf("applyChanges returned an error %v", err)
}
So(len(fakeRepo.deleted), ShouldEqual, 0)
So(len(fakeRepo.inserted), ShouldEqual, 1)
So(len(fakeRepo.updated), ShouldEqual, 1)
})
})
Convey("Two datasources with is_default", func() {
dc := newDatasourceProvisioner(logger)
err := dc.applyChanges(doubleDatasourcesConfig)
Convey("should raise error", func() {
So(err, ShouldEqual, ErrInvalidConfigToManyDefault)
})
})
})
Convey("Two configured datasources and purge others", func() {
Convey("two other datasources in database", func() {
fakeRepo.loadAll = []*models.DataSource{
{Name: "old-graphite", OrgId: 1, Id: 1},
{Name: "old-graphite2", OrgId: 1, Id: 2},
}
Convey("should have two new datasources", func() {
dc := newDatasourceProvisioner(logger)
err := dc.applyChanges(twoDatasourcesConfigPurgeOthers)
if err != nil {
t.Fatalf("applyChanges returned an error %v", err)
}
So(len(fakeRepo.deleted), ShouldEqual, 2)
So(len(fakeRepo.inserted), ShouldEqual, 2)
So(len(fakeRepo.updated), ShouldEqual, 0)
})
})
})
Convey("Two configured datasources and purge others = false", func() {
Convey("two other datasources in database", func() {
fakeRepo.loadAll = []*models.DataSource{
{Name: "Graphite", OrgId: 1, Id: 1},
{Name: "old-graphite2", OrgId: 1, Id: 2},
}
Convey("should insert one and update one datasource", func() {
dc := newDatasourceProvisioner(logger)
err := dc.applyChanges(twoDatasourcesConfig)
if err != nil {
t.Fatalf("applyChanges returned an error %v", err)
}
So(len(fakeRepo.deleted), ShouldEqual, 0)
So(len(fakeRepo.inserted), ShouldEqual, 1)
So(len(fakeRepo.updated), ShouldEqual, 1)
})
})
})
Convey("broken yaml should return error", func() {
_, err := configReader{}.readConfig(brokenYaml)
So(err, ShouldNotBeNil)
})
Convey("can read all properties", func() {
cfgProvider := configReader{}
cfg, err := cfgProvider.readConfig(allProperties)
if err != nil {
t.Fatalf("readConfig returned an error %v", err)
}
So(len(cfg), ShouldEqual, 2)
dsCfg := cfg[0]
ds := dsCfg.Datasources[0]
So(ds.Name, ShouldEqual, "name")
So(ds.Type, ShouldEqual, "type")
So(ds.Access, ShouldEqual, models.DS_ACCESS_PROXY)
So(ds.OrgId, ShouldEqual, 2)
So(ds.Url, ShouldEqual, "url")
So(ds.User, ShouldEqual, "user")
So(ds.Password, ShouldEqual, "password")
So(ds.Database, ShouldEqual, "database")
So(ds.BasicAuth, ShouldBeTrue)
So(ds.BasicAuthUser, ShouldEqual, "basic_auth_user")
So(ds.BasicAuthPassword, ShouldEqual, "basic_auth_password")
So(ds.WithCredentials, ShouldBeTrue)
So(ds.IsDefault, ShouldBeTrue)
So(ds.Editable, ShouldBeTrue)
So(len(ds.JsonData), ShouldBeGreaterThan, 2)
So(ds.JsonData["graphiteVersion"], ShouldEqual, "1.1")
So(ds.JsonData["tlsAuth"], ShouldEqual, true)
So(ds.JsonData["tlsAuthWithCACert"], ShouldEqual, true)
So(len(ds.SecureJsonData), ShouldBeGreaterThan, 2)
So(ds.SecureJsonData["tlsCACert"], ShouldEqual, "MjNOcW9RdkbUDHZmpco2HCYzVq9dE+i6Yi+gmUJotq5CDA==")
So(ds.SecureJsonData["tlsClientCert"], ShouldEqual, "ckN0dGlyMXN503YNfjTcf9CV+GGQneN+xmAclQ==")
So(ds.SecureJsonData["tlsClientKey"], ShouldEqual, "ZkN4aG1aNkja/gKAB1wlnKFIsy2SRDq4slrM0A==")
dstwo := cfg[1].Datasources[0]
So(dstwo.Name, ShouldEqual, "name2")
})
})
}
type fakeRepository struct {
inserted []*models.AddDataSourceCommand
deleted []*models.DeleteDataSourceByNameCommand
updated []*models.UpdateDataSourceCommand
loadAll []*models.DataSource
}
func mockDelete(cmd *models.DeleteDataSourceByNameCommand) error {
fakeRepo.deleted = append(fakeRepo.deleted, cmd)
return nil
}
func mockUpdate(cmd *models.UpdateDataSourceCommand) error {
fakeRepo.updated = append(fakeRepo.updated, cmd)
return nil
}
func mockInsert(cmd *models.AddDataSourceCommand) error {
fakeRepo.inserted = append(fakeRepo.inserted, cmd)
return nil
}
func mockGetAll(cmd *models.GetAllDataSourcesQuery) error {
cmd.Result = fakeRepo.loadAll
return nil
}
func mockGet(cmd *models.GetDataSourceByNameQuery) error {
for _, v := range fakeRepo.loadAll {
if cmd.Name == v.Name && cmd.OrgId == v.OrgId {
cmd.Result = v
return nil
}
}
return models.ErrDataSourceNotFound
}

View File

@@ -0,0 +1,23 @@
datasources:
- name: name
type: type
access: proxy
org_id: 2
url: url
password: password
user: user
database: database
basic_auth: true
basic_auth_user: basic_auth_user
basic_auth_password: basic_auth_password
with_credentials: true
is_default: true
json_data:
graphiteVersion: "1.1"
tlsAuth: true
tlsAuthWithCACert: true
secure_json_data:
tlsCACert: "MjNOcW9RdkbUDHZmpco2HCYzVq9dE+i6Yi+gmUJotq5CDA=="
tlsClientCert: "ckN0dGlyMXN503YNfjTcf9CV+GGQneN+xmAclQ=="
tlsClientKey: "ZkN4aG1aNkja/gKAB1wlnKFIsy2SRDq4slrM0A=="
editable: true

View File

@@ -0,0 +1,7 @@
purge_other_datasources: true
datasources:
- name: name2
type: type2
access: proxy
org_id: 2
url: url2

View File

@@ -0,0 +1,6 @@
#sfxzgnsxzcvnbzcvn
cvbn
cvbn
c
vbn
cvbncvbn

View File

@@ -0,0 +1,7 @@
datasources:
- name: Graphite
type: graphite
access: proxy
url: http://localhost:8080
is_default: true

View File

@@ -0,0 +1,7 @@
datasources:
- name: Graphite
type: graphite
access: proxy
url: http://localhost:8080
is_default: true

View File

@@ -0,0 +1,7 @@
datasources:
- name: Prometheus
type: prometheus
access: proxy
url: http://localhost:9090
delete_datasources:
- name: old-graphite

View File

@@ -0,0 +1,7 @@
datasources:
- name: Graphite
type: graphite
access: proxy
url: http://localhost:8080
delete_datasources:
- name: old-graphite3

View File

@@ -0,0 +1,9 @@
datasources:
- name: Graphite
type: graphite
access: proxy
url: http://localhost:8080
- name: Prometheus
type: prometheus
access: proxy
url: http://localhost:9090

View File

@@ -0,0 +1,92 @@
package datasources
import "github.com/grafana/grafana/pkg/models"
import "github.com/grafana/grafana/pkg/components/simplejson"
type DatasourcesAsConfig struct {
Datasources []*DataSourceFromConfig `json:"datasources" yaml:"datasources"`
DeleteDatasources []*DeleteDatasourceConfig `json:"delete_datasources" yaml:"delete_datasources"`
}
type DeleteDatasourceConfig struct {
OrgId int64 `json:"org_id" yaml:"org_id"`
Name string `json:"name" yaml:"name"`
}
type DataSourceFromConfig struct {
OrgId int64 `json:"org_id" yaml:"org_id"`
Version int `json:"version" yaml:"version"`
Name string `json:"name" yaml:"name"`
Type string `json:"type" yaml:"type"`
Access string `json:"access" yaml:"access"`
Url string `json:"url" yaml:"url"`
Password string `json:"password" yaml:"password"`
User string `json:"user" yaml:"user"`
Database string `json:"database" yaml:"database"`
BasicAuth bool `json:"basic_auth" yaml:"basic_auth"`
BasicAuthUser string `json:"basic_auth_user" yaml:"basic_auth_user"`
BasicAuthPassword string `json:"basic_auth_password" yaml:"basic_auth_password"`
WithCredentials bool `json:"with_credentials" yaml:"with_credentials"`
IsDefault bool `json:"is_default" yaml:"is_default"`
JsonData map[string]interface{} `json:"json_data" yaml:"json_data"`
SecureJsonData map[string]string `json:"secure_json_data" yaml:"secure_json_data"`
Editable bool `json:"editable" yaml:"editable"`
}
func createInsertCommand(ds *DataSourceFromConfig) *models.AddDataSourceCommand {
jsonData := simplejson.New()
if len(ds.JsonData) > 0 {
for k, v := range ds.JsonData {
jsonData.Set(k, v)
}
}
return &models.AddDataSourceCommand{
OrgId: ds.OrgId,
Name: ds.Name,
Type: ds.Type,
Access: models.DsAccess(ds.Access),
Url: ds.Url,
Password: ds.Password,
User: ds.User,
Database: ds.Database,
BasicAuth: ds.BasicAuth,
BasicAuthUser: ds.BasicAuthUser,
BasicAuthPassword: ds.BasicAuthPassword,
WithCredentials: ds.WithCredentials,
IsDefault: ds.IsDefault,
JsonData: jsonData,
SecureJsonData: ds.SecureJsonData,
ReadOnly: !ds.Editable,
}
}
func createUpdateCommand(ds *DataSourceFromConfig, id int64) *models.UpdateDataSourceCommand {
jsonData := simplejson.New()
if len(ds.JsonData) > 0 {
for k, v := range ds.JsonData {
jsonData.Set(k, v)
}
}
return &models.UpdateDataSourceCommand{
Id: id,
OrgId: ds.OrgId,
Name: ds.Name,
Type: ds.Type,
Access: models.DsAccess(ds.Access),
Url: ds.Url,
Password: ds.Password,
User: ds.User,
Database: ds.Database,
BasicAuth: ds.BasicAuth,
BasicAuthUser: ds.BasicAuthUser,
BasicAuthPassword: ds.BasicAuthPassword,
WithCredentials: ds.WithCredentials,
IsDefault: ds.IsDefault,
JsonData: jsonData,
SecureJsonData: ds.SecureJsonData,
ReadOnly: !ds.Editable,
}
}
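
Note the inversion between config and model: editable: false in the YAML becomes ReadOnly: true on the command. A quick illustration (hypothetical values):

// Hypothetical: a non-editable config entry yields a read-only command.
cmd := createInsertCommand(&DataSourceFromConfig{Name: "Graphite", Editable: false})
fmt.Println(cmd.ReadOnly) // true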

View File

@@ -0,0 +1,14 @@
package provisioning
import (
"github.com/grafana/grafana/pkg/log"
"github.com/grafana/grafana/pkg/services/provisioning/datasources"
)
var (
logger log.Logger = log.New("services.provisioning")
)
func StartUp(datasourcePath string) error {
return datasources.Provision(datasourcePath)
}

View File

@@ -94,7 +94,12 @@ func HandleAlertsQuery(query *m.GetAlertsQuery) error {
if i > 0 {
sql.WriteString(" OR ")
}
sql.WriteString("state = ? ")
if strings.HasPrefix(v, "not_") {
sql.WriteString("state <> ? ")
v = strings.TrimPrefix(v, "not_")
} else {
sql.WriteString("state = ? ")
}
params = append(params, v)
}
sql.WriteString(")")
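
Note: the not_ prefix turns a state filter into a negated predicate. A self-contained sketch of the clause the loop above builds:

package main

import (
	"bytes"
	"fmt"
	"strings"
)

// Demonstrates the clause built for states = ["alerting", "not_paused"]:
// (state = ?  OR state <> ? ) with params [alerting paused]
func main() {
	states := []string{"alerting", "not_paused"}
	var sql bytes.Buffer
	var params []interface{}
	sql.WriteString("(")
	for i, v := range states {
		if i > 0 {
			sql.WriteString(" OR ")
		}
		if strings.HasPrefix(v, "not_") {
			sql.WriteString("state <> ? ")
			v = strings.TrimPrefix(v, "not_")
		} else {
			sql.WriteString("state = ? ")
		}
		params = append(params, v)
	}
	sql.WriteString(")")
	fmt.Println(sql.String(), params)
}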

View File

@@ -37,16 +37,18 @@ func TestAnnotations(t *testing.T) {
repo := SqlAnnotationRepo{}
Convey("Can save annotation", func() {
err := repo.Save(&annotations.Item{
annotation := &annotations.Item{
OrgId: 1,
UserId: 1,
DashboardId: 1,
Text: "hello",
Epoch: 10,
Tags: []string{"outage", "error", "type:outage", "server:server-1"},
})
}
err := repo.Save(annotation)
So(err, ShouldBeNil)
So(annotation.Id, ShouldBeGreaterThan, 0)
Convey("Can query for annotation", func() {
items, err := repo.Find(&annotations.ItemQuery{

View File

@@ -1,13 +1,17 @@
package sqlstore
import (
"strings"
"github.com/grafana/grafana/pkg/bus"
m "github.com/grafana/grafana/pkg/models"
"github.com/grafana/grafana/pkg/setting"
)
func init() {
bus.AddHandler("sql", GetDashboardVersion)
bus.AddHandler("sql", GetDashboardVersions)
bus.AddHandler("sql", DeleteExpiredVersions)
}
// GetDashboardVersion gets the dashboard version for the given dashboard ID and version number.
@@ -62,3 +66,73 @@ func GetDashboardVersions(query *m.GetDashboardVersionsQuery) error {
}
return nil
}
func DeleteExpiredVersions(cmd *m.DeleteExpiredVersionsCommand) error {
return inTransaction(func(sess *DBSession) error {
expiredCount := int64(0)
versions := []DashboardVersionExp{}
versionsToKeep := setting.DashboardVersionsToKeep
if versionsToKeep < 1 {
versionsToKeep = 1
}
err := sess.Table("dashboard_version").
Select("dashboard_version.id, dashboard_version.version, dashboard_version.dashboard_id").
Where(`dashboard_id IN (
SELECT dashboard_id FROM dashboard_version
GROUP BY dashboard_id HAVING COUNT(dashboard_version.id) > ?
)`, versionsToKeep).
Desc("dashboard_version.dashboard_id", "dashboard_version.version").
Find(&versions)
if err != nil {
return err
}
// Keep the last versionsToKeep versions per dashboard and delete the rest
versionIdsToDelete := getVersionIDsToDelete(versions, versionsToKeep)
if len(versionIdsToDelete) > 0 {
deleteExpiredSql := `DELETE FROM dashboard_version WHERE id IN (?` + strings.Repeat(",?", len(versionIdsToDelete)-1) + `)`
expiredResponse, err := sess.Exec(deleteExpiredSql, versionIdsToDelete...)
if err != nil {
return err
}
expiredCount, _ = expiredResponse.RowsAffected()
sqlog.Debug("Deleted old/expired dashboard versions", "expired", expiredCount)
}
return nil
})
}
// Short version of DashboardVersion for getting expired versions
type DashboardVersionExp struct {
Id int64 `json:"id"`
DashboardId int64 `json:"dashboardId"`
Version int `json:"version"`
}
func getVersionIDsToDelete(versions []DashboardVersionExp, versionsToKeep int) []interface{} {
versionIds := make([]interface{}, 0)
if len(versions) == 0 {
return versionIds
}
currentDashboard := versions[0].DashboardId
count := 0
for _, v := range versions {
if v.DashboardId == currentDashboard {
count++
} else {
count = 1
currentDashboard = v.DashboardId
}
if count > versionsToKeep {
versionIds = append(versionIds, v.Id)
}
}
return versionIds
}
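
Note: versions arrive sorted by dashboard_id and version, both descending, so getVersionIDsToDelete just counts rows per dashboard and collects everything past versionsToKeep. A worked example (IDs are illustrative):

// With versionsToKeep = 2, only dashboard 1 exceeds the limit,
// so id 8 (its third-newest version) is selected for deletion.
versions := []DashboardVersionExp{
	{Id: 4, DashboardId: 2, Version: 2},
	{Id: 3, DashboardId: 2, Version: 1},
	{Id: 10, DashboardId: 1, Version: 5},
	{Id: 9, DashboardId: 1, Version: 4},
	{Id: 8, DashboardId: 1, Version: 3},
}
ids := getVersionIDsToDelete(versions, 2)
fmt.Println(ids) // [8]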

View File

@@ -8,6 +8,7 @@ import (
"github.com/grafana/grafana/pkg/components/simplejson"
m "github.com/grafana/grafana/pkg/models"
"github.com/grafana/grafana/pkg/setting"
)
func updateTestDashboard(dashboard *m.Dashboard, data map[string]interface{}) {
@@ -101,3 +102,44 @@ func TestGetDashboardVersions(t *testing.T) {
})
})
}
func TestDeleteExpiredVersions(t *testing.T) {
Convey("Testing dashboard versions clean up", t, func() {
InitTestDB(t)
versionsToKeep := 5
versionsToWrite := 10
setting.DashboardVersionsToKeep = versionsToKeep
savedDash := insertTestDashboard("test dash 53", 1, "diff-all")
for i := 0; i < versionsToWrite-1; i++ {
updateTestDashboard(savedDash, map[string]interface{}{
"tags": "different-tag",
})
}
Convey("Clean up old dashboard versions", func() {
err := DeleteExpiredVersions(&m.DeleteExpiredVersionsCommand{})
So(err, ShouldBeNil)
query := m.GetDashboardVersionsQuery{DashboardId: savedDash.Id, OrgId: 1}
GetDashboardVersions(&query)
So(len(query.Result), ShouldEqual, versionsToKeep)
// Ensure latest versions were kept
So(query.Result[versionsToKeep-1].Version, ShouldEqual, versionsToWrite-versionsToKeep+1)
So(query.Result[0].Version, ShouldEqual, versionsToWrite)
})
Convey("Don't delete anything if there are no expired versions", func() {
setting.DashboardVersionsToKeep = versionsToWrite
err := DeleteExpiredVersions(&m.DeleteExpiredVersionsCommand{})
So(err, ShouldBeNil)
query := m.GetDashboardVersionsQuery{DashboardId: savedDash.Id, OrgId: 1}
GetDashboardVersions(&query)
So(len(query.Result), ShouldEqual, versionsToWrite)
})
})
}

View File

@@ -13,6 +13,7 @@ import (
func init() {
bus.AddHandler("sql", GetDataSources)
bus.AddHandler("sql", GetAllDataSources)
bus.AddHandler("sql", AddDataSource)
bus.AddHandler("sql", DeleteDataSourceById)
bus.AddHandler("sql", DeleteDataSourceByName)
@@ -54,10 +55,19 @@ func GetDataSources(query *m.GetDataSourcesQuery) error {
return sess.Find(&query.Result)
}
func GetAllDataSources(query *m.GetAllDataSourcesQuery) error {
sess := x.Limit(1000, 0).Asc("name")
query.Result = make([]*m.DataSource, 0)
return sess.Find(&query.Result)
}
func DeleteDataSourceById(cmd *m.DeleteDataSourceByIdCommand) error {
return inTransaction(func(sess *DBSession) error {
var rawSql = "DELETE FROM data_source WHERE id=? and org_id=?"
_, err := sess.Exec(rawSql, cmd.Id, cmd.OrgId)
result, err := sess.Exec(rawSql, cmd.Id, cmd.OrgId)
if err != nil {
return err
}
affected, _ := result.RowsAffected()
cmd.DeletedDatasourcesCount = affected
return nil
})
}
@@ -65,7 +75,9 @@ func DeleteDataSourceById(cmd *m.DeleteDataSourceByIdCommand) error {
func DeleteDataSourceByName(cmd *m.DeleteDataSourceByNameCommand) error {
return inTransaction(func(sess *DBSession) error {
var rawSql = "DELETE FROM data_source WHERE name=? and org_id=?"
_, err := sess.Exec(rawSql, cmd.Name, cmd.OrgId)
result, err := sess.Exec(rawSql, cmd.Name, cmd.OrgId)
if err != nil {
return err
}
affected, _ := result.RowsAffected()
cmd.DeletedDatasourcesCount = affected
return nil
})
}
@@ -98,6 +110,7 @@ func AddDataSource(cmd *m.AddDataSourceCommand) error {
Created: time.Now(),
Updated: time.Now(),
Version: 1,
ReadOnly: cmd.ReadOnly,
}
if _, err := sess.Insert(ds); err != nil {
@@ -143,12 +156,14 @@ func UpdateDataSource(cmd *m.UpdateDataSourceCommand) error {
JsonData: cmd.JsonData,
SecureJsonData: securejsondata.GetEncryptedJsonData(cmd.SecureJsonData),
Updated: time.Now(),
ReadOnly: cmd.ReadOnly,
Version: cmd.Version + 1,
}
sess.UseBool("is_default")
sess.UseBool("basic_auth")
sess.UseBool("with_credentials")
sess.UseBool("read_only")
var updateSession *xorm.Session
if cmd.Version != 0 {
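
Note: xorm omits zero-valued fields from UPDATE statements by default, so a boolean flipping to false would otherwise never be written back; UseBool forces those columns into the statement. Since UseBool is variadic, the four calls above could equivalently be collapsed:

// Equivalent to the four separate UseBool calls above.
sess.UseBool("is_default", "basic_auth", "with_credentials", "read_only")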

View File

@@ -47,6 +47,7 @@ func TestDataAccess(t *testing.T) {
Access: m.DS_ACCESS_DIRECT,
Url: "http://test",
Database: "site",
ReadOnly: true,
})
So(err, ShouldBeNil)
@@ -61,6 +62,7 @@ func TestDataAccess(t *testing.T) {
So(ds.OrgId, ShouldEqual, 10)
So(ds.Database, ShouldEqual, "site")
So(ds.ReadOnly, ShouldBeTrue)
})
Convey("Given a datasource", func() {

View File

@@ -126,4 +126,8 @@ func addDataSourceMigration(mg *Migrator) {
Sqlite(setVersionToOneWhereZero).
Postgres(setVersionToOneWhereZero).
Mysql(setVersionToOneWhereZero))
mg.AddMigration("Add read_only data column", NewAddColumnMigration(tableV2, &Column{
Name: "read_only", Type: DB_Bool, Nullable: true,
}))
}

View File

@@ -158,10 +158,14 @@ func getEngine() (*xorm.Engine, error) {
} else {
engine.SetMaxOpenConns(DbCfg.MaxOpenConn)
engine.SetMaxIdleConns(DbCfg.MaxIdleConn)
engine.SetLogger(&xorm.DiscardLogger{})
// engine.SetLogger(NewXormLogger(log.LvlInfo, log.New("sqlstore.xorm")))
// engine.ShowSQL = true
// engine.ShowInfo = true
debugSql := setting.Cfg.Section("database").Key("log_queries").MustBool(false)
if !debugSql {
engine.SetLogger(&xorm.DiscardLogger{})
} else {
engine.SetLogger(NewXormLogger(log.LvlInfo, log.New("sqlstore.xorm")))
engine.ShowSQL(true)
engine.ShowExecTime(true)
}
}
return engine, nil
}
@@ -190,12 +194,12 @@ func LoadConfig() {
DbCfg.Host = sec.Key("host").String()
DbCfg.Name = sec.Key("name").String()
DbCfg.User = sec.Key("user").String()
DbCfg.MaxOpenConn = sec.Key("max_open_conn").MustInt(0)
DbCfg.MaxIdleConn = sec.Key("max_idle_conn").MustInt(0)
if len(DbCfg.Pwd) == 0 {
DbCfg.Pwd = sec.Key("password").String()
}
}
DbCfg.MaxOpenConn = sec.Key("max_open_conn").MustInt(0)
DbCfg.MaxIdleConn = sec.Key("max_idle_conn").MustInt(0)
if DbCfg.Type == "sqlite3" {
UseSQLite3 = true

View File

@@ -50,11 +50,12 @@ var (
BuildStamp int64
// Paths
LogsPath string
HomePath string
DataPath string
PluginsPath string
CustomInitPath = "conf/custom.ini"
LogsPath string
HomePath string
DataPath string
PluginsPath string
DatasourcesPath string
CustomInitPath = "conf/custom.ini"
// Log settings.
LogModes []string
@@ -89,6 +90,9 @@ var (
SnapShotTTLDays int
SnapShotRemoveExpired bool
// Dashboard history
DashboardVersionsToKeep int
// User settings
AllowUserSignUp bool
AllowUserOrgCreate bool
@@ -470,6 +474,7 @@ func NewConfigContext(args *CommandLineArgs) error {
Env = Cfg.Section("").Key("app_mode").MustString("development")
InstanceName = Cfg.Section("").Key("instance_name").MustString("unknown_instance_name")
PluginsPath = makeAbsolute(Cfg.Section("paths").Key("plugins").String(), HomePath)
DatasourcesPath = makeAbsolute(Cfg.Section("paths").Key("datasources").String(), HomePath)
server := Cfg.Section("server")
AppUrl, AppSubUrl = parseAppUrlAndSubUrl(server)
@@ -518,6 +523,10 @@ func NewConfigContext(args *CommandLineArgs) error {
SnapShotRemoveExpired = snapshots.Key("snapshot_remove_expired").MustBool(true)
SnapShotTTLDays = snapshots.Key("snapshot_TTL_days").MustInt(90)
// read dashboard settings
dashboards := Cfg.Section("dashboards")
DashboardVersionsToKeep = dashboards.Key("versions_to_keep").MustInt(20)
// read data source proxy white list
DataProxyWhiteList = make(map[string]bool)
for _, hostAndIp := range util.SplitString(security.Key("data_source_proxy_whitelist").String()) {
@@ -661,5 +670,6 @@ func LogConfigurationInfo() {
logger.Info("Path Data", "path", DataPath)
logger.Info("Path Logs", "path", LogsPath)
logger.Info("Path Plugins", "path", PluginsPath)
logger.Info("Path Datasources", "path", DatasourcesPath)
logger.Info("App mode " + Env)
}

View File

@@ -17,6 +17,7 @@ import (
"github.com/aws/aws-sdk-go/aws"
"github.com/aws/aws-sdk-go/aws/request"
"github.com/aws/aws-sdk-go/service/cloudwatch"
"github.com/aws/aws-sdk-go/service/ec2/ec2iface"
"github.com/grafana/grafana/pkg/components/null"
"github.com/grafana/grafana/pkg/components/simplejson"
"github.com/grafana/grafana/pkg/metrics"
@@ -24,6 +25,7 @@ import (
type CloudWatchExecutor struct {
*models.DataSource
ec2Svc ec2iface.EC2API
}
type DatasourceInfo struct {
@@ -315,7 +317,8 @@ func parseResponse(resp *cloudwatch.GetMetricStatisticsOutput, query *CloudWatch
var value float64
for _, s := range append(query.Statistics, query.ExtendedStatistics...) {
series := tsdb.TimeSeries{
Tags: map[string]string{},
Tags: map[string]string{},
Points: make([]tsdb.TimePoint, 0),
}
for _, d := range query.Dimensions {
series.Tags[*d.Name] = *d.Value

View File

@@ -87,6 +87,7 @@ func init() {
"AWS/Logs": {"IncomingBytes", "IncomingLogEvents", "ForwardedBytes", "ForwardedLogEvents", "DeliveryErrors", "DeliveryThrottling"},
"AWS/ML": {"PredictCount", "PredictFailureCount"},
"AWS/NATGateway": {"PacketsOutToDestination", "PacketsOutToSource", "PacketsInFromSource", "PacketsInFromDestination", "BytesOutToDestination", "BytesOutToSource", "BytesInFromSource", "BytesInFromDestination", "ErrorPortAllocation", "ActiveConnectionCount", "ConnectionAttemptCount", "ConnectionEstablishedCount", "IdleTimeoutCount", "PacketsDropCount"},
"AWS/NetworkELB": {"ActiveFlowCount", "ConsumedLCUs", "HealthyHostCount", "NewFlowCount", "ProcessedBytes", "TCP_Client_Reset_Count", "TCP_ELB_Reset_Count", "TCP_Target_Reset_Count", "UnHealthyHostCount"},
"AWS/OpsWorks": {"cpu_idle", "cpu_nice", "cpu_system", "cpu_user", "cpu_waitio", "load_1", "load_5", "load_15", "memory_buffers", "memory_cached", "memory_free", "memory_swap", "memory_total", "memory_used", "procs"},
"AWS/Redshift": {"CPUUtilization", "DatabaseConnections", "HealthStatus", "MaintenanceMode", "NetworkReceiveThroughput", "NetworkTransmitThroughput", "PercentageDiskSpaceUsed", "ReadIOPS", "ReadLatency", "ReadThroughput", "WriteIOPS", "WriteLatency", "WriteThroughput"},
"AWS/RDS": {"ActiveTransactions", "AuroraBinlogReplicaLag", "AuroraReplicaLag", "AuroraReplicaLagMaximum", "AuroraReplicaLagMinimum", "BinLogDiskUsage", "BlockedTransactions", "BufferCacheHitRatio", "CommitLatency", "CommitThroughput", "BinLogDiskUsage", "CPUCreditBalance", "CPUCreditUsage", "CPUUtilization", "DatabaseConnections", "DDLLatency", "DDLThroughput", "Deadlocks", "DeleteLatency", "DeleteThroughput", "DiskQueueDepth", "DMLLatency", "DMLThroughput", "EngineUptime", "FailedSqlStatements", "FreeableMemory", "FreeLocalStorage", "FreeStorageSpace", "InsertLatency", "InsertThroughput", "LoginFailures", "NetworkReceiveThroughput", "NetworkTransmitThroughput", "NetworkThroughput", "Queries", "ReadIOPS", "ReadLatency", "ReadThroughput", "ReplicaLag", "ResultSetCacheHitRatio", "SelectLatency", "SelectThroughput", "SwapUsage", "TotalConnections", "UpdateLatency", "UpdateThroughput", "VolumeBytesUsed", "VolumeReadIOPS", "VolumeWriteIOPS", "WriteIOPS", "WriteLatency", "WriteThroughput"},
@@ -132,6 +133,7 @@ func init() {
"AWS/Logs": {"LogGroupName", "DestinationType", "FilterName"},
"AWS/ML": {"MLModelId", "RequestMode"},
"AWS/NATGateway": {"NatGatewayId"},
"AWS/NetworkELB": {"LoadBalancer", "TargetGroup", "AvailabilityZone"},
"AWS/OpsWorks": {"StackId", "LayerId", "InstanceId"},
"AWS/Redshift": {"NodeID", "ClusterIdentifier"},
"AWS/RDS": {"DBInstanceIdentifier", "DBClusterIdentifier", "DbClusterIdentifier", "DatabaseClass", "EngineName", "Role"},
@@ -183,6 +185,18 @@ func (e *CloudWatchExecutor) executeMetricFindQuery(ctx context.Context, queryCo
data, err = e.handleGetEbsVolumeIds(ctx, parameters, queryContext)
break
case "ec2_instance_attribute":
region := parameters.Get("region").MustString()
dsInfo := e.getDsInfo(region)
cfg, err := e.getAwsConfig(dsInfo)
if err != nil {
return nil, errors.New("Failed to call ec2:DescribeInstances")
}
sess, err := session.NewSession(cfg)
if err != nil {
return nil, errors.New("Failed to call ec2:DescribeInstances")
}
e.ec2Svc = ec2.New(sess, cfg)
data, err = e.handleGetEc2InstanceAttribute(ctx, parameters, queryContext)
break
}
@@ -373,14 +387,16 @@ func (e *CloudWatchExecutor) handleGetEc2InstanceAttribute(ctx context.Context,
var filters []*ec2.Filter
for k, v := range filterJson {
if vv, ok := v.([]string); ok {
var vvvv []*string
if vv, ok := v.([]interface{}); ok {
var vvvvv []*string
for _, vvv := range vv {
vvvv = append(vvvv, &vvv)
if vvvv, ok := vvv.(string); ok {
vvvvv = append(vvvvv, &vvvv)
}
}
filters = append(filters, &ec2.Filter{
Name: aws.String(k),
Values: vvvv,
Values: vvvvv,
})
}
}
@@ -467,24 +483,13 @@ func (e *CloudWatchExecutor) cloudwatchListMetrics(region string, namespace stri
}
func (e *CloudWatchExecutor) ec2DescribeInstances(region string, filters []*ec2.Filter, instanceIds []*string) (*ec2.DescribeInstancesOutput, error) {
dsInfo := e.getDsInfo(region)
cfg, err := e.getAwsConfig(dsInfo)
if err != nil {
return nil, errors.New("Failed to call ec2:DescribeInstances")
}
sess, err := session.NewSession(cfg)
if err != nil {
return nil, errors.New("Failed to call ec2:DescribeInstances")
}
svc := ec2.New(sess, cfg)
params := &ec2.DescribeInstancesInput{
Filters: filters,
InstanceIds: instanceIds,
}
var resp ec2.DescribeInstancesOutput
err = svc.DescribeInstancesPages(params,
err := e.ec2Svc.DescribeInstancesPages(params,
func(page *ec2.DescribeInstancesOutput, lastPage bool) bool {
reservations, _ := awsutil.ValuesAtPath(page, "Reservations")
for _, reservation := range reservations {

View File

@@ -1,13 +1,28 @@
package cloudwatch
import (
"context"
"testing"
"github.com/aws/aws-sdk-go/aws"
"github.com/aws/aws-sdk-go/service/cloudwatch"
"github.com/aws/aws-sdk-go/service/ec2"
"github.com/aws/aws-sdk-go/service/ec2/ec2iface"
"github.com/grafana/grafana/pkg/components/simplejson"
"github.com/grafana/grafana/pkg/tsdb"
. "github.com/smartystreets/goconvey/convey"
)
type mockedEc2 struct {
ec2iface.EC2API
Resp ec2.DescribeInstancesOutput
}
func (m mockedEc2) DescribeInstancesPages(in *ec2.DescribeInstancesInput, fn func(*ec2.DescribeInstancesOutput, bool) bool) error {
fn(&m.Resp, true)
return nil
}
func TestCloudWatchMetrics(t *testing.T) {
Convey("When calling getMetricsForCustomMetrics", t, func() {
@@ -66,4 +81,37 @@ func TestCloudWatchMetrics(t *testing.T) {
})
})
Convey("When calling handleGetEc2InstanceAttribute", t, func() {
executor := &CloudWatchExecutor{
ec2Svc: mockedEc2{Resp: ec2.DescribeInstancesOutput{
Reservations: []*ec2.Reservation{
{
Instances: []*ec2.Instance{
{
InstanceId: aws.String("i-12345678"),
Tags: []*ec2.Tag{
{
Key: aws.String("Environment"),
Value: aws.String("production"),
},
},
},
},
},
},
}},
}
json := simplejson.New()
json.Set("region", "us-east-1")
json.Set("attributeName", "InstanceId")
filters := make(map[string]interface{})
filters["tag:Environment"] = []string{"production"}
json.Set("filters", filters)
result, _ := executor.handleGetEc2InstanceAttribute(context.Background(), json, &tsdb.TsdbQuery{})
Convey("Should equal production InstanceId", func() {
So(result[0].Text, ShouldEqual, "i-12345678")
})
})
}

View File

@@ -2,9 +2,11 @@ package influxdb
import (
"strconv"
"time"
"github.com/grafana/grafana/pkg/components/simplejson"
"github.com/grafana/grafana/pkg/models"
"github.com/grafana/grafana/pkg/tsdb"
)
type InfluxdbQueryParser struct{}
@@ -37,13 +39,7 @@ func (qp *InfluxdbQueryParser) Parse(model *simplejson.Json, dsInfo *models.Data
return nil, err
}
interval := model.Get("interval").MustString("")
if interval == "" && dsInfo.JsonData != nil {
dsInterval := dsInfo.JsonData.Get("timeInterval").MustString("")
if dsInterval != "" {
interval = dsInterval
}
}
parsedInterval, err := tsdb.GetIntervalFrom(dsInfo, model, time.Millisecond*1)
return &Query{
Measurement: measurement,
@@ -53,7 +49,7 @@ func (qp *InfluxdbQueryParser) Parse(model *simplejson.Json, dsInfo *models.Data
Tags: tags,
Selects: selects,
RawQuery: rawQuery,
Interval: interval,
Interval: parsedInterval,
Alias: alias,
UseRawQuery: useRawQuery,
}, nil
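
Note: per-query interval parsing moved into the shared tsdb.GetIntervalFrom helper, which strips a leading "<" or ">" before parsing the duration. A small sketch of the new behavior (the empty DataSource is a stand-in):

// ">20s" from the query model parses to a plain 20s duration.
model := simplejson.NewFromAny(map[string]interface{}{"interval": ">20s"})
parsed, err := tsdb.GetIntervalFrom(&models.DataSource{}, model, time.Millisecond)
fmt.Println(parsed, err) // 20s <nil>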

View File

@@ -2,6 +2,7 @@ package influxdb
import (
"testing"
"time"
"github.com/grafana/grafana/pkg/components/simplejson"
"github.com/grafana/grafana/pkg/models"
@@ -115,7 +116,7 @@ func TestInfluxdbQueryParser(t *testing.T) {
So(len(res.GroupBy), ShouldEqual, 3)
So(len(res.Selects), ShouldEqual, 3)
So(len(res.Tags), ShouldEqual, 2)
So(res.Interval, ShouldEqual, ">20s")
So(res.Interval, ShouldEqual, time.Second*20)
So(res.Alias, ShouldEqual, "serie alias")
})
@@ -174,7 +175,7 @@ func TestInfluxdbQueryParser(t *testing.T) {
So(len(res.GroupBy), ShouldEqual, 2)
So(len(res.Selects), ShouldEqual, 1)
So(len(res.Tags), ShouldEqual, 0)
So(res.Interval, ShouldEqual, ">10s")
So(res.Interval, ShouldEqual, time.Second*10)
})
})
}

View File

@@ -1,5 +1,7 @@
package influxdb
import "time"
type Query struct {
Measurement string
Policy string
@@ -10,8 +12,7 @@ type Query struct {
RawQuery string
UseRawQuery bool
Alias string
Interval string
Interval time.Duration
}
type Tag struct {

View File

@@ -29,10 +29,8 @@ func (query *Query) Build(queryContext *tsdb.TsdbQuery) (string, error) {
res += query.renderGroupBy(queryContext)
}
interval, err := getDefinedInterval(query, queryContext)
if err != nil {
return "", err
}
calculator := tsdb.NewIntervalCalculator(&tsdb.IntervalOptions{})
interval := calculator.Calculate(queryContext.TimeRange, query.Interval)
res = strings.Replace(res, "$timeFilter", query.renderTimeFilter(queryContext), -1)
res = strings.Replace(res, "$interval", interval.Text, -1)
@@ -41,29 +39,6 @@ func (query *Query) Build(queryContext *tsdb.TsdbQuery) (string, error) {
return res, nil
}
func getDefinedInterval(query *Query, queryContext *tsdb.TsdbQuery) (*tsdb.Interval, error) {
defaultInterval := tsdb.CalculateInterval(queryContext.TimeRange)
if query.Interval == "" {
return &defaultInterval, nil
}
setInterval := strings.Replace(strings.Replace(query.Interval, "<", "", 1), ">", "", 1)
parsedSetInterval, err := time.ParseDuration(setInterval)
if err != nil {
return nil, err
}
if strings.Contains(query.Interval, ">") {
if defaultInterval.Value > parsedSetInterval {
return &defaultInterval, nil
}
}
return &tsdb.Interval{Value: parsedSetInterval, Text: setInterval}, nil
}
func (query *Query) renderTags() []string {
var res []string
for i, tag := range query.Tags {

View File

@@ -2,6 +2,7 @@ package influxdb
import (
"testing"
"time"
"strings"
@@ -38,7 +39,7 @@ func TestInfluxdbQueryBuilder(t *testing.T) {
Measurement: "cpu",
Policy: "policy",
GroupBy: []*QueryPart{groupBy1, groupBy3},
Interval: "10s",
Interval: time.Second * 10,
}
rawQuery, err := query.Build(queryContext)
@@ -52,7 +53,7 @@ func TestInfluxdbQueryBuilder(t *testing.T) {
Measurement: "cpu",
GroupBy: []*QueryPart{groupBy1, groupBy2, groupBy3},
Tags: []*Tag{tag1, tag2},
Interval: "5s",
Interval: time.Second * 5,
}
rawQuery, err := query.Build(queryContext)
@@ -64,7 +65,7 @@ func TestInfluxdbQueryBuilder(t *testing.T) {
query := &Query{
Selects: []*Select{{*qp1, *qp2, *mathPartDivideBy100}},
Measurement: "cpu",
Interval: "5s",
Interval: time.Second * 5,
}
rawQuery, err := query.Build(queryContext)
@@ -76,7 +77,7 @@ func TestInfluxdbQueryBuilder(t *testing.T) {
query := &Query{
Selects: []*Select{{*qp1, *qp2, *mathPartDivideByIntervalMs}},
Measurement: "cpu",
Interval: "5s",
Interval: time.Second * 5,
}
rawQuery, err := query.Build(queryContext)
@@ -117,7 +118,7 @@ func TestInfluxdbQueryBuilder(t *testing.T) {
Measurement: "cpu",
Policy: "policy",
GroupBy: []*QueryPart{groupBy1, groupBy3},
Interval: "10s",
Interval: time.Second * 10,
RawQuery: "Raw query",
UseRawQuery: true,
}

View File

@@ -2,14 +2,18 @@ package tsdb
import (
"fmt"
"strings"
"time"
"github.com/grafana/grafana/pkg/components/simplejson"
"github.com/grafana/grafana/pkg/models"
)
var (
defaultRes int64 = 1500
minInterval time.Duration = 1 * time.Millisecond
year time.Duration = time.Hour * 24 * 365
day time.Duration = time.Hour * 24 * 365
defaultRes int64 = 1500
defaultMinInterval time.Duration = 1 * time.Millisecond
year time.Duration = time.Hour * 24 * 365
day time.Duration = time.Hour * 24
)
type Interval struct {
@@ -17,14 +21,68 @@ type Interval struct {
Value time.Duration
}
func CalculateInterval(timerange *TimeRange) Interval {
interval := time.Duration((timerange.MustGetTo().UnixNano() - timerange.MustGetFrom().UnixNano()) / defaultRes)
type intervalCalculator struct {
minInterval time.Duration
}
if interval < minInterval {
return Interval{Text: formatDuration(minInterval), Value: interval}
type IntervalCalculator interface {
Calculate(timeRange *TimeRange, minInterval time.Duration) Interval
}
type IntervalOptions struct {
MinInterval time.Duration
}
func NewIntervalCalculator(opt *IntervalOptions) *intervalCalculator {
if opt == nil {
opt = &IntervalOptions{}
}
return Interval{Text: formatDuration(roundInterval(interval)), Value: interval}
calc := &intervalCalculator{}
if opt.MinInterval == 0 {
calc.minInterval = defaultMinInterval
} else {
calc.minInterval = opt.MinInterval
}
return calc
}
func (ic *intervalCalculator) Calculate(timerange *TimeRange, minInterval time.Duration) Interval {
to := timerange.MustGetTo().UnixNano()
from := timerange.MustGetFrom().UnixNano()
interval := time.Duration((to - from) / defaultRes)
if interval < minInterval {
return Interval{Text: formatDuration(minInterval), Value: minInterval}
}
rounded := roundInterval(interval)
return Interval{Text: formatDuration(rounded), Value: rounded}
}
func GetIntervalFrom(dsInfo *models.DataSource, queryModel *simplejson.Json, defaultInterval time.Duration) (time.Duration, error) {
interval := queryModel.Get("interval").MustString("")
if interval == "" && dsInfo.JsonData != nil {
dsInterval := dsInfo.JsonData.Get("timeInterval").MustString("")
if dsInterval != "" {
interval = dsInterval
}
}
if interval == "" {
return defaultInterval, nil
}
interval = strings.Replace(strings.Replace(interval, "<", "", 1), ">", "", 1)
parsedInterval, err := time.ParseDuration(interval)
if err != nil {
return time.Duration(0), err
}
return parsedInterval, nil
}
func formatDuration(inter time.Duration) string {
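
A minimal usage sketch of the new calculator and GetIntervalFrom (the queryModel JSON is hypothetical; expected values come from the tests below):

package main

import (
	"fmt"
	"time"

	"github.com/grafana/grafana/pkg/components/simplejson"
	"github.com/grafana/grafana/pkg/models"
	"github.com/grafana/grafana/pkg/tsdb"
)

func main() {
	// 30m / defaultRes (1500) = 1.2s, which roundInterval snaps to 1s.
	calc := tsdb.NewIntervalCalculator(&tsdb.IntervalOptions{})
	tr := tsdb.NewTimeRange("30m", "now")
	fmt.Println(calc.Calculate(tr, time.Millisecond).Text) // "1s"

	// GetIntervalFrom strips the legacy "<"/">" prefixes before parsing,
	// so a panel interval of ">20s" comes back as 20 * time.Second.
	queryModel, _ := simplejson.NewJson([]byte(`{"interval": ">20s"}`))
	dsInfo := &models.DataSource{JsonData: simplejson.New()}
	d, _ := tsdb.GetIntervalFrom(dsInfo, queryModel, time.Second*15)
	fmt.Println(d) // 20s
}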

View File

@@ -14,31 +14,33 @@ func TestInterval(t *testing.T) {
HomePath: "../../",
})
calculator := NewIntervalCalculator(&IntervalOptions{})
Convey("for 5min", func() {
tr := NewTimeRange("5m", "now")
interval := CalculateInterval(tr)
interval := calculator.Calculate(tr, time.Millisecond*1)
So(interval.Text, ShouldEqual, "200ms")
})
Convey("for 15min", func() {
tr := NewTimeRange("15m", "now")
interval := CalculateInterval(tr)
interval := calculator.Calculate(tr, time.Millisecond*1)
So(interval.Text, ShouldEqual, "500ms")
})
Convey("for 30min", func() {
tr := NewTimeRange("30m", "now")
interval := CalculateInterval(tr)
interval := calculator.Calculate(tr, time.Millisecond*1)
So(interval.Text, ShouldEqual, "1s")
})
Convey("for 1h", func() {
tr := NewTimeRange("1h", "now")
interval := CalculateInterval(tr)
interval := calculator.Calculate(tr, time.Millisecond*1)
So(interval.Text, ShouldEqual, "2s")
})
@@ -51,6 +53,7 @@ func TestInterval(t *testing.T) {
So(formatDuration(time.Second*61), ShouldEqual, "1m")
So(formatDuration(time.Millisecond*30), ShouldEqual, "30ms")
So(formatDuration(time.Hour*23), ShouldEqual, "23h")
So(formatDuration(time.Hour*24), ShouldEqual, "1d")
So(formatDuration(time.Hour*24*367), ShouldEqual, "1y")
})
})

View File

@@ -72,6 +72,7 @@ func (m *PostgresMacroEngine) evaluateMacro(name string, args []string) (string,
}
return fmt.Sprintf("extract(epoch from %s) as \"time\"", args[0]), nil
case "__timeFilter":
// don't use to_timestamp in this macro for redshift compatibility #9566
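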
if len(args) == 0 {
return "", fmt.Errorf("missing time column argument for macro %v", name)
}

View File

@@ -4,6 +4,7 @@ import (
"container/list"
"context"
"fmt"
"net/url"
"strconv"
"time"
@@ -52,7 +53,7 @@ func generateConnectionString(datasource *models.DataSource) string {
}
sslmode := datasource.JsonData.Get("sslmode").MustString("verify-full")
return fmt.Sprintf("postgres://%s:%s@%s/%s?sslmode=%s", datasource.User, password, datasource.Url, datasource.Database, sslmode)
return fmt.Sprintf("postgres://%s:%s@%s/%s?sslmode=%s", url.PathEscape(datasource.User), url.PathEscape(password), url.PathEscape(datasource.Url), url.PathEscape(datasource.Database), url.QueryEscape(sslmode))
}
func (e *PostgresQueryEndpoint) Query(ctx context.Context, dsInfo *models.DataSource, tsdbQuery *tsdb.TsdbQuery) (*tsdb.Response, error) {
@@ -186,7 +187,7 @@ func (e PostgresQueryEndpoint) transformToTimeSeries(query *tsdb.Query, rows *co
case float64:
timestamp = columnValue * 1000
case time.Time:
timestamp = float64(columnValue.Unix() * 1000)
timestamp = float64(columnValue.UnixNano() / 1e6)
default:
return fmt.Errorf("Invalid type for column time, must be of type timestamp or unix timestamp")
}
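
Both fixes in this file, sketched with the standard library only (the timestamp value is hypothetical):

package main

import (
	"fmt"
	"net/url"
	"time"
)

func main() {
	// Credentials containing '/' or spaces used to corrupt the DSN;
	// escaping each component keeps the connection URL parseable.
	fmt.Println(url.PathEscape("p/ss word")) // "p%2Fss%20word"

	// Unix()*1000 truncates to whole seconds, while UnixNano()/1e6
	// keeps millisecond precision for the time column.
	t := time.Date(2017, 11, 17, 15, 49, 17, 123000000, time.UTC)
	fmt.Println(t.Unix() * 1000)    // ends in 000, the 123ms fraction is lost
	fmt.Println(t.UnixNano() / 1e6) // ends in 123, milliseconds preserved
}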

View File

@@ -48,14 +48,16 @@ func NewPrometheusExecutor(dsInfo *models.DataSource) (tsdb.TsdbQueryEndpoint, e
}
var (
plog log.Logger
legendFormat *regexp.Regexp
plog log.Logger
legendFormat *regexp.Regexp
intervalCalculator tsdb.IntervalCalculator
)
func init() {
plog = log.New("tsdb.prometheus")
tsdb.RegisterTsdbQueryEndpoint("prometheus", NewPrometheusExecutor)
legendFormat = regexp.MustCompile(`\{\{\s*(.+?)\s*\}\}`)
intervalCalculator = tsdb.NewIntervalCalculator(&tsdb.IntervalOptions{MinInterval: time.Second * 1})
}
func (e *PrometheusExecutor) getClient(dsInfo *models.DataSource) (apiv1.API, error) {
@@ -88,7 +90,7 @@ func (e *PrometheusExecutor) Query(ctx context.Context, dsInfo *models.DataSourc
return nil, err
}
query, err := parseQuery(tsdbQuery.Queries, tsdbQuery)
query, err := parseQuery(dsInfo, tsdbQuery.Queries, tsdbQuery)
if err != nil {
return nil, err
}
@@ -138,7 +140,7 @@ func formatLegend(metric model.Metric, query *PrometheusQuery) string {
return string(result)
}
func parseQuery(queries []*tsdb.Query, queryContext *tsdb.TsdbQuery) (*PrometheusQuery, error) {
func parseQuery(dsInfo *models.DataSource, queries []*tsdb.Query, queryContext *tsdb.TsdbQuery) (*PrometheusQuery, error) {
queryModel := queries[0]
expr, err := queryModel.Model.Get("expr").String()
@@ -146,11 +148,6 @@ func parseQuery(queries []*tsdb.Query, queryContext *tsdb.TsdbQuery) (*Prometheu
return nil, err
}
step, err := queryModel.Model.Get("step").Int64()
if err != nil {
return nil, err
}
format := queryModel.Model.Get("legendFormat").MustString("")
start, err := queryContext.TimeRange.ParseFrom()
@@ -163,9 +160,18 @@ func parseQuery(queries []*tsdb.Query, queryContext *tsdb.TsdbQuery) (*Prometheu
return nil, err
}
dsInterval, err := tsdb.GetIntervalFrom(dsInfo, queryModel.Model, time.Second*15)
if err != nil {
return nil, err
}
intervalFactor := queryModel.Model.Get("intervalFactor").MustInt64(1)
interval := intervalCalculator.Calculate(queryContext.TimeRange, dsInterval)
step := time.Duration(int64(interval.Value) * intervalFactor)
return &PrometheusQuery{
Expr: expr,
Step: time.Second * time.Duration(step),
Step: step,
LegendFormat: format,
Start: start,
End: end,
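
Worked numbers, matching the tests below: a 48h range divided by defaultRes (1500) gives 115.2s, which rounds to 2m; multiplying by intervalFactor 10 yields a 20m step, while over 1h the 15s datasource default wins because 1h / 1500 = 2.4s. A minimal sketch (the factor of 10 is hypothetical):

package main

import (
	"fmt"
	"time"

	"github.com/grafana/grafana/pkg/tsdb"
)

func main() {
	// Same calculator the executor builds in init().
	calc := tsdb.NewIntervalCalculator(&tsdb.IntervalOptions{MinInterval: time.Second})
	interval := calc.Calculate(tsdb.NewTimeRange("48h", "now"), 15*time.Second)

	// step = interval * intervalFactor, as in parseQuery above.
	step := time.Duration(int64(interval.Value) * 10)
	fmt.Println(interval.Value, step) // 2m0s 20m0s
}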

View File

@@ -2,13 +2,21 @@ package prometheus
import (
"testing"
"time"
"github.com/grafana/grafana/pkg/models"
"github.com/grafana/grafana/pkg/tsdb"
"github.com/grafana/grafana/pkg/components/simplejson"
p "github.com/prometheus/common/model"
. "github.com/smartystreets/goconvey/convey"
)
func TestPrometheus(t *testing.T) {
Convey("Prometheus", t, func() {
dsInfo := &models.DataSource{
JsonData: simplejson.New(),
}
Convey("converting metric name", func() {
metric := map[p.LabelName]p.LabelValue{
@@ -36,5 +44,108 @@ func TestPrometheus(t *testing.T) {
So(formatLegend(metric, query), ShouldEqual, `http_request_total{app="backend", device="mobile"}`)
})
Convey("parsing query model with step", func() {
json := `{
"expr": "go_goroutines",
"format": "time_series",
"refId": "A"
}`
jsonModel, _ := simplejson.NewJson([]byte(json))
queryContext := &tsdb.TsdbQuery{}
queryModels := []*tsdb.Query{
{Model: jsonModel},
}
Convey("with 48h time range", func() {
queryContext.TimeRange = tsdb.NewTimeRange("12h", "now")
model, err := parseQuery(dsInfo, queryModels, queryContext)
So(err, ShouldBeNil)
So(model.Step, ShouldEqual, time.Second*30)
})
})
Convey("parsing query model without step parameter", func() {
json := `{
"expr": "go_goroutines",
"format": "time_series",
"intervalFactor": 1,
"refId": "A"
}`
jsonModel, _ := simplejson.NewJson([]byte(json))
queryContext := &tsdb.TsdbQuery{}
queryModels := []*tsdb.Query{
{Model: jsonModel},
}
Convey("with 48h time range", func() {
queryContext.TimeRange = tsdb.NewTimeRange("48h", "now")
model, err := parseQuery(dsInfo, queryModels, queryContext)
So(err, ShouldBeNil)
So(model.Step, ShouldEqual, time.Minute*2)
})
Convey("with 1h time range", func() {
queryContext.TimeRange = tsdb.NewTimeRange("1h", "now")
model, err := parseQuery(dsInfo, queryModels, queryContext)
So(err, ShouldBeNil)
So(model.Step, ShouldEqual, time.Second*15)
})
})
Convey("parsing query model with intervalFactor", func() {
Convey("high intervalFactor", func() {
json := `{
"expr": "go_goroutines",
"format": "time_series",
"intervalFactor": 10,
"refId": "A"
}`
jsonModel, _ := simplejson.NewJson([]byte(json))
queryContext := &tsdb.TsdbQuery{}
queryModels := []*tsdb.Query{
{Model: jsonModel},
}
Convey("with 48h time range", func() {
queryContext.TimeRange = tsdb.NewTimeRange("48h", "now")
model, err := parseQuery(dsInfo, queryModels, queryContext)
So(err, ShouldBeNil)
So(model.Step, ShouldEqual, time.Minute*20)
})
})
Convey("low intervalFactor", func() {
json := `{
"expr": "go_goroutines",
"format": "time_series",
"intervalFactor": 1,
"refId": "A"
}`
jsonModel, _ := simplejson.NewJson([]byte(json))
queryContext := &tsdb.TsdbQuery{}
queryModels := []*tsdb.Query{
{Model: jsonModel},
}
Convey("with 48h time range", func() {
queryContext.TimeRange = tsdb.NewTimeRange("48h", "now")
model, err := parseQuery(dsInfo, queryModels, queryContext)
So(err, ShouldBeNil)
So(model.Step, ShouldEqual, time.Minute*2)
})
})
})
})
}

View File

@@ -57,12 +57,13 @@ func (e *DefaultSqlEngine) InitEngine(driverName string, dsInfo *models.DataSour
}
engine, err := xorm.NewEngine(driverName, cnnstr)
engine.SetMaxOpenConns(10)
engine.SetMaxIdleConns(10)
if err != nil {
return err
}
engine.SetMaxOpenConns(10)
engine.SetMaxIdleConns(10)
engineCache.cache[dsInfo.Id] = engine
e.XormEngine = engine
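
The reorder matters: xorm.NewEngine can return a nil engine together with an error, so calling SetMaxOpenConns before the err check risked a nil pointer dereference on a bad connection string.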

View File

@@ -8,7 +8,6 @@ import (
"encoding/base64"
"encoding/hex"
"errors"
"fmt"
"hash"
"strings"
)
@@ -30,7 +29,7 @@ func GetRandomString(n int, alphabets ...byte) string {
func EncodePassword(password string, salt string) string {
newPasswd := PBKDF2([]byte(password), []byte(salt), 10000, 50, sha256.New)
return fmt.Sprintf("%x", newPasswd)
return hex.EncodeToString(newPasswd)
}
// Encode string to md5 hex value.
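
hex.EncodeToString produces the same lowercase hex as the fmt.Sprintf("%x", ...) call it replaces, so stored password hashes are unaffected; a quick check:

package main

import (
	"encoding/hex"
	"fmt"
)

func main() {
	b := []byte{0xde, 0xad, 0xbe, 0xef}
	fmt.Printf("%x\n", b)              // deadbeef
	fmt.Println(hex.EncodeToString(b)) // deadbeef, byte-for-byte identical
}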

View File

@@ -7,7 +7,6 @@ import (
)
func TestEncoding(t *testing.T) {
Convey("When generating base64 header", t, func() {
result := GetBasicAuthHeader("grafana", "1234")
@@ -23,4 +22,8 @@ func TestEncoding(t *testing.T) {
So(password, ShouldEqual, "1234")
})
Convey("When encoding password", t, func() {
encodedPassword := EncodePassword("iamgod", "pepper")
So(encodedPassword, ShouldEqual, "e59c568621e57756495a468f47c74e07c911b037084dd464bb2ed72410970dc849cabd71b48c394faf08a5405dae53741ce9")
})
}