A lot of work on database schema and migration setup: Postgres now works, every integration test passes for all database types; only the token table is left to do.

Torkel Ödegaard 2015-01-20 14:15:48 +01:00
parent 8bb9126b77
commit afb847acc8
20 changed files with 246 additions and 175 deletions
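For orientation, the migration path this commit wires up (condensed from the SetEngine and NewMigrator hunks below) is: open an xorm engine, resolve a Dialect from the driver name, register the migrations, and run them. A rough sketch under that reading, with a hypothetical helper name and the error wrapping trimmed:

package sqlstore

import (
	"github.com/go-xorm/xorm"
	"github.com/torkelo/grafana-pro/pkg/services/sqlstore/migrator"
)

// runMigrations is a hypothetical condensation of what SetEngine does below.
func runMigrations(driverName, connStr string) error {
	x, err := xorm.NewEngine(driverName, connStr)
	if err != nil {
		return err
	}
	mg := migrator.NewMigrator(x) // resolves the Dialect from x.DriverName()
	addMigrations(mg)             // migration_log, user, account, dashboard, data_source
	return mg.Start()             // generates and executes dialect-specific SQL in order
}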

View File

@ -33,6 +33,8 @@ type DataSource struct {
User string
Database string
BasicAuth bool
BasicAuthUser string
BasicAuthPassword string
IsDefault bool
Created time.Time

View File

@ -81,7 +81,7 @@ func TestAccountDataAccess(t *testing.T) {
err := SetUsingAccount(&cmd)
So(err, ShouldBeNil)
Convey("Logged in user query should return correct using account info", func() {
Convey("SignedInUserQuery with a different account", func() {
query := m.GetSignedInUserQuery{UserId: ac2.Id}
err := GetSignedInUser(&query)

View File

@ -44,11 +44,11 @@ func SaveDashboard(cmd *m.SaveDashboardCommand) error {
// insert new tags
tags := dash.GetTags()
if len(tags) > 0 {
tagRows := make([]DashboardTag, len(tags))
for _, tag := range tags {
tagRows = append(tagRows, DashboardTag{Term: tag, DashboardId: dash.Id})
if _, err := sess.Insert(&DashboardTag{DashboardId: dash.Id, Term: tag}); err != nil {
return err
}
}
sess.InsertMulti(&tagRows)
}
cmd.Result = dash
@ -120,8 +120,7 @@ func SearchDashboards(query *m.SearchDashboardsQuery) error {
}
func GetDashboardTags(query *m.GetDashboardTagsQuery) error {
sess := x.Sql("select count() as count, term from dashboard_tag group by term")
sess := x.Sql("select count(*) as count, term from dashboard_tag group by term")
err := sess.Find(&query.Result)
return err
}
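Two easy-to-miss fixes in this file: the tag rows were previously collected with make([]DashboardTag, len(tags)) followed by append, which leaves len(tags) zero-valued rows in front of the real ones (and InsertMulti's error was ignored), and bare count() is rejected by Postgres, hence count(*). A standalone sketch of the slice pitfall, not taken from the commit:

package main

import "fmt"

// Minimal stand-in for the tag row type used above.
type DashboardTag struct {
	DashboardId int64
	Term        string
}

func main() {
	tags := []string{"prod", "db"}

	// Old pattern: make with a length, then append on top of it.
	rows := make([]DashboardTag, len(tags))
	for _, tag := range tags {
		rows = append(rows, DashboardTag{Term: tag})
	}
	fmt.Println(len(rows)) // 4: two zero-valued rows plus the two real ones

	// Pre-sizing with a capacity avoids the empty rows; the commit instead
	// inserts each tag row individually and checks the error.
	rows = make([]DashboardTag, 0, len(tags))
	for _, tag := range tags {
		rows = append(rows, DashboardTag{DashboardId: 1, Term: tag})
	}
	fmt.Println(len(rows)) // 2
}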

View File

@ -95,7 +95,7 @@ func TestDashboardDataAccess(t *testing.T) {
err := GetDashboardTags(&query)
So(err, ShouldBeNil)
So(len(query.Result), ShouldEqual, 3)
So(len(query.Result), ShouldEqual, 2)
})
})
})

View File

@ -8,16 +8,24 @@ import (
. "github.com/smartystreets/goconvey/convey"
m "github.com/torkelo/grafana-pro/pkg/models"
"github.com/torkelo/grafana-pro/pkg/services/sqlstore/sqlutil"
)
func InitTestDB(t *testing.T) {
x, err := xorm.NewEngine("sqlite3", ":memory:")
x, err := xorm.NewEngine(sqlutil.TestDB_Sqlite3.DriverName, sqlutil.TestDB_Sqlite3.ConnStr)
//x, err := xorm.NewEngine(sqlutil.TestDB_Mysql.DriverName, sqlutil.TestDB_Mysql.ConnStr)
//x, err := xorm.NewEngine(sqlutil.TestDB_Postgres.DriverName, sqlutil.TestDB_Postgres.ConnStr)
if err != nil {
t.Fatalf("Failed to init in memory sqllite3 db %v", err)
}
SetEngine(x, false)
sqlutil.CleanDB(x)
if err := SetEngine(x, false); err != nil {
t.Fatal(err)
}
}
type Test struct {

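InitTestDB now resolves its connection through sqlutil, so pointing the data-access tests at MySQL or Postgres is a matter of swapping one commented line, and CleanDB wipes the database before SetEngine reruns the migrations. A sketch of how a test in this package would lean on it (the test body is assumed, not part of the commit):

package sqlstore

import (
	"testing"

	. "github.com/smartystreets/goconvey/convey"
)

// Hypothetical example; the data-access tests touched by this commit follow the same shape.
func TestSomeDataAccess(t *testing.T) {
	Convey("Given an initialized test database", t, func() {
		InitTestDB(t) // clean schema with all migrations applied

		// ... run sqlstore commands and queries here ...
	})
}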
View File

@ -1,12 +1,13 @@
package migrations
package sqlstore
import "time"
import . "github.com/torkelo/grafana-pro/pkg/services/sqlstore/migrator"
func AddMigrations(mg *Migrator) {
func addMigrations(mg *Migrator) {
addMigrationLogMigrations(mg)
addUserMigrations(mg)
addAccountMigrations(mg)
addDashboardMigration(mg)
addDataSourceMigration(mg)
}
func addMigrationLogMigrations(mg *Migrator) {
@ -38,10 +39,10 @@ func addUserMigrations(mg *Migrator) {
))
//------- user table indexes ------------------
mg.AddMigration("add unique index UIX_user.login", new(AddIndexMigration).
Name("UIX_user_login").Table("user").Columns("login"))
mg.AddMigration("add unique index UIX_user.email", new(AddIndexMigration).
Name("UIX_user_email").Table("user").Columns("email"))
mg.AddMigration("add unique index user.login", new(AddIndexMigration).
Table("user").Columns("login").Unique())
mg.AddMigration("add unique index user.email", new(AddIndexMigration).
Table("user").Columns("email").Unique())
}
func addAccountMigrations(mg *Migrator) {
@ -53,8 +54,8 @@ func addAccountMigrations(mg *Migrator) {
&Column{Name: "updated", Type: DB_DateTime, Nullable: false},
))
mg.AddMigration("add unique index UIX_account.name", new(AddIndexMigration).
Name("UIX_account_name").Table("account").Columns("name"))
mg.AddMigration("add unique index account.name", new(AddIndexMigration).
Table("account").Columns("name").Unique())
//------- account_user table -------------------
mg.AddMigration("create account_user table", new(AddTableMigration).
@ -67,20 +68,8 @@ func addAccountMigrations(mg *Migrator) {
&Column{Name: "updated", Type: DB_DateTime},
))
mg.AddMigration("add unique index UIX_account_user", new(AddIndexMigration).
Name("UIX_account_user").Table("account_user").Columns("account_id", "user_id"))
}
type Dashboard struct {
Id int64
Slug string `xorm:"index(IX_AccountIdSlug)"`
AccountId int64 `xorm:"index(IX_AccountIdSlug)"`
Created time.Time
Updated time.Time
Title string
Data map[string]interface{}
mg.AddMigration("add unique index account_user_aid_uid", new(AddIndexMigration).
Name("aid_uid").Table("account_user").Columns("account_id", "user_id").Unique())
}
func addDashboardMigration(mg *Migrator) {
@ -95,10 +84,48 @@ func addDashboardMigration(mg *Migrator) {
&Column{Name: "updated", Type: DB_DateTime, Nullable: false},
))
//------- indexes ------------------
mg.AddMigration("add unique index UIX_dashboard.account_id", new(AddIndexMigration).
Name("UIX_dashboard_account_id").Table("dashboard").Columns("account_id"))
mg.AddMigration("create dashboard_tag table", new(AddTableMigration).
Name("dashboard_tag").WithColumns(
&Column{Name: "id", Type: DB_BigInt, IsPrimaryKey: true, IsAutoIncrement: true},
&Column{Name: "dashboard_id", Type: DB_BigInt, Nullable: false},
&Column{Name: "term", Type: DB_NVarchar, Length: 50, Nullable: false},
))
mg.AddMigration("add unique index UIX_dashboard_account_id_slug", new(AddIndexMigration).
Name("UIX_dashboard_account_id_slug").Table("dashboard").Columns("account_id", "slug"))
//------- indexes ------------------
mg.AddMigration("add index dashboard.account_id", new(AddIndexMigration).
Table("dashboard").Columns("account_id"))
mg.AddMigration("add unique index dashboard_account_id_slug", new(AddIndexMigration).
Table("dashboard").Columns("account_id", "slug").Unique())
mg.AddMigration("add unique index dashboard_tag.dasboard_id_term", new(AddIndexMigration).
Table("dashboard_tag").Columns("dashboard_id", "term").Unique())
}
func addDataSourceMigration(mg *Migrator) {
mg.AddMigration("create data_source table", new(AddTableMigration).
Name("data_source").WithColumns(
&Column{Name: "id", Type: DB_BigInt, IsPrimaryKey: true, IsAutoIncrement: true},
&Column{Name: "account_id", Type: DB_BigInt, Nullable: false},
&Column{Name: "type", Type: DB_NVarchar, Length: 255, Nullable: false},
&Column{Name: "name", Type: DB_NVarchar, Length: 255, Nullable: false},
&Column{Name: "access", Type: DB_NVarchar, Length: 255, Nullable: false},
&Column{Name: "url", Type: DB_NVarchar, Length: 255, Nullable: false},
&Column{Name: "password", Type: DB_NVarchar, Length: 255, Nullable: true},
&Column{Name: "user", Type: DB_NVarchar, Length: 255, Nullable: true},
&Column{Name: "database", Type: DB_NVarchar, Length: 255, Nullable: true},
&Column{Name: "basic_auth", Type: DB_Bool, Nullable: false},
&Column{Name: "basic_auth_user", Type: DB_NVarchar, Length: 255, Nullable: true},
&Column{Name: "basic_auth_password", Type: DB_NVarchar, Length: 255, Nullable: true},
&Column{Name: "is_default", Type: DB_Bool, Nullable: false},
&Column{Name: "created", Type: DB_DateTime, Nullable: false},
&Column{Name: "updated", Type: DB_DateTime, Nullable: false},
))
//------- indexes ------------------
mg.AddMigration("add index data_source.account_id", new(AddIndexMigration).
Table("data_source").Columns("account_id"))
mg.AddMigration("add unique index data_source.account_id_name", new(AddIndexMigration).
Table("data_source").Columns("account_id", "name").Unique())
}
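Per the commit message only the token table is still unmigrated. Against the same fluent API, and living in this file with its dot-import of the migrator package, it would presumably look roughly like the sketch below (the column set is an assumption; only the m.Token registration in sqlstore.go hints at the table), plus a matching call from addMigrations:

// Hypothetical sketch of the remaining token migration; columns are assumed.
func addTokenMigrations(mg *Migrator) {
	mg.AddMigration("create token table", new(AddTableMigration).
		Name("token").WithColumns(
		&Column{Name: "id", Type: DB_BigInt, IsPrimaryKey: true, IsAutoIncrement: true},
		&Column{Name: "account_id", Type: DB_BigInt, Nullable: false},
		&Column{Name: "name", Type: DB_NVarchar, Length: 255, Nullable: false},
		&Column{Name: "token", Type: DB_NVarchar, Length: 255, Nullable: false},
		&Column{Name: "created", Type: DB_DateTime, Nullable: false},
		&Column{Name: "updated", Type: DB_DateTime, Nullable: false},
	))

	mg.AddMigration("add index token.account_id", new(AddIndexMigration).
		Table("token").Columns("account_id"))
}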

View File

@ -1,80 +0,0 @@
package migrations
import (
"fmt"
"strings"
"testing"
"github.com/go-xorm/xorm"
"github.com/torkelo/grafana-pro/pkg/log"
. "github.com/smartystreets/goconvey/convey"
)
func cleanDB(x *xorm.Engine) {
tables, _ := x.DBMetas()
sess := x.NewSession()
defer sess.Close()
for _, table := range tables {
if _, err := sess.Exec("SET FOREIGN_KEY_CHECKS = 0"); err != nil {
panic("Failed to disable foreign key checks")
}
if _, err := sess.Exec("DROP TABLE " + table.Name); err != nil {
panic(fmt.Sprintf("Failed to delete table: %v, err: %v", table.Name, err))
}
if _, err := sess.Exec("SET FOREIGN_KEY_CHECKS = 1"); err != nil {
panic("Failed to disable foreign key checks")
}
}
}
var indexTypes = []string{"Unknown", "", "UNIQUE"}
func TestMigrations(t *testing.T) {
log.NewLogger(0, "console", `{"level": 0}`)
testDBs := [][]string{
[]string{"postgres", "user=grafanatest password=grafanatest host=localhost port=5432 dbname=grafanatest sslmode=disable"},
[]string{"mysql", "grafana:password@tcp(localhost:3306)/grafana_tests?charset=utf8"},
[]string{"sqlite3", ":memory:"},
}
for _, testDB := range testDBs {
Convey("Initial "+testDB[0]+" migration", t, func() {
x, err := xorm.NewEngine(testDB[0], testDB[1])
So(err, ShouldBeNil)
if testDB[0] == "mysql" {
cleanDB(x)
}
mg := NewMigrator(x)
mg.LogLevel = log.DEBUG
AddMigrations(mg)
err = mg.Start()
So(err, ShouldBeNil)
tables, err := x.DBMetas()
So(err, ShouldBeNil)
fmt.Printf("\nDB Schema after migration: table count: %v\n", len(tables))
for _, table := range tables {
fmt.Printf("\nTable: %v \n", table.Name)
for _, column := range table.Columns() {
fmt.Printf("\t %v \n", column.String(x.Dialect()))
}
if len(table.Indexes) > 0 {
fmt.Printf("\n\tIndexes:\n")
for _, index := range table.Indexes {
fmt.Printf("\t %v (%v) %v \n", index.Name, strings.Join(index.Cols, ","), indexTypes[index.Type])
}
}
}
})
}
}

View File

@ -0,0 +1,62 @@
package sqlstore
import (
"fmt"
"strings"
"testing"
"github.com/go-xorm/xorm"
"github.com/torkelo/grafana-pro/pkg/log"
. "github.com/torkelo/grafana-pro/pkg/services/sqlstore/migrator"
"github.com/torkelo/grafana-pro/pkg/services/sqlstore/sqlutil"
. "github.com/smartystreets/goconvey/convey"
)
var indexTypes = []string{"Unknown", "INDEX", "UNIQUE INDEX"}
func ATestMigrations(t *testing.T) {
log.NewLogger(0, "console", `{"level": 0}`)
testDBs := []sqlutil.TestDB{
sqlutil.TestDB_Sqlite3,
sqlutil.TestDB_Mysql,
sqlutil.TestDB_Postgres,
}
for _, testDB := range testDBs {
Convey("Initial "+testDB.DriverName+" migration", t, func() {
x, err := xorm.NewEngine(testDB.DriverName, testDB.ConnStr)
So(err, ShouldBeNil)
sqlutil.CleanDB(x)
mg := NewMigrator(x)
mg.LogLevel = log.DEBUG
addMigrations(mg)
err = mg.Start()
So(err, ShouldBeNil)
tables, err := x.DBMetas()
So(err, ShouldBeNil)
fmt.Printf("\nDB Schema after migration: table count: %v\n", len(tables))
for _, table := range tables {
fmt.Printf("\nTable: %v \n", table.Name)
for _, column := range table.Columns() {
fmt.Printf("\t %v \n", column.String(x.Dialect()))
}
if len(table.Indexes) > 0 {
fmt.Printf("\n\tIndexes:\n")
for _, index := range table.Indexes {
fmt.Printf("\t %v (%v) %v \n", index.Name, strings.Join(index.Cols, ","), indexTypes[index.Type])
}
}
}
})
}
}

View File

@ -1,4 +1,9 @@
package migrations
package migrator
import (
"fmt"
"strings"
)
type MigrationBase struct {
id string
@ -87,6 +92,9 @@ func (m *AddIndexMigration) Columns(columns ...string) *AddIndexMigration {
}
func (m *AddIndexMigration) Sql(dialect Dialect) string {
if m.index.Name == "" {
m.index.Name = fmt.Sprintf("%s", strings.Join(m.index.Cols, "_"))
}
return dialect.CreateIndexSql(m.tableName, &m.index)
}
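Since the migrations above no longer call Name(...), an index's name now defaults to its columns joined with underscores; a quick standalone check of that rule:

package main

import (
	"fmt"
	"strings"
)

// Mirrors the default-name branch in AddIndexMigration.Sql above.
func main() {
	cols := []string{"account_id", "slug"}
	fmt.Println(strings.Join(cols, "_")) // account_id_slug
}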

View File

@ -1,4 +1,4 @@
package migrations
package migrator
// Notice
// code based on parts from from https://github.com/go-xorm/core/blob/3e0fa232ab5c90996406c0cd7ae86ad0e5ecf85f/column.go

View File

@ -1,4 +1,4 @@
package migrations
package migrator
import (
"fmt"
@ -23,6 +23,19 @@ type Dialect interface {
TableCheckSql(tableName string) (string, []interface{})
}
func NewDialect(name string) Dialect {
switch name {
case MYSQL:
return NewMysqlDialect()
case SQLITE:
return NewSqlite3Dialect()
case POSTGRES:
return NewPostgresDialect()
}
panic("Unsupported database type: " + name)
}
type BaseDialect struct {
dialect Dialect
driverName string

View File

@ -1,4 +1,4 @@
package migrations
package migrator
import (
"time"
@ -32,16 +32,7 @@ func NewMigrator(engine *xorm.Engine) *Migrator {
mg.x = engine
mg.LogLevel = log.WARN
mg.migrations = make([]Migration, 0)
switch mg.x.DriverName() {
case MYSQL:
mg.dialect = NewMysqlDialect()
case SQLITE:
mg.dialect = NewSqlite3Dialect()
case POSTGRES:
mg.dialect = NewPostgresDialect()
}
mg.dialect = NewDialect(mg.x.DriverName())
return mg
}

View File

@ -1,4 +1,4 @@
package migrations
package migrator
import "strconv"

View File

@ -1,10 +1,6 @@
package migrations
package migrator
import (
"strconv"
"github.com/go-xorm/core"
)
import "strconv"
type Postgres struct {
BaseDialect
@ -35,16 +31,16 @@ func (db *Postgres) SqlType(c *Column) string {
case DB_TinyInt:
res = DB_SmallInt
return res
case DB_MediumInt, core.Int, core.Integer:
case DB_MediumInt, DB_Int, DB_Integer:
if c.IsAutoIncrement {
return DB_Serial
}
return DB_Integer
case DB_Serial, core.BigSerial:
case DB_Serial, DB_BigSerial:
c.IsAutoIncrement = true
c.Nullable = false
res = t
case DB_Binary, core.VarBinary:
case DB_Binary, DB_VarBinary:
return DB_Bytea
case DB_DateTime:
res = DB_TimeStamp
@ -52,13 +48,13 @@ func (db *Postgres) SqlType(c *Column) string {
return "timestamp with time zone"
case DB_Float:
res = DB_Real
case DB_TinyText, core.MediumText, core.LongText:
case DB_TinyText, DB_MediumText, DB_LongText:
res = DB_Text
case DB_NVarchar:
res = DB_Varchar
case DB_Uuid:
res = DB_Uuid
case DB_Blob, core.TinyBlob, core.MediumBlob, core.LongBlob:
case DB_Blob, DB_TinyBlob, DB_MediumBlob, DB_LongBlob:
return DB_Bytea
case DB_Double:
return "DOUBLE PRECISION"

View File

@ -1,6 +1,4 @@
package migrations
import "github.com/go-xorm/core"
package migrator
type Sqlite3 struct {
BaseDialect
@ -32,7 +30,7 @@ func (db *Sqlite3) SqlType(c *Column) string {
case DB_TimeStampz:
return DB_Text
case DB_Char, DB_Varchar, DB_NVarchar, DB_TinyText, DB_Text, DB_MediumText, DB_LongText:
return core.Text
return DB_Text
case DB_Bit, DB_TinyInt, DB_SmallInt, DB_MediumInt, DB_Int, DB_Integer, DB_BigInt, DB_Bool:
return DB_Integer
case DB_Float, DB_Double, DB_Real:
@ -45,7 +43,7 @@ func (db *Sqlite3) SqlType(c *Column) string {
c.IsPrimaryKey = true
c.IsAutoIncrement = true
c.Nullable = false
return core.Integer
return DB_Integer
default:
return c.Type
}

View File

@ -1,4 +1,4 @@
package migrations
package migrator
const (
POSTGRES = "postgres"

View File

@ -9,7 +9,7 @@ import (
"github.com/torkelo/grafana-pro/pkg/bus"
"github.com/torkelo/grafana-pro/pkg/log"
m "github.com/torkelo/grafana-pro/pkg/models"
"github.com/torkelo/grafana-pro/pkg/services/sqlstore/migrations"
"github.com/torkelo/grafana-pro/pkg/services/sqlstore/migrator"
"github.com/torkelo/grafana-pro/pkg/setting"
"github.com/torkelo/grafana-pro/pkg/util"
@ -21,6 +21,7 @@ import (
var (
x *xorm.Engine
dialect migrator.Dialect
tables []interface{}
HasEngine bool
@ -34,9 +35,7 @@ var (
func init() {
tables = make([]interface{}, 0)
tables = append(tables, new(m.DataSource), new(DashboardTag),
new(m.Token))
tables = append(tables, new(m.Token))
}
func EnsureAdminUser() {
@ -76,9 +75,10 @@ func NewEngine() {
func SetEngine(engine *xorm.Engine, enableLog bool) (err error) {
x = engine
dialect = migrator.NewDialect(x.DriverName())
migrator := migrations.NewMigrator(x)
migrations.AddMigrations(migrator)
migrator := migrator.NewMigrator(x)
addMigrations(migrator)
if err := migrator.Start(); err != nil {
return fmt.Errorf("Sqlstore::Migration failed err: %v\n", err)

View File

@ -0,0 +1,47 @@
package sqlutil
import (
"fmt"
"github.com/go-xorm/xorm"
)
type TestDB struct {
DriverName string
ConnStr string
}
var TestDB_Sqlite3 = TestDB{DriverName: "sqlite3", ConnStr: ":memory:"}
var TestDB_Mysql = TestDB{DriverName: "mysql", ConnStr: "grafana:password@tcp(localhost:3306)/grafana_tests?charset=utf8"}
var TestDB_Postgres = TestDB{DriverName: "postgres", ConnStr: "user=grafanatest password=grafanatest host=localhost port=5432 dbname=grafanatest sslmode=disable"}
func CleanDB(x *xorm.Engine) {
if x.DriverName() == "postgres" {
sess := x.NewSession()
defer sess.Close()
if _, err := sess.Exec("DROP SCHEMA public CASCADE;"); err != nil {
panic("Failed to drop schema public")
}
if _, err := sess.Exec("CREATE SCHEMA public;"); err != nil {
panic("Failed to create schema public")
}
} else if x.DriverName() == "mysql" {
tables, _ := x.DBMetas()
sess := x.NewSession()
defer sess.Close()
for _, table := range tables {
if _, err := sess.Exec("set foreign_key_checks = 0"); err != nil {
panic("failed to disable foreign key checks")
}
if _, err := sess.Exec("drop table " + table.Name + " ;"); err != nil {
panic(fmt.Sprintf("failed to delete table: %v, err: %v", table.Name, err))
}
if _, err := sess.Exec("set foreign_key_checks = 1"); err != nil {
panic("failed to disable foreign key checks")
}
}
}
}
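CleanDB is what both InitTestDB and ATestMigrations run before migrating: on Postgres the public schema is dropped and recreated (which also clears sequences and other dependent objects), on MySQL each table is dropped with foreign-key checks toggled off, and sqlite3 needs nothing since the test engine is in-memory. A hypothetical helper built on it, mirroring those test files (the driver packages still need to be blank-imported wherever the engine is opened):

package sqlstore_test

import (
	"testing"

	"github.com/go-xorm/xorm"
	"github.com/torkelo/grafana-pro/pkg/services/sqlstore/sqlutil"
)

// openCleanTestDB mirrors the setup in ATestMigrations and InitTestDB.
func openCleanTestDB(t *testing.T, db sqlutil.TestDB) *xorm.Engine {
	x, err := xorm.NewEngine(db.DriverName, db.ConnStr)
	if err != nil {
		t.Fatalf("failed to connect to %s: %v", db.DriverName, err)
	}
	sqlutil.CleanDB(x) // empty database, ready for the migrator
	return x
}

A call like openCleanTestDB(t, sqlutil.TestDB_Postgres) then hands the migration test a blank database to run against.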

View File

@ -150,18 +150,18 @@ func GetUserAccounts(query *m.GetUserAccountsQuery) error {
func GetSignedInUser(query *m.GetSignedInUserQuery) error {
var rawSql = `SELECT
user.id as user_id,
user.is_admin as is_grafana_admin,
user.email as email,
user.login as login,
user.name as name,
u.id as user_id,
u.is_admin as is_grafana_admin,
u.email as email,
u.login as login,
u.name as name,
account.name as account_name,
account_user.role as account_role,
account.id as account_id
FROM user
LEFT OUTER JOIN account_user on account_user.account_id = user.account_id and account_user.user_id = user.id
LEFT OUTER JOIN account on account.id = user.account_id
WHERE user.id=?`
FROM ` + dialect.Quote("user") + ` as u
LEFT OUTER JOIN account_user on account_user.account_id = u.account_id and account_user.user_id = u.id
LEFT OUTER JOIN account on account.id = u.account_id
WHERE u.id=?`
var user m.SignedInUser
sess := x.Table("user")
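The point of this rewrite is Postgres: user is a reserved word there, so the table name now goes through dialect.Quote and gets the alias u, whereas the old unquoted user.* references only worked on MySQL and SQLite. An illustrative reconstruction of the resulting FROM clause (presumably double-quoted on Postgres, backtick-quoted on MySQL):

// dialect is the package-level migrator.Dialect set in SetEngine.
fromClause := `FROM ` + dialect.Quote("user") + ` as u`
// postgres: FROM "user" as u
// mysql:    FROM `user` as u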