From bec500b69f2169703902b72fa6cbd5ac02418b40 Mon Sep 17 00:00:00 2001 From: Polina Boneva <13227501+polibb@users.noreply.github.com> Date: Fri, 15 Jul 2022 12:06:16 +0300 Subject: [PATCH 001/116] Chore: Test `grafana/public/app/plugins/panel/text/TextPanel.tsx` (#52244) * move sanitize test to its own test file * add a test for renderTextPanelMarkdown to always sanitize * setup TextPanel tests * add tests to always sanitize Text Panel contents and always convert correctly to html/markdown --- .../grafana-data/src/text/markdown.test.ts | 13 +- .../grafana-data/src/text/sanitize.test.ts | 12 ++ .../app/plugins/panel/text/TextPanel.test.tsx | 120 ++++++++++++++++++ public/app/plugins/panel/text/TextPanel.tsx | 8 +- 4 files changed, 142 insertions(+), 11 deletions(-) create mode 100644 packages/grafana-data/src/text/sanitize.test.ts create mode 100644 public/app/plugins/panel/text/TextPanel.test.tsx diff --git a/packages/grafana-data/src/text/markdown.test.ts b/packages/grafana-data/src/text/markdown.test.ts index b9e7052c3ef..20a5bab4611 100644 --- a/packages/grafana-data/src/text/markdown.test.ts +++ b/packages/grafana-data/src/text/markdown.test.ts @@ -1,5 +1,4 @@ -import { renderMarkdown } from './markdown'; -import { sanitizeTextPanelContent } from './sanitize'; +import { renderMarkdown, renderTextPanelMarkdown } from './markdown'; describe('Markdown wrapper', () => { it('should be able to handle undefined value', () => { @@ -12,12 +11,8 @@ describe('Markdown wrapper', () => { expect(str).toBe('<script>alert()</script>'); }); - it('should allow whitelisted styles in text panel', () => { - const html = - '
'; - const str = sanitizeTextPanelContent(html); - expect(str).toBe( - '
' - ); + it('should sanitize content in text panel by default', () => { + const str = renderTextPanelMarkdown(''); + expect(str).toBe('<script>alert()</script>'); }); }); diff --git a/packages/grafana-data/src/text/sanitize.test.ts b/packages/grafana-data/src/text/sanitize.test.ts new file mode 100644 index 00000000000..8d9ff47d4d9 --- /dev/null +++ b/packages/grafana-data/src/text/sanitize.test.ts @@ -0,0 +1,12 @@ +import { sanitizeTextPanelContent } from './sanitize'; + +describe('Sanitize wrapper', () => { + it('should allow whitelisted styles in text panel', () => { + const html = + '
'; + const str = sanitizeTextPanelContent(html); + expect(str).toBe( + '
' + ); + }); +}); diff --git a/public/app/plugins/panel/text/TextPanel.test.tsx b/public/app/plugins/panel/text/TextPanel.test.tsx new file mode 100644 index 00000000000..99a245dff33 --- /dev/null +++ b/public/app/plugins/panel/text/TextPanel.test.tsx @@ -0,0 +1,120 @@ +import { render, screen } from '@testing-library/react'; +import React from 'react'; + +import { dateTime, LoadingState, EventBusSrv } from '@grafana/data'; + +import { Props, TextPanel } from './TextPanel'; +import { TextMode } from './models.gen'; + +const replaceVariablesMock = jest.fn(); +const defaultProps: Props = { + id: 1, + data: { + state: LoadingState.Done, + series: [ + { + fields: [], + length: 0, + }, + ], + timeRange: { + from: dateTime('2022-01-01T15:55:00Z'), + to: dateTime('2022-07-12T15:55:00Z'), + raw: { + from: 'now-15m', + to: 'now', + }, + }, + }, + timeRange: { + from: dateTime('2022-07-11T15:55:00Z'), + to: dateTime('2022-07-12T15:55:00Z'), + raw: { + from: 'now-15m', + to: 'now', + }, + }, + timeZone: 'utc', + transparent: false, + width: 120, + height: 120, + fieldConfig: { + defaults: {}, + overrides: [], + }, + renderCounter: 1, + title: 'Test Text Panel', + eventBus: new EventBusSrv(), + options: { content: '', mode: TextMode.Markdown }, + onOptionsChange: jest.fn(), + onFieldConfigChange: jest.fn(), + replaceVariables: replaceVariablesMock, + onChangeTimeRange: jest.fn(), +}; + +const setup = (props: Props = defaultProps) => { + render(); +}; + +describe('TextPanel', () => { + it('should render panel without content', () => { + expect(() => setup()).not.toThrow(); + }); + + it('sanitizes content in html mode', () => { + const contentTest = '

Form tags are sanitized.

\n'; + replaceVariablesMock.mockReturnValueOnce(contentTest); + const props = Object.assign({}, defaultProps, { + options: { content: contentTest, mode: TextMode.HTML }, + }); + + setup(props); + + expect(screen.getByTestId('TextPanel-converted-content').innerHTML).toEqual( + '<form>

Form tags are sanitized.

</form>\n<script>Script tags are sanitized.</script>' + ); + }); + + it('sanitizes content in markdown mode', () => { + const contentTest = '

Form tags are sanitized.

\n'; + replaceVariablesMock.mockReturnValueOnce(contentTest); + + const props = Object.assign({}, defaultProps, { + options: { content: contentTest, mode: TextMode.Markdown }, + }); + + setup(props); + + expect(screen.getByTestId('TextPanel-converted-content').innerHTML).toEqual( + '<form>

Form tags are sanitized.

</form>\n<script>Script tags are sanitized.</script>' + ); + }); + + it('converts content to markdown when in markdown mode', async () => { + const contentTest = 'We begin by a simple sentence.\n```code block```'; + replaceVariablesMock.mockReturnValueOnce(contentTest); + + const props = Object.assign({}, defaultProps, { + options: { content: contentTest, mode: TextMode.Markdown }, + }); + + setup(props); + + const waited = await screen.getByTestId('TextPanel-converted-content'); + expect(waited.innerHTML).toEqual('

We begin by a simple sentence.\ncode block

\n'); + }); + + it('converts content to html when in html mode', () => { + const contentTest = 'We begin by a simple sentence.\n```This is a code block\n```'; + replaceVariablesMock.mockReturnValueOnce(contentTest); + const props = Object.assign({}, defaultProps, { + options: { content: contentTest, mode: TextMode.HTML }, + }); + + setup(props); + + expect(screen.getByTestId('TextPanel-converted-content').innerHTML).toEqual( + 'We begin by a simple sentence.\n```This is a code block\n```' + ); + }); +}); diff --git a/public/app/plugins/panel/text/TextPanel.tsx b/public/app/plugins/panel/text/TextPanel.tsx index c30ba779ec9..8494f3a76c8 100644 --- a/public/app/plugins/panel/text/TextPanel.tsx +++ b/public/app/plugins/panel/text/TextPanel.tsx @@ -12,7 +12,7 @@ import config from 'app/core/config'; // Types import { PanelOptions, TextMode } from './models.gen'; -interface Props extends PanelProps {} +export interface Props extends PanelProps {} interface State { html: string; @@ -90,7 +90,11 @@ export class TextPanel extends PureComponent { const styles = getStyles(); return ( - + ); } From f3ee57abefe14962a50a3cd2e462f21fac1e735f Mon Sep 17 00:00:00 2001 From: Jo Date: Fri, 15 Jul 2022 09:21:09 +0000 Subject: [PATCH 002/116] Fix: Choose Lookup params per auth module (#395) (#52312) Co-authored-by: Karl Persson Fix: Prefer pointer to struct in lookup Co-authored-by: Karl Persson Fix: user email for ldap Co-authored-by: Karl Persson Fix: Use only login for lookup in LDAP Co-authored-by: Karl Persson Fix: use user email for ldap Co-authored-by: Karl Persson fix remaining test fix nit picks --- pkg/api/ldap_debug.go | 5 ++ pkg/api/login_oauth.go | 5 ++ pkg/api/user_test.go | 3 +- pkg/login/ldap_login.go | 8 ++- pkg/models/user_auth.go | 16 ++++-- pkg/services/contexthandler/auth_jwt.go | 5 ++ .../contexthandler/authproxy/authproxy.go | 10 ++++ pkg/services/login/authinfoservice/service.go | 25 +++++----- .../login/authinfoservice/user_auth_test.go | 49 
+++++++++++++------ .../login/loginservice/loginservice.go | 8 ++- .../login/loginservice/loginservice_test.go | 6 ++- pkg/services/login/logintest/logintest.go | 6 ++- 12 files changed, 104 insertions(+), 42 deletions(-) diff --git a/pkg/api/ldap_debug.go b/pkg/api/ldap_debug.go index cc4ef546405..48015002395 100644 --- a/pkg/api/ldap_debug.go +++ b/pkg/api/ldap_debug.go @@ -220,6 +220,11 @@ func (hs *HTTPServer) PostSyncUserWithLDAP(c *models.ReqContext) response.Respon ReqContext: c, ExternalUser: user, SignupAllowed: hs.Cfg.LDAPAllowSignup, + UserLookupParams: models.UserLookupParams{ + UserID: &query.Result.ID, // Upsert by ID only + Email: nil, + Login: nil, + }, } err = hs.Login.UpsertUser(c.Req.Context(), upsertCmd) diff --git a/pkg/api/login_oauth.go b/pkg/api/login_oauth.go index 7beff805990..8495f08de71 100644 --- a/pkg/api/login_oauth.go +++ b/pkg/api/login_oauth.go @@ -305,6 +305,11 @@ func (hs *HTTPServer) SyncUser( ReqContext: ctx, ExternalUser: extUser, SignupAllowed: connect.IsSignupAllowed(), + UserLookupParams: models.UserLookupParams{ + Email: &extUser.Email, + UserID: nil, + Login: nil, + }, } if err := hs.Login.UpsertUser(ctx.Req.Context(), cmd); err != nil { diff --git a/pkg/api/user_test.go b/pkg/api/user_test.go index bdd5cd6e071..c7ec173f524 100644 --- a/pkg/api/user_test.go +++ b/pkg/api/user_test.go @@ -76,7 +76,8 @@ func TestUserAPIEndpoint_userLoggedIn(t *testing.T) { } idToken := "testidtoken" token = token.WithExtra(map[string]interface{}{"id_token": idToken}) - query := &models.GetUserByAuthInfoQuery{Login: "loginuser", AuthModule: "test", AuthId: "test"} + login := "loginuser" + query := &models.GetUserByAuthInfoQuery{AuthModule: "test", AuthId: "test", UserLookupParams: models.UserLookupParams{Login: &login}} cmd := &models.UpdateAuthInfoCommand{ UserId: user.ID, AuthId: query.AuthId, diff --git a/pkg/login/ldap_login.go b/pkg/login/ldap_login.go index 23d77224423..4e06ddc72ce 100644 --- a/pkg/login/ldap_login.go +++ 
b/pkg/login/ldap_login.go @@ -57,9 +57,13 @@ var loginUsingLDAP = func(ctx context.Context, query *models.LoginUserQuery, log ReqContext: query.ReqContext, ExternalUser: externalUser, SignupAllowed: setting.LDAPAllowSignup, + UserLookupParams: models.UserLookupParams{ + Login: &externalUser.Login, + Email: &externalUser.Email, + UserID: nil, + }, } - err = loginService.UpsertUser(ctx, upsert) - if err != nil { + if err = loginService.UpsertUser(ctx, upsert); err != nil { return true, err } query.User = upsert.Result diff --git a/pkg/models/user_auth.go b/pkg/models/user_auth.go index c9bd9ab0859..0732c81f47d 100644 --- a/pkg/models/user_auth.go +++ b/pkg/models/user_auth.go @@ -57,8 +57,9 @@ type RequestURIKey struct{} // COMMANDS type UpsertUserCommand struct { - ReqContext *ReqContext - ExternalUser *ExternalUserInfo + ReqContext *ReqContext + ExternalUser *ExternalUserInfo + UserLookupParams SignupAllowed bool Result *user.User @@ -98,9 +99,14 @@ type LoginUserQuery struct { type GetUserByAuthInfoQuery struct { AuthModule string AuthId string - UserId int64 - Email string - Login string + UserLookupParams +} + +type UserLookupParams struct { + // Describes lookup order as well + UserID *int64 // if set, will try to find the user by id + Email *string // if set, will try to find the user by email + Login *string // if set, will try to find the user by login } type GetExternalUserInfoByLoginQuery struct { diff --git a/pkg/services/contexthandler/auth_jwt.go b/pkg/services/contexthandler/auth_jwt.go index f0bddf8d6f2..1b1856426ba 100644 --- a/pkg/services/contexthandler/auth_jwt.go +++ b/pkg/services/contexthandler/auth_jwt.go @@ -66,6 +66,11 @@ func (h *ContextHandler) initContextWithJWT(ctx *models.ReqContext, orgId int64) ReqContext: ctx, SignupAllowed: h.Cfg.JWTAuthAutoSignUp, ExternalUser: extUser, + UserLookupParams: models.UserLookupParams{ + UserID: nil, + Login: &query.Login, + Email: &query.Email, + }, } if err := 
h.loginService.UpsertUser(ctx.Req.Context(), upsert); err != nil { ctx.Logger.Error("Failed to upsert JWT user", "error", err) diff --git a/pkg/services/contexthandler/authproxy/authproxy.go b/pkg/services/contexthandler/authproxy/authproxy.go index 79eb395b004..b9c8cefd89a 100644 --- a/pkg/services/contexthandler/authproxy/authproxy.go +++ b/pkg/services/contexthandler/authproxy/authproxy.go @@ -241,6 +241,11 @@ func (auth *AuthProxy) LoginViaLDAP(reqCtx *models.ReqContext) (int64, error) { ReqContext: reqCtx, SignupAllowed: auth.cfg.LDAPAllowSignup, ExternalUser: extUser, + UserLookupParams: models.UserLookupParams{ + Login: &extUser.Login, + Email: &extUser.Email, + UserID: nil, + }, } if err := auth.loginService.UpsertUser(reqCtx.Req.Context(), upsert); err != nil { return 0, err @@ -298,6 +303,11 @@ func (auth *AuthProxy) loginViaHeader(reqCtx *models.ReqContext) (int64, error) ReqContext: reqCtx, SignupAllowed: auth.cfg.AuthProxyAutoSignUp, ExternalUser: extUser, + UserLookupParams: models.UserLookupParams{ + UserID: nil, + Login: &extUser.Login, + Email: &extUser.Email, + }, } err := auth.loginService.UpsertUser(reqCtx.Req.Context(), upsert) diff --git a/pkg/services/login/authinfoservice/service.go b/pkg/services/login/authinfoservice/service.go index 1e8c79bc39a..ddcc1385352 100644 --- a/pkg/services/login/authinfoservice/service.go +++ b/pkg/services/login/authinfoservice/service.go @@ -44,11 +44,12 @@ func (s *Implementation) LookupAndFix(ctx context.Context, query *models.GetUser } // if user id was specified and doesn't match the user_auth entry, remove it - if query.UserId != 0 && query.UserId != authQuery.Result.UserId { - err := s.authInfoStore.DeleteAuthInfo(ctx, &models.DeleteAuthInfoCommand{ + if query.UserLookupParams.UserID != nil && + *query.UserLookupParams.UserID != 0 && + *query.UserLookupParams.UserID != authQuery.Result.UserId { + if err := s.authInfoStore.DeleteAuthInfo(ctx, &models.DeleteAuthInfoCommand{ UserAuth: authQuery.Result, - }) 
- if err != nil { + }); err != nil { s.logger.Error("Error removing user_auth entry", "error", err) } @@ -78,29 +79,29 @@ func (s *Implementation) LookupAndFix(ctx context.Context, query *models.GetUser return false, nil, nil, models.ErrUserNotFound } -func (s *Implementation) LookupByOneOf(ctx context.Context, userId int64, email string, login string) (*user.User, error) { +func (s *Implementation) LookupByOneOf(ctx context.Context, params *models.UserLookupParams) (*user.User, error) { var user *user.User var err error // If not found, try to find the user by id - if userId != 0 { - user, err = s.authInfoStore.GetUserById(ctx, userId) + if params.UserID != nil && *params.UserID != 0 { + user, err = s.authInfoStore.GetUserById(ctx, *params.UserID) if err != nil && !errors.Is(err, models.ErrUserNotFound) { return nil, err } } // If not found, try to find the user by email address - if user == nil && email != "" { - user, err = s.authInfoStore.GetUserByEmail(ctx, email) + if user == nil && params.Email != nil && *params.Email != "" { + user, err = s.authInfoStore.GetUserByEmail(ctx, *params.Email) if err != nil && !errors.Is(err, models.ErrUserNotFound) { return nil, err } } // If not found, try to find the user by login - if user == nil && login != "" { - user, err = s.authInfoStore.GetUserByLogin(ctx, login) + if user == nil && params.Login != nil && *params.Login != "" { + user, err = s.authInfoStore.GetUserByLogin(ctx, *params.Login) if err != nil && !errors.Is(err, models.ErrUserNotFound) { return nil, err } @@ -139,7 +140,7 @@ func (s *Implementation) LookupAndUpdate(ctx context.Context, query *models.GetU // 2. 
FindByUserDetails if !foundUser { - user, err = s.LookupByOneOf(ctx, query.UserId, query.Email, query.Login) + user, err = s.LookupByOneOf(ctx, &query.UserLookupParams) if err != nil { return nil, err } diff --git a/pkg/services/login/authinfoservice/user_auth_test.go b/pkg/services/login/authinfoservice/user_auth_test.go index 0c96d819e7b..c67ea0f5b4d 100644 --- a/pkg/services/login/authinfoservice/user_auth_test.go +++ b/pkg/services/login/authinfoservice/user_auth_test.go @@ -43,7 +43,7 @@ func TestUserAuth(t *testing.T) { // By Login login := "loginuser0" - query := &models.GetUserByAuthInfoQuery{Login: login} + query := &models.GetUserByAuthInfoQuery{UserLookupParams: models.UserLookupParams{Login: &login}} user, err := srv.LookupAndUpdate(context.Background(), query) require.Nil(t, err) @@ -52,7 +52,9 @@ func TestUserAuth(t *testing.T) { // By ID id := user.ID - user, err = srv.LookupByOneOf(context.Background(), id, "", "") + user, err = srv.LookupByOneOf(context.Background(), &models.UserLookupParams{ + UserID: &id, + }) require.Nil(t, err) require.Equal(t, user.ID, id) @@ -60,7 +62,9 @@ func TestUserAuth(t *testing.T) { // By Email email := "user1@test.com" - user, err = srv.LookupByOneOf(context.Background(), 0, email, "") + user, err = srv.LookupByOneOf(context.Background(), &models.UserLookupParams{ + Email: &email, + }) require.Nil(t, err) require.Equal(t, user.Email, email) @@ -68,7 +72,9 @@ func TestUserAuth(t *testing.T) { // Don't find nonexistent user email = "nonexistent@test.com" - user, err = srv.LookupByOneOf(context.Background(), 0, email, "") + user, err = srv.LookupByOneOf(context.Background(), &models.UserLookupParams{ + Email: &email, + }) require.Equal(t, models.ErrUserNotFound, err) require.Nil(t, user) @@ -85,7 +91,7 @@ func TestUserAuth(t *testing.T) { // create user_auth entry login := "loginuser0" - query.Login = login + query.UserLookupParams.Login = &login user, err = srv.LookupAndUpdate(context.Background(), query) require.Nil(t, 
err) @@ -99,9 +105,9 @@ func TestUserAuth(t *testing.T) { require.Equal(t, user.Login, login) // get with non-matching id - id := user.ID + idPlusOne := user.ID + 1 - query.UserId = id + 1 + query.UserLookupParams.UserID = &idPlusOne user, err = srv.LookupAndUpdate(context.Background(), query) require.Nil(t, err) @@ -143,7 +149,9 @@ func TestUserAuth(t *testing.T) { login := "loginuser0" // Calling GetUserByAuthInfoQuery on an existing user will populate an entry in the user_auth table - query := &models.GetUserByAuthInfoQuery{Login: login, AuthModule: "test", AuthId: "test"} + query := &models.GetUserByAuthInfoQuery{AuthModule: "test", AuthId: "test", UserLookupParams: models.UserLookupParams{ + Login: &login, + }} user, err := srv.LookupAndUpdate(context.Background(), query) require.Nil(t, err) @@ -192,7 +200,9 @@ func TestUserAuth(t *testing.T) { // Calling srv.LookupAndUpdateQuery on an existing user will populate an entry in the user_auth table // Make the first log-in during the past database.GetTime = func() time.Time { return time.Now().AddDate(0, 0, -2) } - query := &models.GetUserByAuthInfoQuery{Login: login, AuthModule: "test1", AuthId: "test1"} + query := &models.GetUserByAuthInfoQuery{AuthModule: "test1", AuthId: "test1", UserLookupParams: models.UserLookupParams{ + Login: &login, + }} user, err := srv.LookupAndUpdate(context.Background(), query) database.GetTime = time.Now @@ -202,7 +212,9 @@ func TestUserAuth(t *testing.T) { // Add a second auth module for this user // Have this module's last log-in be more recent database.GetTime = func() time.Time { return time.Now().AddDate(0, 0, -1) } - query = &models.GetUserByAuthInfoQuery{Login: login, AuthModule: "test2", AuthId: "test2"} + query = &models.GetUserByAuthInfoQuery{AuthModule: "test2", AuthId: "test2", UserLookupParams: models.UserLookupParams{ + Login: &login, + }} user, err = srv.LookupAndUpdate(context.Background(), query) database.GetTime = time.Now @@ -257,7 +269,9 @@ func TestUserAuth(t 
*testing.T) { // Calling srv.LookupAndUpdateQuery on an existing user will populate an entry in the user_auth table // Make the first log-in during the past database.GetTime = func() time.Time { return fixedTime.AddDate(0, 0, -2) } - queryOne := &models.GetUserByAuthInfoQuery{Login: login, AuthModule: "test1", AuthId: "test1"} + queryOne := &models.GetUserByAuthInfoQuery{AuthModule: "test1", AuthId: "test1", UserLookupParams: models.UserLookupParams{ + Login: &login, + }} user, err := srv.LookupAndUpdate(context.Background(), queryOne) database.GetTime = time.Now @@ -267,7 +281,9 @@ func TestUserAuth(t *testing.T) { // Add a second auth module for this user // Have this module's last log-in be more recent database.GetTime = func() time.Time { return fixedTime.AddDate(0, 0, -1) } - queryTwo := &models.GetUserByAuthInfoQuery{Login: login, AuthModule: "test2", AuthId: "test2"} + queryTwo := &models.GetUserByAuthInfoQuery{AuthModule: "test2", AuthId: "test2", UserLookupParams: models.UserLookupParams{ + Login: &login, + }} user, err = srv.LookupAndUpdate(context.Background(), queryTwo) require.Nil(t, err) require.Equal(t, user.Login, login) @@ -333,16 +349,21 @@ func TestUserAuth(t *testing.T) { // Expect to pass since there's a matching login user database.GetTime = func() time.Time { return time.Now().AddDate(0, 0, -2) } - query := &models.GetUserByAuthInfoQuery{Login: login, AuthModule: genericOAuthModule, AuthId: ""} + query := &models.GetUserByAuthInfoQuery{AuthModule: genericOAuthModule, AuthId: "", UserLookupParams: models.UserLookupParams{ + Login: &login, + }} user, err := srv.LookupAndUpdate(context.Background(), query) database.GetTime = time.Now require.Nil(t, err) require.Equal(t, user.Login, login) + otherLoginUser := "aloginuser" // Should throw a "user not found" error since there's no matching login user database.GetTime = func() time.Time { return time.Now().AddDate(0, 0, -2) } - query = &models.GetUserByAuthInfoQuery{Login: "aloginuser", AuthModule: 
genericOAuthModule, AuthId: ""} + query = &models.GetUserByAuthInfoQuery{AuthModule: genericOAuthModule, AuthId: "", UserLookupParams: models.UserLookupParams{ + Login: &otherLoginUser, + }} user, err = srv.LookupAndUpdate(context.Background(), query) database.GetTime = time.Now diff --git a/pkg/services/login/loginservice/loginservice.go b/pkg/services/login/loginservice/loginservice.go index f49532c64d0..b138c6e5393 100644 --- a/pkg/services/login/loginservice/loginservice.go +++ b/pkg/services/login/loginservice/loginservice.go @@ -49,11 +49,9 @@ func (ls *Implementation) UpsertUser(ctx context.Context, cmd *models.UpsertUser extUser := cmd.ExternalUser usr, err := ls.AuthInfoService.LookupAndUpdate(ctx, &models.GetUserByAuthInfoQuery{ - AuthModule: extUser.AuthModule, - AuthId: extUser.AuthId, - UserId: extUser.UserId, - Email: extUser.Email, - Login: extUser.Login, + AuthModule: extUser.AuthModule, + AuthId: extUser.AuthId, + UserLookupParams: cmd.UserLookupParams, }) if err != nil { if !errors.Is(err, models.ErrUserNotFound) { diff --git a/pkg/services/login/loginservice/loginservice_test.go b/pkg/services/login/loginservice/loginservice_test.go index dd9328b2d91..1bd5be21c7b 100644 --- a/pkg/services/login/loginservice/loginservice_test.go +++ b/pkg/services/login/loginservice/loginservice_test.go @@ -69,10 +69,12 @@ func Test_teamSync(t *testing.T) { AuthInfoService: authInfoMock, } - upserCmd := &models.UpsertUserCommand{ExternalUser: &models.ExternalUserInfo{Email: "test_user@example.org"}} + email := "test_user@example.org" + upserCmd := &models.UpsertUserCommand{ExternalUser: &models.ExternalUserInfo{Email: email}, + UserLookupParams: models.UserLookupParams{Email: &email}} expectedUser := &user.User{ ID: 1, - Email: "test_user@example.org", + Email: email, Name: "test_user", Login: "test_user", } diff --git a/pkg/services/login/logintest/logintest.go b/pkg/services/login/logintest/logintest.go index 16691823323..d4a9e37c3c6 100644 --- 
a/pkg/services/login/logintest/logintest.go +++ b/pkg/services/login/logintest/logintest.go @@ -29,7 +29,11 @@ type AuthInfoServiceFake struct { } func (a *AuthInfoServiceFake) LookupAndUpdate(ctx context.Context, query *models.GetUserByAuthInfoQuery) (*user.User, error) { - a.LatestUserID = query.UserId + if query.UserLookupParams.UserID != nil { + a.LatestUserID = *query.UserLookupParams.UserID + } else { + a.LatestUserID = 0 + } return a.ExpectedUser, a.ExpectedError } From 1c48f443f06aabaea2845d0b333f7f263d095d0b Mon Sep 17 00:00:00 2001 From: Andreas Christou Date: Fri, 15 Jul 2022 10:46:30 +0100 Subject: [PATCH 003/116] Upgrade grafana-azure-sdk-go package (#52248) - Includes fix for appropriate selection of system assigned identity when using managed identity credential for Azure Monitor auth --- go.mod | 2 +- go.sum | 2 ++ 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/go.mod b/go.mod index 78270aecc63..35ad2f3d040 100644 --- a/go.mod +++ b/go.mod @@ -53,7 +53,7 @@ require ( github.com/gosimple/slug v1.12.0 github.com/grafana/cuetsy v0.0.3 github.com/grafana/grafana-aws-sdk v0.10.7 - github.com/grafana/grafana-azure-sdk-go v1.2.0 + github.com/grafana/grafana-azure-sdk-go v1.3.0 github.com/grafana/grafana-plugin-sdk-go v0.138.0 github.com/grpc-ecosystem/go-grpc-middleware v1.3.0 github.com/hashicorp/go-hclog v1.0.0 diff --git a/go.sum b/go.sum index df4a323117f..94fa610c55f 100644 --- a/go.sum +++ b/go.sum @@ -1338,6 +1338,8 @@ github.com/grafana/grafana-aws-sdk v0.10.7 h1:kXOuWCI+fV561/9sOU0DnzlFwqblfW36Xp github.com/grafana/grafana-aws-sdk v0.10.7/go.mod h1:5Iw3xY7iXJfNaYHrRHMXa/kaB2lWoyntg71PPLGvSs8= github.com/grafana/grafana-azure-sdk-go v1.2.0 h1:f/7BjCHGIU0JYOsLIt4oJztDy0fOPBRHB5R0Xe9++ew= github.com/grafana/grafana-azure-sdk-go v1.2.0/go.mod h1:rgrnK9m6CgKlgx4rH3FFP/6dTdyRO6LYC2mVZov35yo= +github.com/grafana/grafana-azure-sdk-go v1.3.0 h1:zboQpq/ljBjqHo/6UQNZAUwqGTtnEGRYSEnqIQvLuAo= +github.com/grafana/grafana-azure-sdk-go v1.3.0/go.mod 
h1:rgrnK9m6CgKlgx4rH3FFP/6dTdyRO6LYC2mVZov35yo= github.com/grafana/grafana-google-sdk-go v0.0.0-20211104130251-b190293eaf58 h1:2ud7NNM7LrGPO4x0NFR8qLq68CqI4SmB7I2yRN2w9oE= github.com/grafana/grafana-google-sdk-go v0.0.0-20211104130251-b190293eaf58/go.mod h1:Vo2TKWfDVmNTELBUM+3lkrZvFtBws0qSZdXhQxRdJrE= github.com/grafana/grafana-plugin-sdk-go v0.114.0/go.mod h1:D7x3ah+1d4phNXpbnOaxa/osSaZlwh9/ZUnGGzegRbk= From 91fd0223a4e1211ba383b33044ada4df11ebd560 Mon Sep 17 00:00:00 2001 From: George Robinson Date: Fri, 15 Jul 2022 10:48:52 +0100 Subject: [PATCH 004/116] Datasources: Allow configuration of the TTL (#52161) --- pkg/services/datasources/service/cache_service.go | 12 +++++++++--- 1 file changed, 9 insertions(+), 3 deletions(-) diff --git a/pkg/services/datasources/service/cache_service.go b/pkg/services/datasources/service/cache_service.go index bcfcac1315b..38327f7127f 100644 --- a/pkg/services/datasources/service/cache_service.go +++ b/pkg/services/datasources/service/cache_service.go @@ -12,9 +12,14 @@ import ( "github.com/grafana/grafana/pkg/services/sqlstore" ) +const ( + DefaultCacheTTL = 5 * time.Second +) + func ProvideCacheService(cacheService *localcache.CacheService, sqlStore *sqlstore.SQLStore) *CacheServiceImpl { return &CacheServiceImpl{ logger: log.New("datasources"), + cacheTTL: DefaultCacheTTL, CacheService: cacheService, SQLStore: sqlStore, } @@ -22,6 +27,7 @@ func ProvideCacheService(cacheService *localcache.CacheService, sqlStore *sqlsto type CacheServiceImpl struct { logger log.Logger + cacheTTL time.Duration CacheService *localcache.CacheService SQLStore *sqlstore.SQLStore } @@ -56,7 +62,7 @@ func (dc *CacheServiceImpl) GetDatasource( if ds.Uid != "" { dc.CacheService.Set(uidKey(ds.OrgId, ds.Uid), ds, time.Second*5) } - dc.CacheService.Set(cacheKey, ds, time.Second*5) + dc.CacheService.Set(cacheKey, ds, dc.cacheTTL) return ds, nil } @@ -92,8 +98,8 @@ func (dc *CacheServiceImpl) GetDatasourceByUID( ds := query.Result - 
dc.CacheService.Set(uidCacheKey, ds, time.Second*5) - dc.CacheService.Set(idKey(ds.Id), ds, time.Second*5) + dc.CacheService.Set(uidCacheKey, ds, dc.cacheTTL) + dc.CacheService.Set(idKey(ds.Id), ds, dc.cacheTTL) return ds, nil } From 8fc51932f5e3066c2838d25cfd97cd6247da8d28 Mon Sep 17 00:00:00 2001 From: Ivana Huckova <30407135+ivanahuckova@users.noreply.github.com> Date: Fri, 15 Jul 2022 13:03:14 +0200 Subject: [PATCH 005/116] Loki: Fix incorrect TopK value type in query builder (#52226) * Loki: Fix incorrect TopK value type in query builder * Simplify code * Remove bracket * Brackets are back --- .../shared/operationUtils.test.ts | 39 +++++++++++++++++++ .../querybuilder/shared/operationUtils.ts | 16 ++++---- 2 files changed, 46 insertions(+), 9 deletions(-) diff --git a/public/app/plugins/datasource/prometheus/querybuilder/shared/operationUtils.test.ts b/public/app/plugins/datasource/prometheus/querybuilder/shared/operationUtils.test.ts index 3b6f532237e..72a9c178545 100644 --- a/public/app/plugins/datasource/prometheus/querybuilder/shared/operationUtils.test.ts +++ b/public/app/plugins/datasource/prometheus/querybuilder/shared/operationUtils.test.ts @@ -124,4 +124,43 @@ describe('createAggregationOperationWithParams', () => { }, ]); }); + it('returns correct query string using aggregation definitions with overrides and number type param', () => { + const def = createAggregationOperationWithParam( + 'test_aggregation', + { + params: [{ name: 'K-value', type: 'number' }], + defaultParams: [5], + }, + { category: 'test_category' } + ); + + const topKByDefinition = def[1]; + expect( + topKByDefinition.renderer( + { id: '__topk_by', params: ['5', 'source', 'place'] }, + def[1], + 'rate({place="luna"} |= `` [5m])' + ) + ).toBe('test_aggregation by(source, place) (5, rate({place="luna"} |= `` [5m]))'); + }); + + it('returns correct query string using aggregation definitions with overrides and string type param', () => { + const def = 
createAggregationOperationWithParam( + 'test_aggregation', + { + params: [{ name: 'Identifier', type: 'string' }], + defaultParams: ['count'], + }, + { category: 'test_category' } + ); + + const countValueDefinition = def[1]; + expect( + countValueDefinition.renderer( + { id: 'count_values', params: ['5', 'source', 'place'] }, + def[1], + 'rate({place="luna"} |= `` [5m])' + ) + ).toBe('test_aggregation by(source, place) ("5", rate({place="luna"} |= `` [5m]))'); + }); }); diff --git a/public/app/plugins/datasource/prometheus/querybuilder/shared/operationUtils.ts b/public/app/plugins/datasource/prometheus/querybuilder/shared/operationUtils.ts index ef6689763d3..024c441bc47 100644 --- a/public/app/plugins/datasource/prometheus/querybuilder/shared/operationUtils.ts +++ b/public/app/plugins/datasource/prometheus/querybuilder/shared/operationUtils.ts @@ -279,15 +279,13 @@ function getAggregationExplainer(aggregationName: string, mode: 'by' | 'without' function getAggregationByRendererWithParameter(aggregation: string) { return function aggregationRenderer(model: QueryBuilderOperation, def: QueryBuilderOperationDef, innerExpr: string) { - function mapType(p: QueryBuilderOperationParamValue) { - if (typeof p === 'string') { - return `\"${p}\"`; - } - return p; - } - const params = model.params.slice(0, -1); - const restParams = model.params.slice(1); - return `${aggregation} by(${restParams.join(', ')}) (${params.map(mapType).join(', ')}, ${innerExpr})`; + const restParamIndex = def.params.findIndex((param) => param.restParam); + const params = model.params.slice(0, restParamIndex); + const restParams = model.params.slice(restParamIndex); + + return `${aggregation} by(${restParams.join(', ')}) (${params + .map((param, idx) => (def.params[idx].type === 'string' ? 
`\"${param}\"` : param)) + .join(', ')}, ${innerExpr})`; }; } From 10b9830cece0cb8745b0555482a02580c40a6bd4 Mon Sep 17 00:00:00 2001 From: Andres Martinez Gotor Date: Fri, 15 Jul 2022 13:10:03 +0200 Subject: [PATCH 006/116] Azure Monitor: Add template variables for namespaces and resource names (#52247) --- .../azure_monitor_datasource.test.ts | 41 +++++++-- .../azure_monitor/azure_monitor_datasource.ts | 20 +++-- .../azure_monitor/response_parser.ts | 4 +- .../azure_monitor/url_builder.test.ts | 8 +- .../azure_monitor/url_builder.ts | 2 +- .../VariableEditor/VariableEditor.test.tsx | 78 +++++++++++++---- .../VariableEditor/VariableEditor.tsx | 84 +++++++++++++++++-- .../datasource.ts | 10 ++- .../types/query.ts | 6 ++ .../variables.test.ts | 46 ++++++++++ .../variables.ts | 20 ++++- 11 files changed, 278 insertions(+), 41 deletions(-) diff --git a/public/app/plugins/datasource/grafana-azure-monitor-datasource/azure_monitor/azure_monitor_datasource.test.ts b/public/app/plugins/datasource/grafana-azure-monitor-datasource/azure_monitor/azure_monitor_datasource.test.ts index 979e3112d79..3f638ba2ef9 100644 --- a/public/app/plugins/datasource/grafana-azure-monitor-datasource/azure_monitor/azure_monitor_datasource.test.ts +++ b/public/app/plugins/datasource/grafana-azure-monitor-datasource/azure_monitor/azure_monitor_datasource.test.ts @@ -64,7 +64,7 @@ describe('AzureMonitorDatasource', () => { const expected = basePath + '/providers/microsoft.insights/components/resource1' + - '/providers/microsoft.insights/metricNamespaces?api-version=2017-12-01-preview'; + '/providers/microsoft.insights/metricNamespaces?region=global&api-version=2017-12-01-preview'; expect(path).toBe(expected); return Promise.resolve(response); }); @@ -80,7 +80,7 @@ describe('AzureMonitorDatasource', () => { expect(results.length).toEqual(2); expect(results[0].text).toEqual('Azure.ApplicationInsights'); expect(results[0].value).toEqual('Azure.ApplicationInsights'); - 
expect(results[1].text).toEqual('microsoft.insights-components'); + expect(results[1].text).toEqual('microsoft.insights/components'); expect(results[1].value).toEqual('microsoft.insights/components'); }); }); @@ -405,7 +405,7 @@ describe('AzureMonitorDatasource', () => { ctx.ds.azureMonitorDatasource.getResource = jest.fn().mockImplementation((path: string) => { const basePath = `azuremonitor/subscriptions/${subscription}/resourceGroups`; expect(path).toBe( - `${basePath}/${resourceGroup}/resources?$filter=resourceType eq '${metricDefinition}'&api-version=2021-04-01` + `${basePath}/${resourceGroup}/resources?api-version=2021-04-01&$filter=resourceType eq '${metricDefinition}'` ); return Promise.resolve(response); }); @@ -456,7 +456,7 @@ describe('AzureMonitorDatasource', () => { const basePath = `azuremonitor/subscriptions/${subscription}/resourceGroups`; expect(path).toBe( basePath + - `/${resourceGroup}/resources?$filter=resourceType eq '${validMetricDefinition}'&api-version=2021-04-01` + `/${resourceGroup}/resources?api-version=2021-04-01&$filter=resourceType eq '${validMetricDefinition}'` ); return Promise.resolve(response); }); @@ -467,7 +467,7 @@ describe('AzureMonitorDatasource', () => { expect(results[0].text).toEqual('storagetest/default'); expect(results[0].value).toEqual('storagetest/default'); expect(ctx.ds.azureMonitorDatasource.getResource).toHaveBeenCalledWith( - `azuremonitor/subscriptions/${subscription}/resourceGroups/${resourceGroup}/resources?$filter=resourceType eq '${validMetricDefinition}'&api-version=2021-04-01` + `azuremonitor/subscriptions/${subscription}/resourceGroups/${resourceGroup}/resources?api-version=2021-04-01&$filter=resourceType eq '${validMetricDefinition}'` ); }); }); @@ -497,7 +497,7 @@ describe('AzureMonitorDatasource', () => { const fn = jest.fn(); ctx.ds.azureMonitorDatasource.getResource = fn; const basePath = `azuremonitor/subscriptions/${subscription}/resourceGroups`; - const expectedPath = 
`${basePath}/${resourceGroup}/resources?$filter=resourceType eq '${metricDefinition}'&api-version=2021-04-01`; + const expectedPath = `${basePath}/${resourceGroup}/resources?api-version=2021-04-01&$filter=resourceType eq '${metricDefinition}'`; // first page fn.mockImplementationOnce((path: string) => { expect(path).toBe(expectedPath); @@ -520,6 +520,35 @@ describe('AzureMonitorDatasource', () => { }); }); }); + + describe('without a resource group or a metric definition', () => { + const response = { + value: [ + { + name: 'Failure Anomalies - nodeapp', + type: 'microsoft.insights/alertrules', + }, + { + name: resourceGroup, + type: metricDefinition, + }, + ], + }; + + beforeEach(() => { + ctx.ds.azureMonitorDatasource.getResource = jest.fn().mockImplementation((path: string) => { + const basePath = `azuremonitor/subscriptions/${subscription}/resources?api-version=2021-04-01`; + expect(path).toBe(basePath); + return Promise.resolve(response); + }); + }); + + it('should return list of Resource Names', () => { + return ctx.ds.getResourceNames(subscription).then((results: Array<{ text: string; value: string }>) => { + expect(results.length).toEqual(2); + }); + }); + }); }); describe('When performing getMetricNames', () => { diff --git a/public/app/plugins/datasource/grafana-azure-monitor-datasource/azure_monitor/azure_monitor_datasource.ts b/public/app/plugins/datasource/grafana-azure-monitor-datasource/azure_monitor/azure_monitor_datasource.ts index f1c06ce1a70..125a83a74b9 100644 --- a/public/app/plugins/datasource/grafana-azure-monitor-datasource/azure_monitor/azure_monitor_datasource.ts +++ b/public/app/plugins/datasource/grafana-azure-monitor-datasource/azure_monitor/azure_monitor_datasource.ts @@ -204,14 +204,18 @@ export default class AzureMonitorDatasource extends DataSourceWithBackend { - return ResponseParser.parseResponseValues(result, 'name', 'properties.metricNamespaceName'); + return ResponseParser.parseResponseValues( + result, + 
'properties.metricNamespaceName', + 'properties.metricNamespaceName' + ); }) .then((result) => { if (url.includes('Microsoft.Storage/storageAccounts')) { diff --git a/public/app/plugins/datasource/grafana-azure-monitor-datasource/azure_monitor/response_parser.ts b/public/app/plugins/datasource/grafana-azure-monitor-datasource/azure_monitor/response_parser.ts index 640a127591e..121cf8f54fa 100644 --- a/public/app/plugins/datasource/grafana-azure-monitor-datasource/azure_monitor/response_parser.ts +++ b/public/app/plugins/datasource/grafana-azure-monitor-datasource/azure_monitor/response_parser.ts @@ -33,7 +33,7 @@ export default class ResponseParser { return list; } - static parseResourceNames(result: any, metricDefinition: string): Array<{ text: string; value: string }> { + static parseResourceNames(result: any, metricDefinition?: string): Array<{ text: string; value: string }> { const list: Array<{ text: string; value: string }> = []; if (!result) { @@ -43,7 +43,7 @@ export default class ResponseParser { for (let i = 0; i < result.value.length; i++) { if ( typeof result.value[i].type === 'string' && - result.value[i].type.toLocaleLowerCase() === metricDefinition.toLocaleLowerCase() + (!metricDefinition || result.value[i].type.toLocaleLowerCase() === metricDefinition.toLocaleLowerCase()) ) { list.push({ text: result.value[i].name, diff --git a/public/app/plugins/datasource/grafana-azure-monitor-datasource/azure_monitor/url_builder.test.ts b/public/app/plugins/datasource/grafana-azure-monitor-datasource/azure_monitor/url_builder.test.ts index a7103ef1046..93f1b68cca3 100644 --- a/public/app/plugins/datasource/grafana-azure-monitor-datasource/azure_monitor/url_builder.test.ts +++ b/public/app/plugins/datasource/grafana-azure-monitor-datasource/azure_monitor/url_builder.test.ts @@ -92,7 +92,7 @@ describe('AzureMonitorUrlBuilder', () => { templateSrv ); expect(url).toBe( - 
'/subscriptions/sub/resource-uri/resource/providers/microsoft.insights/metricNamespaces?api-version=2017-05-01-preview' + '/subscriptions/sub/resource-uri/resource/providers/microsoft.insights/metricNamespaces?region=global&api-version=2017-05-01-preview' ); }); }); @@ -130,7 +130,7 @@ describe('AzureMonitorUrlBuilder', () => { ); expect(url).toBe( '/subscriptions/sub1/resourceGroups/rg/providers/Microsoft.NetApp/netAppAccounts/rn1/capacityPools/rn2/volumes/rn3/' + - 'providers/microsoft.insights/metricNamespaces?api-version=2017-05-01-preview' + 'providers/microsoft.insights/metricNamespaces?region=global&api-version=2017-05-01-preview' ); }); }); @@ -150,7 +150,7 @@ describe('AzureMonitorUrlBuilder', () => { ); expect(url).toBe( '/subscriptions/sub1/resourceGroups/rg/providers/Microsoft.Sql/servers/rn1/databases/rn2/' + - 'providers/microsoft.insights/metricNamespaces?api-version=2017-05-01-preview' + 'providers/microsoft.insights/metricNamespaces?region=global&api-version=2017-05-01-preview' ); }); }); @@ -170,7 +170,7 @@ describe('AzureMonitorUrlBuilder', () => { ); expect(url).toBe( '/subscriptions/sub1/resourceGroups/rg/providers/Microsoft.Sql/servers/rn/' + - 'providers/microsoft.insights/metricNamespaces?api-version=2017-05-01-preview' + 'providers/microsoft.insights/metricNamespaces?region=global&api-version=2017-05-01-preview' ); }); }); diff --git a/public/app/plugins/datasource/grafana-azure-monitor-datasource/azure_monitor/url_builder.ts b/public/app/plugins/datasource/grafana-azure-monitor-datasource/azure_monitor/url_builder.ts index d52df58b96e..b2a9b1212bb 100644 --- a/public/app/plugins/datasource/grafana-azure-monitor-datasource/azure_monitor/url_builder.ts +++ b/public/app/plugins/datasource/grafana-azure-monitor-datasource/azure_monitor/url_builder.ts @@ -57,7 +57,7 @@ export default class UrlBuilder { ); } - return `${baseUrl}${resourceUri}/providers/microsoft.insights/metricNamespaces?api-version=${apiVersion}`; + return 
`${baseUrl}${resourceUri}/providers/microsoft.insights/metricNamespaces?region=global&api-version=${apiVersion}`; } static buildAzureMonitorGetMetricNamesUrl( diff --git a/public/app/plugins/datasource/grafana-azure-monitor-datasource/components/VariableEditor/VariableEditor.test.tsx b/public/app/plugins/datasource/grafana-azure-monitor-datasource/components/VariableEditor/VariableEditor.test.tsx index 63a649aae0b..7c4c6cef5a1 100644 --- a/public/app/plugins/datasource/grafana-azure-monitor-datasource/components/VariableEditor/VariableEditor.test.tsx +++ b/public/app/plugins/datasource/grafana-azure-monitor-datasource/components/VariableEditor/VariableEditor.test.tsx @@ -29,7 +29,15 @@ const defaultProps = { subscription: 'id', }, onChange: jest.fn(), - datasource: createMockDatasource(), + datasource: createMockDatasource({ + getSubscriptions: jest.fn().mockResolvedValue([{ text: 'Primary Subscription', value: 'sub' }]), + getResourceGroups: jest.fn().mockResolvedValue([{ text: 'rg', value: 'rg' }]), + getMetricNamespaces: jest.fn().mockResolvedValue([{ text: 'foo/bar', value: 'foo/bar' }]), + getVariablesRaw: jest.fn().mockReturnValue([ + { label: 'query0', name: 'sub0' }, + { label: 'query1', name: 'rg', query: { queryType: AzureQueryType.ResourceGroupsQuery } }, + ]), + }), }; const originalConfigValue = grafanaRuntime.config.featureToggles.azTemplateVars; @@ -166,11 +174,8 @@ describe('VariableEditor:', () => { it('should run the query if requesting resource groups', async () => { grafanaRuntime.config.featureToggles.azTemplateVars = true; - const ds = createMockDatasource({ - getSubscriptions: jest.fn().mockResolvedValue([{ text: 'Primary Subscription', value: 'sub' }]), - }); const onChange = jest.fn(); - const { rerender } = render(); + const { rerender } = render(); // wait for initial load await waitFor(() => expect(screen.getByText('Logs')).toBeInTheDocument()); // Select RGs variable @@ -195,14 +200,7 @@ describe('VariableEditor:', () => { it('should 
show template variables as options ', async () => { const onChange = jest.fn(); grafanaRuntime.config.featureToggles.azTemplateVars = true; - const ds = createMockDatasource({ - getSubscriptions: jest.fn().mockResolvedValue([{ text: 'Primary Subscription', value: 'sub' }]), - getVariablesRaw: jest.fn().mockReturnValue([ - { label: 'query0', name: 'sub0' }, - { label: 'query1', name: 'rg', query: { queryType: AzureQueryType.ResourceGroupsQuery } }, - ]), - }); - const { rerender } = render(); + const { rerender } = render(); // wait for initial load await waitFor(() => expect(screen.getByText('Logs')).toBeInTheDocument()); // Select RGs variable @@ -210,7 +208,7 @@ describe('VariableEditor:', () => { screen.getByText('Resource Groups').click(); // Simulate onChange behavior const newQuery = onChange.mock.calls.at(-1)[0]; - rerender(); + rerender(); await waitFor(() => expect(screen.getByText('Select subscription')).toBeInTheDocument()); // Select a subscription openMenu(screen.getByLabelText('select subscription')); @@ -218,10 +216,60 @@ describe('VariableEditor:', () => { screen.getByText('Template Variables').click(); // Simulate onChange behavior const lastQuery = onChange.mock.calls.at(-1)[0]; - rerender(); + rerender(); await waitFor(() => expect(screen.getByText('query0')).toBeInTheDocument()); // Template variables of the same type than the current one should not appear expect(screen.queryByText('query1')).not.toBeInTheDocument(); }); + + it('should run the query if requesting namespaces', async () => { + grafanaRuntime.config.featureToggles.azTemplateVars = true; + const onChange = jest.fn(); + const { rerender } = render(); + // wait for initial load + await waitFor(() => expect(screen.getByText('Logs')).toBeInTheDocument()); + // Select RGs variable + openMenu(screen.getByLabelText('select query type')); + screen.getByText('Namespaces').click(); + // Simulate onChange behavior + const newQuery = onChange.mock.calls.at(-1)[0]; + rerender(); + await 
waitFor(() => expect(screen.getByText('Select subscription')).toBeInTheDocument()); + // Select a subscription + openMenu(screen.getByLabelText('select subscription')); + screen.getByText('Primary Subscription').click(); + expect(onChange).toHaveBeenCalledWith( + expect.objectContaining({ + queryType: AzureQueryType.NamespacesQuery, + subscription: 'sub', + refId: 'A', + }) + ); + }); + + it('should run the query if requesting resource names', async () => { + grafanaRuntime.config.featureToggles.azTemplateVars = true; + const onChange = jest.fn(); + const { rerender } = render(); + // wait for initial load + await waitFor(() => expect(screen.getByText('Logs')).toBeInTheDocument()); + // Select RGs variable + openMenu(screen.getByLabelText('select query type')); + screen.getByText('Resource Names').click(); + // Simulate onChange behavior + const newQuery = onChange.mock.calls.at(-1)[0]; + rerender(); + await waitFor(() => expect(screen.getByText('Select subscription')).toBeInTheDocument()); + // Select a subscription + openMenu(screen.getByLabelText('select subscription')); + screen.getByText('Primary Subscription').click(); + expect(onChange).toHaveBeenCalledWith( + expect.objectContaining({ + queryType: AzureQueryType.ResourceNamesQuery, + subscription: 'sub', + refId: 'A', + }) + ); + }); }); }); diff --git a/public/app/plugins/datasource/grafana-azure-monitor-datasource/components/VariableEditor/VariableEditor.tsx b/public/app/plugins/datasource/grafana-azure-monitor-datasource/components/VariableEditor/VariableEditor.tsx index 08cc4aae721..c2abb163768 100644 --- a/public/app/plugins/datasource/grafana-azure-monitor-datasource/components/VariableEditor/VariableEditor.tsx +++ b/public/app/plugins/datasource/grafana-azure-monitor-datasource/components/VariableEditor/VariableEditor.tsx @@ -21,6 +21,8 @@ type Props = { datasource: DataSource; }; +const removeOption: SelectableValue = { label: '-', value: '' }; + const VariableEditor = (props: Props) => { const { 
query, onChange, datasource } = props; const AZURE_QUERY_VARIABLE_TYPE_OPTIONS = [ @@ -30,13 +32,19 @@ const VariableEditor = (props: Props) => { if (config.featureToggles.azTemplateVars) { AZURE_QUERY_VARIABLE_TYPE_OPTIONS.push({ label: 'Subscriptions', value: AzureQueryType.SubscriptionsQuery }); AZURE_QUERY_VARIABLE_TYPE_OPTIONS.push({ label: 'Resource Groups', value: AzureQueryType.ResourceGroupsQuery }); + AZURE_QUERY_VARIABLE_TYPE_OPTIONS.push({ label: 'Namespaces', value: AzureQueryType.NamespacesQuery }); + AZURE_QUERY_VARIABLE_TYPE_OPTIONS.push({ label: 'Resource Names', value: AzureQueryType.ResourceNamesQuery }); } const [variableOptionGroup, setVariableOptionGroup] = useState<{ label: string; options: AzureMonitorOption[] }>({ label: 'Template Variables', options: [], }); const [requireSubscription, setRequireSubscription] = useState(false); + const [hasResourceGroup, setHasResourceGroup] = useState(false); + const [hasNamespace, setHasNamespace] = useState(false); const [subscriptions, setSubscriptions] = useState([]); + const [resourceGroups, setResourceGroups] = useState([]); + const [namespaces, setNamespaces] = useState([]); const [errorMessage, setError] = useLastError(); const queryType = typeof query === 'string' ? 
'' : query.queryType; @@ -47,12 +55,22 @@ const VariableEditor = (props: Props) => { }, [query, datasource, onChange]); useEffect(() => { + setRequireSubscription(false); + setHasResourceGroup(false); + setHasNamespace(false); switch (queryType) { case AzureQueryType.ResourceGroupsQuery: setRequireSubscription(true); break; - default: - setRequireSubscription(false); + case AzureQueryType.NamespacesQuery: + setRequireSubscription(true); + setHasResourceGroup(true); + break; + case AzureQueryType.ResourceNamesQuery: + setRequireSubscription(true); + setHasResourceGroup(true); + setHasNamespace(true); + break; } }, [queryType]); @@ -75,6 +93,24 @@ const VariableEditor = (props: Props) => { }); }); + const subscription = typeof query === 'object' && query.subscription; + useEffect(() => { + if (subscription) { + datasource.getResourceGroups(subscription).then((rgs) => { + setResourceGroups(rgs.map((s) => ({ label: s.text, value: s.value }))); + }); + } + }, [datasource, subscription]); + + const resourceGroup = (typeof query === 'object' && query.resourceGroup) || ''; + useEffect(() => { + if (subscription) { + datasource.getMetricNamespaces(subscription, resourceGroup).then((rgs) => { + setNamespaces(rgs.map((s) => ({ label: s.text, value: s.value }))); + }); + } + }, [datasource, subscription, resourceGroup]); + if (typeof query === 'string') { // still migrating the query return null; @@ -98,6 +134,20 @@ const VariableEditor = (props: Props) => { } }; + const onChangeResourceGroup = (selectableValue: SelectableValue) => { + onChange({ + ...query, + resourceGroup: selectableValue.value, + }); + }; + + const onChangeNamespace = (selectableValue: SelectableValue) => { + onChange({ + ...query, + namespace: selectableValue.value, + }); + }; + const onLogsQueryChange = (queryChange: AzureMonitorQuery) => { onChange(queryChange); }; @@ -113,7 +163,7 @@ const VariableEditor = (props: Props) => { value={queryType} /> - {typeof query === 'object' && query.queryType === 
AzureQueryType.LogAnalytics && ( + {query.queryType === AzureQueryType.LogAnalytics && ( <> { )} )} - {typeof query === 'object' && query.queryType === AzureQueryType.GrafanaTemplateVariableFn && ( + {query.queryType === AzureQueryType.GrafanaTemplateVariableFn && ( )} - {typeof query === 'object' && requireSubscription && ( + {requireSubscription && ( + + )} + {hasNamespace && ( + + )} @@ -118,8 +119,14 @@ export const InspectDataOptions: FC = ({ {showPanelTransformationsOption && onOptionsChange && ( = ({ )} {showFieldConfigsOption && onOptionsChange && ( = ({ /> )} - + diff --git a/public/app/features/inspector/InspectDataTab.tsx b/public/app/features/inspector/InspectDataTab.tsx index 4e6968fc8bd..850e4234116 100644 --- a/public/app/features/inspector/InspectDataTab.tsx +++ b/public/app/features/inspector/InspectDataTab.tsx @@ -1,4 +1,5 @@ import { css } from '@emotion/css'; +import { Trans, t } from '@lingui/macro'; import { saveAs } from 'file-saver'; import React, { PureComponent } from 'react'; import AutoSizer from 'react-virtualized-auto-sizer'; @@ -286,7 +287,7 @@ export class InspectDataTab extends PureComponent { margin-bottom: 10px; `} > - Download CSV + Download CSV {hasLogs && ( )} {hasTraces && ( @@ -309,7 +310,7 @@ export class InspectDataTab extends PureComponent { margin-left: 10px; `} > - Download traces + Download traces )} {hasServiceGraph && ( @@ -321,7 +322,7 @@ export class InspectDataTab extends PureComponent { margin-left: 10px; `} > - Download service graph + Download service graph )} @@ -349,7 +350,10 @@ function buildTransformationOptions() { const transformations: Array> = [ { value: DataTransformerID.seriesToColumns, - label: 'Series joined by time', + label: t({ + id: 'dashboard.inspect-data.transformation', + message: 'Series joined by time', + }), transformer: { id: DataTransformerID.seriesToColumns, options: { byField: 'Time' }, diff --git a/public/app/features/inspector/InspectJSONTab.tsx 
b/public/app/features/inspector/InspectJSONTab.tsx index 3c9c1adfca9..e5dd4e0c3a9 100644 --- a/public/app/features/inspector/InspectJSONTab.tsx +++ b/public/app/features/inspector/InspectJSONTab.tsx @@ -1,3 +1,4 @@ +import { t } from '@lingui/macro'; import React, { PureComponent } from 'react'; import AutoSizer from 'react-virtualized-auto-sizer'; @@ -17,18 +18,24 @@ enum ShowContent { const options: Array> = [ { - label: 'Panel JSON', - description: 'The model saved in the dashboard JSON that configures how everything works.', + label: t({ id: 'dashboard.inspect-json.panel-json-label', message: 'Panel JSON' }), + description: t({ + id: 'dashboard.inspect-json.panel-json-description', + message: 'The model saved in the dashboard JSON that configures how everything works.', + }), value: ShowContent.PanelJSON, }, { - label: 'Panel data', - description: 'The raw model passed to the panel visualization', + label: t({ id: 'dashboard.inspect-json.panel-data-label', message: 'Panel data' }), + description: t({ + id: 'dashboard.inspect-json.panel-data-description', + message: 'The raw model passed to the panel visualization', + }), value: ShowContent.PanelData, }, { - label: 'DataFrame JSON', - description: 'JSON formatted DataFrames', + label: t({ id: 'dashboard.inspect-json.dataframe-label', message: 'DataFrame JSON' }), + description: t({ id: 'dashboard.inspect-json.dataframe-description', message: 'JSON formatted DataFrames' }), value: ShowContent.DataFrames, }, ]; @@ -83,7 +90,7 @@ export class InspectJSONTab extends PureComponent { return panel!.getSaveModel(); } - return { note: `Unknown Object: ${show}` }; + return { note: t({ id: 'dashboard.inspect-json.unknown', message: `Unknown Object: ${show}` }) }; } onApplyPanelModel = () => { @@ -120,7 +127,10 @@ export class InspectJSONTab extends PureComponent { return (
- + -
-
-
- -
-
- -
-
-
-
- - -
-
Time series:
-- return column named time (in UTC), as a unix time stamp or any sql native date data type. You can use the macros below.
-- any other columns returned will be the time point values.
-Optional:
-  - return column named metric to represent the series name.
-  - If multiple value columns are returned the metric column is used as prefix.
-  - If no column named metric is found the column name of the value column is used as series name
-
-Resultsets of time series queries need to be sorted by time.
-
-Table:
-- return any set of columns
-
-Macros:
-- $__time(column) -> column AS time
-- $__timeEpoch(column) -> DATEDIFF(second, '1970-01-01', column) AS time
-- $__timeFilter(column) -> column BETWEEN '2017-04-21T05:01:17Z' AND '2017-04-21T05:01:17Z'
-- $__unixEpochFilter(column) -> column >= 1492750877 AND column <= 1492750877
-- $__unixEpochNanoFilter(column) ->  column >= 1494410783152415214 AND column <= 1494497183142514872
-- $__timeGroup(column, '5m'[, fillvalue]) -> CAST(ROUND(DATEDIFF(second, '1970-01-01', column)/300.0, 0) as bigint)*300.
-     by setting fillvalue grafana will fill in missing values according to the interval
-     fillvalue can be either a literal value, NULL or previous; previous will fill in the previous seen value or NULL if none has been seen yet
-- $__timeGroupAlias(column, '5m'[, fillvalue]) -> CAST(ROUND(DATEDIFF(second, '1970-01-01', column)/300.0, 0) as bigint)*300 AS [time]
-- $__unixEpochGroup(column,'5m') -> FLOOR(column/300)*300
-- $__unixEpochGroupAlias(column,'5m') -> FLOOR(column/300)*300 AS [time]
-
-Example of group by and order by with $__timeGroup:
-SELECT
-  $__timeGroup(date_time_col, '1h') AS time,
-  sum(value) as value
-FROM yourtable
-GROUP BY $__timeGroup(date_time_col, '1h')
-ORDER BY 1
-
-Or build your own conditionals using these macros which just return the values:
-- $__timeFrom() ->  '2017-04-21T05:01:17Z'
-- $__timeTo() ->  '2017-04-21T05:01:17Z'
-- $__unixEpochFrom() -> 1492750877
-- $__unixEpochTo() -> 1492750877
-- $__unixEpochNanoFrom() ->  1494410783152415214
-- $__unixEpochNanoTo() ->  1494497183142514872
-		
-
- - - -
-
{{ctrl.lastQueryMeta.executedQueryString}}
-
- -
-
{{ctrl.lastQueryError}}
-
- - diff --git a/public/app/plugins/datasource/mssql/query_ctrl.ts b/public/app/plugins/datasource/mssql/query_ctrl.ts deleted file mode 100644 index 2876cbd7395..00000000000 --- a/public/app/plugins/datasource/mssql/query_ctrl.ts +++ /dev/null @@ -1,65 +0,0 @@ -import { auto } from 'angular'; - -import { PanelEvents, QueryResultMeta } from '@grafana/data'; -import { QueryCtrl } from 'app/plugins/sdk'; - -import { MssqlQuery } from './types'; - -const defaultQuery = `SELECT - $__timeEpoch(), - as value, - as metric -FROM - -WHERE - $__timeFilter(time_column) -ORDER BY - ASC`; - -export class MssqlQueryCtrl extends QueryCtrl { - static templateUrl = 'partials/query.editor.html'; - - formats: any[]; - lastQueryMeta?: QueryResultMeta; - lastQueryError?: string; - showHelp = false; - - /** @ngInject */ - constructor($scope: any, $injector: auto.IInjectorService) { - super($scope, $injector); - - this.target.format = this.target.format || 'time_series'; - this.target.alias = ''; - this.formats = [ - { text: 'Time series', value: 'time_series' }, - { text: 'Table', value: 'table' }, - ]; - - if (!this.target.rawSql) { - // special handling when in table panel - if (this.panelCtrl.panel.type === 'table') { - this.target.format = 'table'; - this.target.rawSql = 'SELECT 1'; - } else { - this.target.rawSql = defaultQuery; - } - } - - this.panelCtrl.events.on(PanelEvents.dataReceived, this.onDataReceived.bind(this), $scope); - this.panelCtrl.events.on(PanelEvents.dataError, this.onDataError.bind(this), $scope); - } - - onDataReceived(dataList: any) { - this.lastQueryError = undefined; - this.lastQueryMeta = dataList[0]?.meta; - } - - onDataError(err: any) { - if (err.data && err.data.results) { - const queryRes = err.data.results[this.target.refId]; - if (queryRes) { - this.lastQueryError = queryRes.error; - } - } - } -} diff --git a/public/app/plugins/datasource/mssql/response_parser.ts b/public/app/plugins/datasource/mssql/response_parser.ts index 52e2af016e3..a23672002e1 
100644 --- a/public/app/plugins/datasource/mssql/response_parser.ts +++ b/public/app/plugins/datasource/mssql/response_parser.ts @@ -1,18 +1,10 @@ import { uniqBy } from 'lodash'; -import { AnnotationEvent, DataFrame, MetricFindValue } from '@grafana/data'; -import { BackendDataSourceResponse, toDataQueryResponse, FetchResponse } from '@grafana/runtime'; - -export default class ResponseParser { - transformMetricFindResponse(raw: FetchResponse): MetricFindValue[] { - const frames = toDataQueryResponse(raw).data as DataFrame[]; - - if (!frames || !frames.length) { - return []; - } - - const frame = frames[0]; +import { DataFrame, MetricFindValue } from '@grafana/data'; +import { ResponseParser } from 'app/features/plugins/sql/types'; +export class MSSqlResponseParser implements ResponseParser { + transformMetricFindResponse(frame: DataFrame): MetricFindValue[] { const values: MetricFindValue[] = []; const textField = frame.fields.find((f) => f.name === '__text'); const valueField = frame.fields.find((f) => f.name === '__value'); @@ -33,41 +25,4 @@ export default class ResponseParser { return uniqBy(values, 'text'); } - - async transformAnnotationResponse(options: any, data: BackendDataSourceResponse): Promise { - const frames = toDataQueryResponse({ data: data }).data as DataFrame[]; - if (!frames || !frames.length) { - return []; - } - const frame = frames[0]; - const timeField = frame.fields.find((f) => f.name === 'time'); - - if (!timeField) { - return Promise.reject({ message: 'Missing mandatory time column (with time column alias) in annotation query.' }); - } - - const timeEndField = frame.fields.find((f) => f.name === 'timeend'); - const textField = frame.fields.find((f) => f.name === 'text'); - const tagsField = frame.fields.find((f) => f.name === 'tags'); - - const list: AnnotationEvent[] = []; - for (let i = 0; i < frame.length; i++) { - const timeEnd = timeEndField && timeEndField.values.get(i) ? 
Math.floor(timeEndField.values.get(i)) : undefined; - list.push({ - annotation: options.annotation, - time: Math.floor(timeField.values.get(i)), - timeEnd, - text: textField && textField.values.get(i) ? textField.values.get(i) : '', - tags: - tagsField && tagsField.values.get(i) - ? tagsField.values - .get(i) - .trim() - .split(/\s*,\s*/) - : [], - }); - } - - return list; - } } diff --git a/public/app/plugins/datasource/mssql/sqlCompletionProvider.ts b/public/app/plugins/datasource/mssql/sqlCompletionProvider.ts new file mode 100644 index 00000000000..cc02003ae13 --- /dev/null +++ b/public/app/plugins/datasource/mssql/sqlCompletionProvider.ts @@ -0,0 +1,136 @@ +import { + ColumnDefinition, + CompletionItemKind, + CompletionItemPriority, + LanguageCompletionProvider, + LinkedToken, + StatementPlacementProvider, + SuggestionKindProvider, + TableDefinition, + TokenType, +} from '@grafana/experimental'; +import { AGGREGATE_FNS, OPERATORS } from 'app/features/plugins/sql/constants'; +import { DB, SQLQuery } from 'app/features/plugins/sql/types'; + +import { SCHEMA_NAME } from './sqlUtil'; + +interface CompletionProviderGetterArgs { + getColumns: React.MutableRefObject<(t: SQLQuery) => Promise>; + getTables: React.MutableRefObject<(d?: string) => Promise>; +} + +export const getSqlCompletionProvider: (args: CompletionProviderGetterArgs) => LanguageCompletionProvider = + ({ getColumns, getTables }) => + () => ({ + triggerCharacters: ['.', ' ', '$', ',', '(', "'"], + tables: { + resolve: async () => { + return await getTables.current(); + }, + parseName: (token: LinkedToken) => { + let processedToken = token; + let tablePath = processedToken.value; + + while (processedToken.next && processedToken.next.type !== TokenType.Whitespace) { + tablePath += processedToken.next.value; + processedToken = processedToken.next; + } + + const tableName = tablePath.split('.').pop(); + + return tableName || tablePath; + }, + }, + + columns: { + resolve: async (t: string) => { + return 
await getColumns.current({ table: t, refId: 'A' }); + }, + }, + supportedFunctions: () => AGGREGATE_FNS, + supportedOperators: () => OPERATORS, + customSuggestionKinds: customSuggestionKinds(getTables, getColumns), + customStatementPlacement, + }); + +export enum CustomStatementPlacement { + AfterDatabase = 'afterDatabase', +} + +export enum CustomSuggestionKind { + TablesWithinDatabase = 'tablesWithinDatabase', +} + +export const customStatementPlacement: StatementPlacementProvider = () => [ + { + id: CustomStatementPlacement.AfterDatabase, + resolve: (currentToken, previousKeyword) => { + return Boolean( + currentToken?.is(TokenType.Delimiter, '.') || + (currentToken?.is(TokenType.Whitespace) && currentToken?.previous?.is(TokenType.Delimiter, '.')) || + (currentToken?.isNumber() && currentToken.value.endsWith('.')) + ); + }, + }, +]; + +export const customSuggestionKinds: ( + getTables: CompletionProviderGetterArgs['getTables'], + getFields: CompletionProviderGetterArgs['getColumns'] +) => SuggestionKindProvider = (getTables) => () => + [ + { + id: CustomSuggestionKind.TablesWithinDatabase, + applyTo: [CustomStatementPlacement.AfterDatabase], + suggestionsResolver: async (ctx) => { + const tablePath = ctx.currentToken ? getDatabaseName(ctx.currentToken) : ''; + const t = await getTables.current(tablePath); + + return t.map((table) => ({ + label: table.name, + insertText: table.completion ?? 
table.name, + command: { id: 'editor.action.triggerSuggest', title: '' }, + kind: CompletionItemKind.Field, + sortText: CompletionItemPriority.High, + range: { + ...ctx.range, + startColumn: ctx.range.endColumn, + endColumn: ctx.range.endColumn, + }, + })); + }, + }, + ]; + +export function getDatabaseName(token: LinkedToken) { + let processedToken = token; + let database = ''; + while (processedToken?.previous && !processedToken.previous.isWhiteSpace()) { + processedToken = processedToken.previous; + database = processedToken.value + database; + } + + if (database.includes(SCHEMA_NAME)) { + database = database.replace(SCHEMA_NAME, ''); + } + + database = database.trim(); + + return database; +} + +export async function fetchColumns(db: DB, q: SQLQuery) { + const cols = await db.fields(q); + if (cols.length > 0) { + return cols.map((c) => { + return { name: c.value, type: c.value, description: c.value }; + }); + } else { + return []; + } +} + +export async function fetchTables(db: DB, dataset?: string) { + const tables = await db.lookup(dataset); + return tables; +} diff --git a/public/app/plugins/datasource/mssql/sqlUtil.ts b/public/app/plugins/datasource/mssql/sqlUtil.ts new file mode 100644 index 00000000000..2aa41cb3213 --- /dev/null +++ b/public/app/plugins/datasource/mssql/sqlUtil.ts @@ -0,0 +1,131 @@ +import { isEmpty } from 'lodash'; + +import { RAQBFieldTypes, SQLExpression, SQLQuery } from 'app/features/plugins/sql/types'; +import { haveColumns } from 'app/features/plugins/sql/utils/sql.utils'; + +export function getIcon(type: string): string | undefined { + switch (type) { + case 'datetimeoffset': + case 'date': + case 'datetime2': + case 'smalldatetime': + case 'datetime': + case 'time': + return 'clock-nine'; + case 'bit': + return 'toggle-off'; + case 'tinyint': + case 'smallint': + case 'int': + case 'bigint': + case 'decimal': + case 'numeric': + case 'real': + case 'float': + case 'money': + case 'smallmoney': + return 'calculator-alt'; + case 
'char': + case 'varchar': + case 'text': + case 'nchar': + case 'nvarchar': + case 'ntext': + case 'binary': + case 'varbinary': + case 'image': + return 'text'; + default: + return undefined; + } +} + +export function getRAQBType(type: string): RAQBFieldTypes { + switch (type) { + case 'datetimeoffset': + case 'datetime2': + case 'smalldatetime': + case 'datetime': + return 'datetime'; + case 'time': + return 'time'; + case 'date': + return 'date'; + case 'bit': + return 'boolean'; + case 'tinyint': + case 'smallint': + case 'int': + case 'bigint': + case 'decimal': + case 'numeric': + case 'real': + case 'float': + case 'money': + case 'smallmoney': + return 'number'; + case 'char': + case 'varchar': + case 'text': + case 'nchar': + case 'nvarchar': + case 'ntext': + case 'binary': + case 'varbinary': + case 'image': + return 'text'; + default: + return 'text'; + } +} + +export const SCHEMA_NAME = 'dbo'; + +export function toRawSql({ sql, dataset, table }: SQLQuery): string { + let rawQuery = ''; + + // Return early with empty string if there is no sql column + if (!sql || !haveColumns(sql.columns)) { + return rawQuery; + } + + rawQuery += createSelectClause(sql.columns, sql.limit); + + if (dataset && table) { + rawQuery += `FROM ${dataset}.${SCHEMA_NAME}.${table} `; + } + + if (sql.whereString) { + rawQuery += `WHERE ${sql.whereString} `; + } + + if (sql.groupBy?.[0]?.property.name) { + const groupBy = sql.groupBy.map((g) => g.property.name).filter((g) => !isEmpty(g)); + rawQuery += `GROUP BY ${groupBy.join(', ')} `; + } + + if (sql.orderBy?.property.name) { + rawQuery += `ORDER BY ${sql.orderBy.property.name} `; + } + + if (sql.orderBy?.property.name && sql.orderByDirection) { + rawQuery += `${sql.orderByDirection} `; + } + + return rawQuery; +} + +function createSelectClause(sqlColumns: NonNullable, limit?: number): string { + const columns = sqlColumns.map((c) => { + let rawColumn = ''; + if (c.name) { + rawColumn += `${c.name}(${c.parameters?.map((p) => 
`${p.name}`)})`; + } else { + rawColumn += `${c.parameters?.map((p) => `${p.name}`)}`; + } + return rawColumn; + }); + return `SELECT ${isLimit(limit) ? 'TOP(' + limit + ')' : ''} ${columns.join(', ')} `; +} + +const isLimit = (limit: number | undefined): boolean => limit !== undefined && limit >= 0; diff --git a/public/app/plugins/datasource/mssql/types.ts b/public/app/plugins/datasource/mssql/types.ts index 58ffb949a28..2f93dd07977 100644 --- a/public/app/plugins/datasource/mssql/types.ts +++ b/public/app/plugins/datasource/mssql/types.ts @@ -1,21 +1,4 @@ -import { DataQuery, DataSourceJsonData } from '@grafana/data'; -import { SQLConnectionLimits } from 'app/features/plugins/sql/components/configuration/types'; - -export interface MssqlQueryForInterpolation { - alias?: any; - format?: any; - rawSql?: any; - refId: any; - hide?: any; -} - -export type ResultFormat = 'time_series' | 'table'; - -export interface MssqlQuery extends DataQuery { - alias?: string; - format?: ResultFormat; - rawSql?: any; -} +import { SQLOptions } from 'app/features/plugins/sql/types'; export enum MSSQLAuthenticationType { sqlAuth = 'SQL Server Authentication', @@ -27,14 +10,9 @@ export enum MSSQLEncryptOptions { false = 'false', true = 'true', } -export interface MssqlOptions extends DataSourceJsonData, SQLConnectionLimits { - authenticationType: MSSQLAuthenticationType; - encrypt: MSSQLEncryptOptions; - serverName: string; - sslRootCertFile: string; - tlsSkipVerify: boolean; - url: string; - database: string; - timeInterval: string; - user: string; +export interface MssqlOptions extends SQLOptions { + authenticationType?: MSSQLAuthenticationType; + encrypt?: MSSQLEncryptOptions; + sslRootCertFile?: string; + serverName?: string; } diff --git a/public/app/plugins/datasource/mysql/types.ts b/public/app/plugins/datasource/mysql/types.ts index 88b20f3aade..aadcaa1f84f 100644 --- a/public/app/plugins/datasource/mysql/types.ts +++ b/public/app/plugins/datasource/mysql/types.ts @@ -1,5 +1,5 
@@ import { DataQuery, DataSourceJsonData } from '@grafana/data'; -import { SQLConnectionLimits } from 'app/features/plugins/sql/components/configuration/types'; +import { SQLConnectionLimits } from 'app/features/plugins/sql/types'; export interface MysqlQueryForInterpolation { alias?: any; format?: any; diff --git a/public/app/plugins/datasource/postgres/types.ts b/public/app/plugins/datasource/postgres/types.ts index 7f5cdc1f18d..a07ff7a831a 100644 --- a/public/app/plugins/datasource/postgres/types.ts +++ b/public/app/plugins/datasource/postgres/types.ts @@ -1,5 +1,5 @@ import { DataQuery, DataSourceJsonData } from '@grafana/data'; -import { SQLConnectionLimits } from 'app/features/plugins/sql/components/configuration/types'; +import { SQLConnectionLimits } from 'app/features/plugins/sql/types'; export enum PostgresTLSModes { disable = 'disable', From 6e1e4a42151ccaf250b925ccc25f396784e77943 Mon Sep 17 00:00:00 2001 From: Yuriy Tseretyan Date: Fri, 15 Jul 2022 14:13:30 -0400 Subject: [PATCH 016/116] Alerting: Update DbStore to use disabled orgs from the config (#52156) * update DbStore to use UnifiedAlerting settings * remove disabled orgs from scheduler and use config in db store instead * remove test --- pkg/services/ngalert/api/api_provisioning_test.go | 14 +++++++++----- pkg/services/ngalert/models/alert_rule.go | 2 -- pkg/services/ngalert/ngalert.go | 3 +-- pkg/services/ngalert/notifier/alertmanager_test.go | 6 ++++-- .../ngalert/provisioning/alert_rules_test.go | 8 ++++++-- pkg/services/ngalert/schedule/fetcher.go | 6 ++---- pkg/services/ngalert/schedule/schedule.go | 10 ++-------- pkg/services/ngalert/schedule/schedule_test.go | 14 -------------- pkg/services/ngalert/store/alert_rule.go | 8 ++++---- pkg/services/ngalert/store/alert_rule_test.go | 9 ++++++--- pkg/services/ngalert/store/database.go | 6 ++---- pkg/services/ngalert/tests/util.go | 6 ++++-- pkg/services/provisioning/provisioning.go | 3 +-- 13 files changed, 41 insertions(+), 54 deletions(-) 
diff --git a/pkg/services/ngalert/api/api_provisioning_test.go b/pkg/services/ngalert/api/api_provisioning_test.go index 9cdf7b67e6e..db9cd568617 100644 --- a/pkg/services/ngalert/api/api_provisioning_test.go +++ b/pkg/services/ngalert/api/api_provisioning_test.go @@ -8,6 +8,10 @@ import ( "testing" "time" + prometheus "github.com/prometheus/alertmanager/config" + "github.com/prometheus/alertmanager/timeinterval" + "github.com/stretchr/testify/require" + "github.com/grafana/grafana/pkg/components/simplejson" "github.com/grafana/grafana/pkg/infra/log" gfcore "github.com/grafana/grafana/pkg/models" @@ -18,10 +22,8 @@ import ( "github.com/grafana/grafana/pkg/services/secrets" secrets_fakes "github.com/grafana/grafana/pkg/services/secrets/fakes" "github.com/grafana/grafana/pkg/services/sqlstore" + "github.com/grafana/grafana/pkg/setting" "github.com/grafana/grafana/pkg/web" - prometheus "github.com/prometheus/alertmanager/config" - "github.com/prometheus/alertmanager/timeinterval" - "github.com/stretchr/testify/require" ) func TestProvisioningApi(t *testing.T) { @@ -322,8 +324,10 @@ func createTestEnv(t *testing.T) testEnvironment { }) sqlStore := sqlstore.InitTestDB(t) store := store.DBstore{ - SQLStore: sqlStore, - BaseInterval: time.Second * 10, + SQLStore: sqlStore, + Cfg: setting.UnifiedAlertingSettings{ + BaseInterval: time.Second * 10, + }, } quotas := &provisioning.MockQuotaChecker{} quotas.EXPECT().LimitOK() diff --git a/pkg/services/ngalert/models/alert_rule.go b/pkg/services/ngalert/models/alert_rule.go index 4363c3d0475..7c2d26b5b87 100644 --- a/pkg/services/ngalert/models/alert_rule.go +++ b/pkg/services/ngalert/models/alert_rule.go @@ -316,8 +316,6 @@ type ListAlertRulesQuery struct { } type GetAlertRulesForSchedulingQuery struct { - ExcludeOrgIDs []int64 - Result []*SchedulableAlertRule } diff --git a/pkg/services/ngalert/ngalert.go b/pkg/services/ngalert/ngalert.go index 9f58b95e978..5435e1f7250 100644 --- a/pkg/services/ngalert/ngalert.go +++ 
b/pkg/services/ngalert/ngalert.go @@ -104,8 +104,7 @@ func (ng *AlertNG) init() error { var err error store := &store.DBstore{ - BaseInterval: ng.Cfg.UnifiedAlerting.BaseInterval, - DefaultInterval: ng.Cfg.UnifiedAlerting.DefaultRuleEvaluationInterval, + Cfg: ng.Cfg.UnifiedAlerting, SQLStore: ng.SQLStore, Logger: ng.Log, FolderService: ng.folderService, diff --git a/pkg/services/ngalert/notifier/alertmanager_test.go b/pkg/services/ngalert/notifier/alertmanager_test.go index c308e800dca..07adcaebbe5 100644 --- a/pkg/services/ngalert/notifier/alertmanager_test.go +++ b/pkg/services/ngalert/notifier/alertmanager_test.go @@ -36,8 +36,10 @@ func setupAMTest(t *testing.T) *Alertmanager { m := metrics.NewAlertmanagerMetrics(prometheus.NewRegistry()) sqlStore := sqlstore.InitTestDB(t) s := &store.DBstore{ - BaseInterval: 10 * time.Second, - DefaultInterval: 60 * time.Second, + Cfg: setting.UnifiedAlertingSettings{ + BaseInterval: 10 * time.Second, + DefaultRuleEvaluationInterval: 60 * time.Second, + }, SQLStore: sqlStore, Logger: log.New("alertmanager-test"), DashboardService: dashboards.NewFakeDashboardService(t), diff --git a/pkg/services/ngalert/provisioning/alert_rules_test.go b/pkg/services/ngalert/provisioning/alert_rules_test.go index f020f5b68d8..87728a9db9b 100644 --- a/pkg/services/ngalert/provisioning/alert_rules_test.go +++ b/pkg/services/ngalert/provisioning/alert_rules_test.go @@ -10,6 +10,8 @@ import ( "github.com/grafana/grafana/pkg/services/ngalert/models" "github.com/grafana/grafana/pkg/services/ngalert/store" "github.com/grafana/grafana/pkg/services/sqlstore" + "github.com/grafana/grafana/pkg/setting" + "github.com/stretchr/testify/require" ) @@ -174,8 +176,10 @@ func createAlertRuleService(t *testing.T) AlertRuleService { t.Helper() sqlStore := sqlstore.InitTestDB(t) store := store.DBstore{ - SQLStore: sqlStore, - BaseInterval: time.Second * 10, + SQLStore: sqlStore, + Cfg: setting.UnifiedAlertingSettings{ + BaseInterval: time.Second * 10, + }, } quotas 
:= MockQuotaChecker{} quotas.EXPECT().LimitOK() diff --git a/pkg/services/ngalert/schedule/fetcher.go b/pkg/services/ngalert/schedule/fetcher.go index 9db18218a9b..dd5a4de78b8 100644 --- a/pkg/services/ngalert/schedule/fetcher.go +++ b/pkg/services/ngalert/schedule/fetcher.go @@ -36,16 +36,14 @@ func sortedUIDs(alertRules []*models.SchedulableAlertRule) []string { // updateSchedulableAlertRules updates the alert rules for the scheduler. // It returns an error if the database is unavailable or the query returned // an error. -func (sch *schedule) updateSchedulableAlertRules(ctx context.Context, disabledOrgs []int64) error { +func (sch *schedule) updateSchedulableAlertRules(ctx context.Context) error { start := time.Now() defer func() { sch.metrics.UpdateSchedulableAlertRulesDuration.Observe( time.Since(start).Seconds()) }() - q := models.GetAlertRulesForSchedulingQuery{ - ExcludeOrgIDs: disabledOrgs, - } + q := models.GetAlertRulesForSchedulingQuery{} if err := sch.ruleStore.GetAlertRulesForScheduling(ctx, &q); err != nil { return fmt.Errorf("failed to get alert rules: %w", err) } diff --git a/pkg/services/ngalert/schedule/schedule.go b/pkg/services/ngalert/schedule/schedule.go index 52a3a8e563f..ebf71545b34 100644 --- a/pkg/services/ngalert/schedule/schedule.go +++ b/pkg/services/ngalert/schedule/schedule.go @@ -90,7 +90,6 @@ type schedule struct { metrics *metrics.Scheduler alertsSender AlertsSender - disabledOrgs map[int64]struct{} minRuleInterval time.Duration // schedulableAlertRules contains the alert rules that are considered for @@ -137,7 +136,6 @@ func NewScheduler(cfg SchedulerCfg, appURL *url.URL, stateManager *state.Manager appURL: appURL, disableGrafanaFolder: cfg.Cfg.ReservedLabels.IsReservedLabelDisabled(ngmodels.FolderTitleLabel), stateManager: stateManager, - disabledOrgs: cfg.Cfg.DisabledOrgs, minRuleInterval: cfg.Cfg.MinInterval, schedulableAlertRules: schedulableAlertRulesRegistry{rules: 
make(map[ngmodels.AlertRuleKey]*ngmodels.SchedulableAlertRule)}, bus: bus, @@ -224,17 +222,13 @@ func (sch *schedule) schedulePeriodic(ctx context.Context) error { sch.metrics.BehindSeconds.Set(start.Sub(tick).Seconds()) tickNum := tick.Unix() / int64(sch.baseInterval.Seconds()) - disabledOrgs := make([]int64, 0, len(sch.disabledOrgs)) - for disabledOrg := range sch.disabledOrgs { - disabledOrgs = append(disabledOrgs, disabledOrg) - } - if err := sch.updateSchedulableAlertRules(ctx, disabledOrgs); err != nil { + if err := sch.updateSchedulableAlertRules(ctx); err != nil { sch.log.Error("scheduler failed to update alert rules", "err", err) } alertRules := sch.schedulableAlertRules.all() - sch.log.Debug("alert rules fetched", "count", len(alertRules), "disabled_orgs", disabledOrgs) + sch.log.Debug("alert rules fetched", "count", len(alertRules)) // registeredDefinitions is a map used for finding deleted alert rules // initially it is assigned to all known alert rules from the previous cycle diff --git a/pkg/services/ngalert/schedule/schedule_test.go b/pkg/services/ngalert/schedule/schedule_test.go index 75f5a3e5e1f..41a1b7d0328 100644 --- a/pkg/services/ngalert/schedule/schedule_test.go +++ b/pkg/services/ngalert/schedule/schedule_test.go @@ -139,8 +139,6 @@ func TestAlertingTicker(t *testing.T) { // create alert rule under main org with one second interval alerts = append(alerts, tests.CreateTestAlertRule(t, ctx, dbstore, 1, mainOrgID)) - const disabledOrgID int64 = 3 - evalAppliedCh := make(chan evalAppliedInfo, len(alerts)) stopAppliedCh := make(chan models.AlertRuleKey, len(alerts)) @@ -149,9 +147,6 @@ func TestAlertingTicker(t *testing.T) { cfg := setting.UnifiedAlertingSettings{ BaseInterval: time.Second, AdminConfigPollInterval: 10 * time.Minute, // do not poll in unit tests. 
- DisabledOrgs: map[int64]struct{}{ - disabledOrgID: {}, - }, } notifier := &schedule.AlertsSenderMock{} @@ -243,15 +238,6 @@ func TestAlertingTicker(t *testing.T) { tick := advanceClock(t, mockedClock) assertEvalRun(t, evalAppliedCh, tick, expectedAlertRulesEvaluated...) }) - - // create alert rule with one second interval under disabled org - alerts = append(alerts, tests.CreateTestAlertRule(t, ctx, dbstore, 1, disabledOrgID)) - - expectedAlertRulesEvaluated = []models.AlertRuleKey{alerts[2].GetKey()} - t.Run(fmt.Sprintf("on 8th tick alert rules: %s should be evaluated", concatenate(expectedAlertRulesEvaluated)), func(t *testing.T) { - tick := advanceClock(t, mockedClock) - assertEvalRun(t, evalAppliedCh, tick, expectedAlertRulesEvaluated...) - }) } func assertEvalRun(t *testing.T, ch <-chan evalAppliedInfo, tick time.Time, keys ...models.AlertRuleKey) { diff --git a/pkg/services/ngalert/store/alert_rule.go b/pkg/services/ngalert/store/alert_rule.go index f63ff11bcef..d22a2bd59ca 100644 --- a/pkg/services/ngalert/store/alert_rule.go +++ b/pkg/services/ngalert/store/alert_rule.go @@ -403,9 +403,9 @@ func (st DBstore) GetAlertRulesForScheduling(ctx context.Context, query *ngmodel return st.SQLStore.WithDbSession(ctx, func(sess *sqlstore.DBSession) error { alerts := make([]*ngmodels.SchedulableAlertRule, 0) q := sess.Table("alert_rule") - if len(query.ExcludeOrgIDs) > 0 { - excludeOrgs := make([]interface{}, 0, len(query.ExcludeOrgIDs)) - for _, orgID := range query.ExcludeOrgIDs { + if len(st.Cfg.DisabledOrgs) > 0 { + excludeOrgs := make([]interface{}, 0, len(st.Cfg.DisabledOrgs)) + for orgID := range st.Cfg.DisabledOrgs { excludeOrgs = append(excludeOrgs, orgID) } q = q.NotIn("org_id", excludeOrgs...) 
@@ -449,7 +449,7 @@ func (st DBstore) validateAlertRule(alertRule ngmodels.AlertRule) error { return fmt.Errorf("%w: title is empty", ngmodels.ErrAlertRuleFailedValidation) } - if err := ngmodels.ValidateRuleGroupInterval(alertRule.IntervalSeconds, int64(st.BaseInterval.Seconds())); err != nil { + if err := ngmodels.ValidateRuleGroupInterval(alertRule.IntervalSeconds, int64(st.Cfg.BaseInterval.Seconds())); err != nil { return err } diff --git a/pkg/services/ngalert/store/alert_rule_test.go b/pkg/services/ngalert/store/alert_rule_test.go index ff6f039991e..696fdf4e0ac 100644 --- a/pkg/services/ngalert/store/alert_rule_test.go +++ b/pkg/services/ngalert/store/alert_rule_test.go @@ -12,18 +12,21 @@ import ( "github.com/grafana/grafana/pkg/services/ngalert/models" "github.com/grafana/grafana/pkg/services/sqlstore" + "github.com/grafana/grafana/pkg/setting" "github.com/grafana/grafana/pkg/util" ) func TestUpdateAlertRules(t *testing.T) { sqlStore := sqlstore.InitTestDB(t) store := DBstore{ - SQLStore: sqlStore, - BaseInterval: time.Duration(rand.Int63n(100)) * time.Second, + SQLStore: sqlStore, + Cfg: setting.UnifiedAlertingSettings{ + BaseInterval: time.Duration(rand.Int63n(100)) * time.Second, + }, } createRule := func(t *testing.T) *models.AlertRule { t.Helper() - rule := models.AlertRuleGen(withIntervalMatching(store.BaseInterval))() + rule := models.AlertRuleGen(withIntervalMatching(store.Cfg.BaseInterval))() err := sqlStore.WithDbSession(context.Background(), func(sess *sqlstore.DBSession) error { _, err := sess.Table(models.AlertRule{}).InsertOne(rule) if err != nil { diff --git a/pkg/services/ngalert/store/database.go b/pkg/services/ngalert/store/database.go index 7e0d40ae94b..a86e5168186 100644 --- a/pkg/services/ngalert/store/database.go +++ b/pkg/services/ngalert/store/database.go @@ -9,6 +9,7 @@ import ( "github.com/grafana/grafana/pkg/services/dashboards" "github.com/grafana/grafana/pkg/services/ngalert/models" 
"github.com/grafana/grafana/pkg/services/sqlstore" + "github.com/grafana/grafana/pkg/setting" ) // TimeNow makes it possible to test usage of time @@ -28,10 +29,7 @@ type AlertingStore interface { // DBstore stores the alert definitions and instances in the database. type DBstore struct { - // the base scheduler tick rate; it's used for validating definition interval - BaseInterval time.Duration - // default alert definiiton interval - DefaultInterval time.Duration + Cfg setting.UnifiedAlertingSettings SQLStore *sqlstore.SQLStore Logger log.Logger FolderService dashboards.FolderService diff --git a/pkg/services/ngalert/tests/util.go b/pkg/services/ngalert/tests/util.go index 3426ffe0ca2..e9b7f21669e 100644 --- a/pkg/services/ngalert/tests/util.go +++ b/pkg/services/ngalert/tests/util.go @@ -68,8 +68,10 @@ func SetupTestEnv(t *testing.T, baseInterval time.Duration) (*ngalert.AlertNG, * ) require.NoError(t, err) return ng, &store.DBstore{ - SQLStore: ng.SQLStore, - BaseInterval: baseInterval * time.Second, + SQLStore: ng.SQLStore, + Cfg: setting.UnifiedAlertingSettings{ + BaseInterval: baseInterval * time.Second, + }, Logger: log.New("ngalert-test"), DashboardService: dashboardService, } diff --git a/pkg/services/provisioning/provisioning.go b/pkg/services/provisioning/provisioning.go index a4c458e73c3..8f50c2c2b5a 100644 --- a/pkg/services/provisioning/provisioning.go +++ b/pkg/services/provisioning/provisioning.go @@ -247,8 +247,7 @@ func (ps *ProvisioningServiceImpl) ProvisionDashboards(ctx context.Context) erro func (ps *ProvisioningServiceImpl) ProvisionAlertRules(ctx context.Context) error { alertRulesPath := filepath.Join(ps.Cfg.ProvisioningPath, "alerting") st := store.DBstore{ - BaseInterval: ps.Cfg.UnifiedAlerting.BaseInterval, - DefaultInterval: ps.Cfg.UnifiedAlerting.DefaultRuleEvaluationInterval, + Cfg: ps.Cfg.UnifiedAlerting, SQLStore: ps.SQLStore, Logger: ps.log, FolderService: nil, // we don't use it yet From 53933972b6602d5f5294f51d13a669c0aa759242 
Mon Sep 17 00:00:00 2001 From: Scott Lepper Date: Fri, 15 Jul 2022 16:49:24 -0400 Subject: [PATCH 017/116] mysql query editor - angular to react (#50343) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit mysql conversion to react Co-authored-by: Zoltán Bedi --- .betterer.results | 114 +-- pkg/services/dashboards/store_mock.go | 2 +- .../visual-query-builder/SQLOrderByRow.tsx | 14 +- .../visual-query-builder/SQLSelectRow.tsx | 10 +- .../visual-query-builder/SelectRow.tsx | 10 +- .../plugins/sql/datasource/SqlDatasource.ts | 1 + public/app/features/plugins/sql/types.ts | 17 + .../plugins/datasource/mssql/datasource.ts | 2 + .../datasource/mysql/MySqlDatasource.ts | 112 +++ .../datasource/mysql/MySqlQueryModel.ts | 61 ++ .../datasource/mysql/MySqlResponseParser.ts | 27 + .../plugins/datasource/mysql/datasource.ts | 212 ------ public/app/plugins/datasource/mysql/fields.ts | 91 +++ .../app/plugins/datasource/mysql/functions.ts | 20 + .../plugins/datasource/mysql/meta_query.ts | 142 ---- public/app/plugins/datasource/mysql/module.ts | 43 +- .../datasource/mysql/mySqlMetaQuery.ts | 60 ++ .../datasource/mysql/mysql_query_model.ts | 236 ------- .../mysql/partials/annotations.editor.html | 54 -- .../mysql/partials/query.editor.html | 190 ----- .../plugins/datasource/mysql/query_ctrl.ts | 647 ------------------ .../datasource/mysql/response_parser.ts | 77 --- .../datasource/mysql/specs/datasource.test.ts | 64 +- .../datasource/mysql/sqlCompletionProvider.ts | 281 ++++++++ .../app/plugins/datasource/mysql/sql_part.ts | 86 --- public/app/plugins/datasource/mysql/types.ts | 23 +- 26 files changed, 715 insertions(+), 1881 deletions(-) create mode 100644 public/app/plugins/datasource/mysql/MySqlDatasource.ts create mode 100644 public/app/plugins/datasource/mysql/MySqlQueryModel.ts create mode 100644 public/app/plugins/datasource/mysql/MySqlResponseParser.ts delete mode 100644 public/app/plugins/datasource/mysql/datasource.ts create mode 
100644 public/app/plugins/datasource/mysql/fields.ts create mode 100644 public/app/plugins/datasource/mysql/functions.ts delete mode 100644 public/app/plugins/datasource/mysql/meta_query.ts create mode 100644 public/app/plugins/datasource/mysql/mySqlMetaQuery.ts delete mode 100644 public/app/plugins/datasource/mysql/mysql_query_model.ts delete mode 100644 public/app/plugins/datasource/mysql/partials/annotations.editor.html delete mode 100644 public/app/plugins/datasource/mysql/partials/query.editor.html delete mode 100644 public/app/plugins/datasource/mysql/query_ctrl.ts delete mode 100644 public/app/plugins/datasource/mysql/response_parser.ts create mode 100644 public/app/plugins/datasource/mysql/sqlCompletionProvider.ts delete mode 100644 public/app/plugins/datasource/mysql/sql_part.ts diff --git a/.betterer.results b/.betterer.results index 715969079db..343a79f5634 100644 --- a/.betterer.results +++ b/.betterer.results @@ -7778,125 +7778,15 @@ exports[`better eslint`] = { [0, 0, 0, "Do not use any type assertions.", "1"], [0, 0, 0, "Do not use any type assertions.", "2"] ], - "public/app/plugins/datasource/mysql/datasource.ts:5381": [ - [0, 0, 0, "Unexpected any. Specify a different type.", "0"], - [0, 0, 0, "Unexpected any. Specify a different type.", "1"], - [0, 0, 0, "Do not use any type assertions.", "2"], - [0, 0, 0, "Unexpected any. Specify a different type.", "3"], - [0, 0, 0, "Unexpected any. Specify a different type.", "4"], - [0, 0, 0, "Unexpected any. Specify a different type.", "5"], - [0, 0, 0, "Do not use any type assertions.", "6"], - [0, 0, 0, "Unexpected any. Specify a different type.", "7"], - [0, 0, 0, "Unexpected any. Specify a different type.", "8"], - [0, 0, 0, "Unexpected any. Specify a different type.", "9"], - [0, 0, 0, "Unexpected any. Specify a different type.", "10"], - [0, 0, 0, "Unexpected any. Specify a different type.", "11"] - ], - "public/app/plugins/datasource/mysql/meta_query.ts:5381": [ - [0, 0, 0, "Unexpected any. 
Specify a different type.", "0"], - [0, 0, 0, "Unexpected any. Specify a different type.", "1"] - ], - "public/app/plugins/datasource/mysql/module.ts:5381": [ - [0, 0, 0, "Unexpected any. Specify a different type.", "0"], - [0, 0, 0, "Unexpected any. Specify a different type.", "1"] - ], - "public/app/plugins/datasource/mysql/mysql_query_model.ts:5381": [ - [0, 0, 0, "Unexpected any. Specify a different type.", "0"], - [0, 0, 0, "Unexpected any. Specify a different type.", "1"], - [0, 0, 0, "Unexpected any. Specify a different type.", "2"], - [0, 0, 0, "Unexpected any. Specify a different type.", "3"], - [0, 0, 0, "Unexpected any. Specify a different type.", "4"], - [0, 0, 0, "Unexpected any. Specify a different type.", "5"], - [0, 0, 0, "Unexpected any. Specify a different type.", "6"], - [0, 0, 0, "Unexpected any. Specify a different type.", "7"], - [0, 0, 0, "Unexpected any. Specify a different type.", "8"], - [0, 0, 0, "Unexpected any. Specify a different type.", "9"], - [0, 0, 0, "Unexpected any. Specify a different type.", "10"], - [0, 0, 0, "Unexpected any. Specify a different type.", "11"], - [0, 0, 0, "Unexpected any. Specify a different type.", "12"], - [0, 0, 0, "Unexpected any. Specify a different type.", "13"], - [0, 0, 0, "Unexpected any. Specify a different type.", "14"], - [0, 0, 0, "Unexpected any. Specify a different type.", "15"] - ], - "public/app/plugins/datasource/mysql/query_ctrl.ts:5381": [ - [0, 0, 0, "Unexpected any. Specify a different type.", "0"], - [0, 0, 0, "Unexpected any. Specify a different type.", "1"], - [0, 0, 0, "Unexpected any. Specify a different type.", "2"], - [0, 0, 0, "Unexpected any. Specify a different type.", "3"], - [0, 0, 0, "Unexpected any. Specify a different type.", "4"], - [0, 0, 0, "Unexpected any. Specify a different type.", "5"], - [0, 0, 0, "Unexpected any. Specify a different type.", "6"], - [0, 0, 0, "Unexpected any. Specify a different type.", "7"], - [0, 0, 0, "Unexpected any. 
Specify a different type.", "8"], - [0, 0, 0, "Unexpected any. Specify a different type.", "9"], - [0, 0, 0, "Unexpected any. Specify a different type.", "10"], - [0, 0, 0, "Unexpected any. Specify a different type.", "11"], - [0, 0, 0, "Unexpected any. Specify a different type.", "12"], - [0, 0, 0, "Unexpected any. Specify a different type.", "13"], - [0, 0, 0, "Unexpected any. Specify a different type.", "14"], - [0, 0, 0, "Unexpected any. Specify a different type.", "15"], - [0, 0, 0, "Unexpected any. Specify a different type.", "16"], - [0, 0, 0, "Unexpected any. Specify a different type.", "17"], - [0, 0, 0, "Unexpected any. Specify a different type.", "18"], - [0, 0, 0, "Unexpected any. Specify a different type.", "19"], - [0, 0, 0, "Unexpected any. Specify a different type.", "20"], - [0, 0, 0, "Unexpected any. Specify a different type.", "21"], - [0, 0, 0, "Unexpected any. Specify a different type.", "22"], - [0, 0, 0, "Unexpected any. Specify a different type.", "23"], - [0, 0, 0, "Do not use any type assertions.", "24"], - [0, 0, 0, "Do not use any type assertions.", "25"], - [0, 0, 0, "Unexpected any. Specify a different type.", "26"], - [0, 0, 0, "Unexpected any. Specify a different type.", "27"], - [0, 0, 0, "Unexpected any. Specify a different type.", "28"], - [0, 0, 0, "Unexpected any. Specify a different type.", "29"], - [0, 0, 0, "Unexpected any. Specify a different type.", "30"], - [0, 0, 0, "Unexpected any. Specify a different type.", "31"], - [0, 0, 0, "Unexpected any. Specify a different type.", "32"], - [0, 0, 0, "Unexpected any. Specify a different type.", "33"], - [0, 0, 0, "Unexpected any. Specify a different type.", "34"], - [0, 0, 0, "Unexpected any. Specify a different type.", "35"], - [0, 0, 0, "Unexpected any. Specify a different type.", "36"], - [0, 0, 0, "Unexpected any. Specify a different type.", "37"], - [0, 0, 0, "Unexpected any. Specify a different type.", "38"], - [0, 0, 0, "Unexpected any. 
Specify a different type.", "39"], - [0, 0, 0, "Unexpected any. Specify a different type.", "40"], - [0, 0, 0, "Unexpected any. Specify a different type.", "41"], - [0, 0, 0, "Unexpected any. Specify a different type.", "42"], - [0, 0, 0, "Unexpected any. Specify a different type.", "43"], - [0, 0, 0, "Unexpected any. Specify a different type.", "44"], - [0, 0, 0, "Unexpected any. Specify a different type.", "45"], - [0, 0, 0, "Unexpected any. Specify a different type.", "46"], - [0, 0, 0, "Unexpected any. Specify a different type.", "47"], - [0, 0, 0, "Unexpected any. Specify a different type.", "48"], - [0, 0, 0, "Unexpected any. Specify a different type.", "49"], - [0, 0, 0, "Unexpected any. Specify a different type.", "50"], - [0, 0, 0, "Unexpected any. Specify a different type.", "51"], - [0, 0, 0, "Unexpected any. Specify a different type.", "52"], - [0, 0, 0, "Unexpected any. Specify a different type.", "53"], - [0, 0, 0, "Unexpected any. Specify a different type.", "54"] - ], - "public/app/plugins/datasource/mysql/response_parser.ts:5381": [ - [0, 0, 0, "Do not use any type assertions.", "0"], - [0, 0, 0, "Unexpected any. Specify a different type.", "1"], - [0, 0, 0, "Do not use any type assertions.", "2"] - ], "public/app/plugins/datasource/mysql/specs/datasource.test.ts:5381": [ - [0, 0, 0, "Unexpected any. Specify a different type.", "0"], - [0, 0, 0, "Unexpected any. Specify a different type.", "1"] - ], - "public/app/plugins/datasource/mysql/sql_part.ts:5381": [ - [0, 0, 0, "Unexpected any. Specify a different type.", "0"], - [0, 0, 0, "Unexpected any. Specify a different type.", "1"], - [0, 0, 0, "Unexpected any. Specify a different type.", "2"], - [0, 0, 0, "Unexpected any. Specify a different type.", "3"] + [0, 0, 0, "Unexpected any. Specify a different type.", "0"] ], "public/app/plugins/datasource/mysql/types.ts:5381": [ [0, 0, 0, "Unexpected any. Specify a different type.", "0"], [0, 0, 0, "Unexpected any. 
Specify a different type.", "1"], [0, 0, 0, "Unexpected any. Specify a different type.", "2"], [0, 0, 0, "Unexpected any. Specify a different type.", "3"], - [0, 0, 0, "Unexpected any. Specify a different type.", "4"], - [0, 0, 0, "Unexpected any. Specify a different type.", "5"] + [0, 0, 0, "Unexpected any. Specify a different type.", "4"] ], "public/app/plugins/datasource/opentsdb/datasource.d.ts:5381": [ [0, 0, 0, "Unexpected any. Specify a different type.", "0"] diff --git a/pkg/services/dashboards/store_mock.go b/pkg/services/dashboards/store_mock.go index fe561d3188d..0695797b999 100644 --- a/pkg/services/dashboards/store_mock.go +++ b/pkg/services/dashboards/store_mock.go @@ -435,7 +435,7 @@ func (_m *FakeDashboardStore) ValidateDashboardBeforeSave(dashboard *models.Dash return r0, r1 } -func (_m *FakeDashboardStore) DeleteACLByUser(ctx context.Context, userID int64) error{ +func (_m *FakeDashboardStore) DeleteACLByUser(ctx context.Context, userID int64) error { ret := _m.Called(ctx, userID) var r0 error diff --git a/public/app/features/plugins/sql/components/visual-query-builder/SQLOrderByRow.tsx b/public/app/features/plugins/sql/components/visual-query-builder/SQLOrderByRow.tsx index 80a5d50b213..8b8631290bc 100644 --- a/public/app/features/plugins/sql/components/visual-query-builder/SQLOrderByRow.tsx +++ b/public/app/features/plugins/sql/components/visual-query-builder/SQLOrderByRow.tsx @@ -20,16 +20,18 @@ export function SQLOrderByRow({ fields, query, onQueryChange, db }: SQLOrderByRo let columnsWithIndices: SelectableValue[] = []; if (fields) { + const options = query.sql?.columns?.map((c, i) => { + const value = c.name ? `${c.name}(${c.parameters?.map((p) => p.name)})` : c.parameters?.map((p) => p.name); + return { + value, + label: `${i + 1} - ${value}`, + }; + }); columnsWithIndices = [ { value: '', label: 'Selected columns', - options: query.sql?.columns?.map((c, i) => ({ - value: i + 1, - label: c.name - ? 
`${i + 1} - ${c.name}(${c.parameters?.map((p) => `${p.name}`)})` - : c.parameters?.map((p) => `${i + 1} - ${p.name}`), - })), + options, expanded: true, }, ...fields, diff --git a/public/app/features/plugins/sql/components/visual-query-builder/SQLSelectRow.tsx b/public/app/features/plugins/sql/components/visual-query-builder/SQLSelectRow.tsx index 29844d0e4ce..ca3470d0ce0 100644 --- a/public/app/features/plugins/sql/components/visual-query-builder/SQLSelectRow.tsx +++ b/public/app/features/plugins/sql/components/visual-query-builder/SQLSelectRow.tsx @@ -1,6 +1,7 @@ import React from 'react'; +import { useAsync } from 'react-use'; -import { SelectableValue } from '@grafana/data'; +import { SelectableValue, toOption } from '@grafana/data'; import { QueryWithDefaults } from '../../defaults'; import { DB, SQLQuery } from '../../types'; @@ -18,5 +19,10 @@ interface SQLSelectRowProps { export function SQLSelectRow({ fields, query, onQueryChange, db }: SQLSelectRowProps) { const { onSqlChange } = useSqlChange({ query, onQueryChange, db }); - return ; + const state = useAsync(async () => { + const functions = await db.functions(); + return functions.map((f) => toOption(f.name)); + }, [db]); + + return ; } diff --git a/public/app/features/plugins/sql/components/visual-query-builder/SelectRow.tsx b/public/app/features/plugins/sql/components/visual-query-builder/SelectRow.tsx index f340989d5ae..b11c7e5b357 100644 --- a/public/app/features/plugins/sql/components/visual-query-builder/SelectRow.tsx +++ b/public/app/features/plugins/sql/components/visual-query-builder/SelectRow.tsx @@ -6,7 +6,6 @@ import { SelectableValue, toOption } from '@grafana/data'; import { EditorField, Stack } from '@grafana/experimental'; import { Button, Select, useStyles2 } from '@grafana/ui'; -import { AGGREGATE_FNS } from '../../constants'; import { QueryEditorExpressionType, QueryEditorFunctionExpression } from '../../expressions'; import { SQLExpression } from '../../types'; import { 
createFunctionField } from '../../utils/sql.utils'; @@ -15,11 +14,12 @@ interface SelectRowProps { sql: SQLExpression; onSqlChange: (sql: SQLExpression) => void; columns?: Array>; + functions?: Array>; } const asteriskValue = { label: '*', value: '*' }; -export function SelectRow({ sql, columns, onSqlChange }: SelectRowProps) { +export function SelectRow({ sql, columns, onSqlChange, functions }: SelectRowProps) { const styles = useStyles2(getStyles); const columnsWithAsterisk = [asteriskValue, ...(columns || [])]; @@ -101,7 +101,7 @@ export function SelectRow({ sql, columns, onSqlChange }: SelectRowProps) { isClearable menuShouldPortal allowCustomValue - options={aggregateFnOptions} + options={functions} onChange={onAggregationChange(item, index)} /> @@ -133,10 +133,6 @@ const getStyles = () => { return { addButton: css({ alignSelf: 'flex-start' }) }; }; -const aggregateFnOptions = AGGREGATE_FNS.map((v: { id: string; name: string; description: string }) => - toOption(v.name) -); - function getColumnValue({ parameters }: QueryEditorFunctionExpression): SelectableValue | null { const column = parameters?.find((p) => p.type === QueryEditorExpressionType.FunctionParameter); if (column?.name) { diff --git a/public/app/features/plugins/sql/datasource/SqlDatasource.ts b/public/app/features/plugins/sql/datasource/SqlDatasource.ts index 6737b6ca645..3adf02b4944 100644 --- a/public/app/features/plugins/sql/datasource/SqlDatasource.ts +++ b/public/app/features/plugins/sql/datasource/SqlDatasource.ts @@ -204,6 +204,7 @@ export abstract class SqlDatasource extends DataSourceWithBackend Promise; datasets: () => Promise; @@ -123,6 +139,7 @@ export interface DB { lookup: (path?: string) => Promise>; getSqlCompletionProvider: () => LanguageCompletionProvider; toRawSql?: (query: SQLQuery) => string; + functions: () => Promise; } export interface QueryEditorProps { diff --git a/public/app/plugins/datasource/mssql/datasource.ts b/public/app/plugins/datasource/mssql/datasource.ts index 
b3766a58d98..2447d8acaa2 100644 --- a/public/app/plugins/datasource/mssql/datasource.ts +++ b/public/app/plugins/datasource/mssql/datasource.ts @@ -1,6 +1,7 @@ import { DataSourceInstanceSettings, ScopedVars } from '@grafana/data'; import { LanguageCompletionProvider } from '@grafana/experimental'; import { TemplateSrv } from '@grafana/runtime'; +import { AGGREGATE_FNS } from 'app/features/plugins/sql/constants'; import { SqlDatasource } from 'app/features/plugins/sql/datasource/SqlDatasource'; import { DB, ResponseParser, SQLQuery, SQLSelectableValue } from 'app/features/plugins/sql/types'; @@ -92,6 +93,7 @@ export class MssqlDatasource extends SqlDatasource { } } }, + functions: async () => AGGREGATE_FNS, }; } } diff --git a/public/app/plugins/datasource/mysql/MySqlDatasource.ts b/public/app/plugins/datasource/mysql/MySqlDatasource.ts new file mode 100644 index 00000000000..49dce6901aa --- /dev/null +++ b/public/app/plugins/datasource/mysql/MySqlDatasource.ts @@ -0,0 +1,112 @@ +import { DataSourceInstanceSettings, ScopedVars, TimeRange } from '@grafana/data'; +import { CompletionItemKind, LanguageCompletionProvider } from '@grafana/experimental'; +import { TemplateSrv } from '@grafana/runtime'; +import { SqlDatasource } from 'app/features/plugins/sql/datasource/SqlDatasource'; +import { DB, ResponseParser, SQLQuery } from 'app/features/plugins/sql/types'; + +import MySQLQueryModel from './MySqlQueryModel'; +import MySqlResponseParser from './MySqlResponseParser'; +import { mapFieldsToTypes } from './fields'; +import { buildColumnQuery, buildTableQuery, showDatabases } from './mySqlMetaQuery'; +import { fetchColumns, fetchTables, getFunctions, getSqlCompletionProvider } from './sqlCompletionProvider'; +import { MySQLOptions } from './types'; + +export class MySqlDatasource extends SqlDatasource { + responseParser: MySqlResponseParser; + completionProvider: LanguageCompletionProvider | undefined; + + constructor(private instanceSettings: DataSourceInstanceSettings) 
{ + super(instanceSettings); + this.responseParser = new MySqlResponseParser(); + this.completionProvider = undefined; + } + + getQueryModel(target?: Partial, templateSrv?: TemplateSrv, scopedVars?: ScopedVars): MySQLQueryModel { + return new MySQLQueryModel(target!, templateSrv, scopedVars); + } + + getResponseParser(): ResponseParser { + return this.responseParser; + } + + getSqlCompletionProvider(db: DB): LanguageCompletionProvider { + if (this.completionProvider !== undefined) { + return this.completionProvider; + } + + const args = { + getColumns: { current: (query: SQLQuery) => fetchColumns(db, query) }, + getTables: { current: (dataset?: string) => fetchTables(db, { dataset }) }, + fetchMeta: { current: (path?: string) => this.fetchMeta(path) }, + getFunctions: { current: () => getFunctions() }, + }; + this.completionProvider = getSqlCompletionProvider(args); + return this.completionProvider; + } + + async fetchDatasets(): Promise { + const datasets = await this.runSql(showDatabases(), { refId: 'datasets' }); + return datasets.map((t) => t[0]); + } + + async fetchTables(dataset?: string): Promise { + const tables = await this.runSql(buildTableQuery(dataset), { refId: 'tables' }); + return tables.map((t) => t[0]); + } + + async fetchFields(query: Partial) { + if (!query.dataset || !query.table) { + return []; + } + const queryString = buildColumnQuery(this.getQueryModel(query), query.table!); + const frame = await this.runSql(queryString, { refId: 'fields' }); + const fields = frame.map((f) => ({ name: f[0], text: f[0], value: f[0], type: f[1], label: f[0] })); + return mapFieldsToTypes(fields); + } + + async fetchMeta(path?: string) { + const defaultDB = this.instanceSettings.jsonData.database; + path = path?.trim(); + if (!path && defaultDB) { + const tables = await this.fetchTables(defaultDB); + return tables.map((t) => ({ name: t, completion: t, kind: CompletionItemKind.Class })); + } else if (!path) { + const datasets = await this.fetchDatasets(); + 
return datasets.map((d) => ({ name: d, completion: `${d}.`, kind: CompletionItemKind.Module })); + } else { + const parts = path.split('.').filter((s: string) => s); + if (parts.length > 2) { + return []; + } + if (parts.length === 1 && !defaultDB) { + const tables = await this.fetchTables(parts[0]); + return tables.map((t) => ({ name: t, completion: t, kind: CompletionItemKind.Class })); + } else if (parts.length === 1 && defaultDB) { + const fields = await this.fetchFields({ dataset: defaultDB, table: parts[0] }); + return fields.map((t) => ({ name: t.value, completion: t.value, kind: CompletionItemKind.Field })); + } else if (parts.length === 2 && !defaultDB) { + const fields = await this.fetchFields({ dataset: parts[0], table: parts[1] }); + return fields.map((t) => ({ name: t.value, completion: t.value, kind: CompletionItemKind.Field })); + } else { + return []; + } + } + } + + getDB(): DB { + if (this.db !== undefined) { + return this.db; + } + return { + datasets: () => this.fetchDatasets(), + tables: (dataset?: string) => this.fetchTables(dataset), + fields: (query: SQLQuery) => this.fetchFields(query), + validateQuery: (query: SQLQuery, range?: TimeRange) => + Promise.resolve({ query, error: '', isError: false, isValid: true }), + dsID: () => this.id, + lookup: (path?: string) => this.fetchMeta(path), + getSqlCompletionProvider: () => this.getSqlCompletionProvider(this.db), + functions: async () => getFunctions(), + }; + } +} diff --git a/public/app/plugins/datasource/mysql/MySqlQueryModel.ts b/public/app/plugins/datasource/mysql/MySqlQueryModel.ts new file mode 100644 index 00000000000..dbd0fee6895 --- /dev/null +++ b/public/app/plugins/datasource/mysql/MySqlQueryModel.ts @@ -0,0 +1,61 @@ +import { map } from 'lodash'; + +import { ScopedVars } from '@grafana/data'; +import { TemplateSrv } from '@grafana/runtime'; + +import { MySQLQuery } from './types'; + +export default class MySQLQueryModel { + target: Partial; + templateSrv?: TemplateSrv; + 
scopedVars?: ScopedVars; + + constructor(target: Partial, templateSrv?: TemplateSrv, scopedVars?: ScopedVars) { + this.target = target; + this.templateSrv = templateSrv; + this.scopedVars = scopedVars; + } + + // remove identifier quoting from identifier to use in metadata queries + unquoteIdentifier(value: string) { + if (value[0] === '"' && value[value.length - 1] === '"') { + return value.substring(1, value.length - 1).replace(/""/g, '"'); + } else { + return value; + } + } + + quoteIdentifier(value: string) { + return '"' + value.replace(/"/g, '""') + '"'; + } + + quoteLiteral(value: string) { + return "'" + value.replace(/'/g, "''") + "'"; + } + + escapeLiteral(value: string) { + return String(value).replace(/'/g, "''"); + } + + format = (value: string, variable: { multi: boolean; includeAll: boolean }) => { + // if no multi or include all do not regexEscape + if (!variable.multi && !variable.includeAll) { + return this.escapeLiteral(value); + } + + if (typeof value === 'string') { + return this.quoteLiteral(value); + } + + const escapedValues = map(value, this.quoteLiteral); + return escapedValues.join(','); + }; + + interpolate() { + return this.templateSrv!.replace(this.target.rawSql, this.scopedVars, this.format); + } + + getDatabase() { + return this.target.dataset; + } +} diff --git a/public/app/plugins/datasource/mysql/MySqlResponseParser.ts b/public/app/plugins/datasource/mysql/MySqlResponseParser.ts new file mode 100644 index 00000000000..d4ef6c9f0d7 --- /dev/null +++ b/public/app/plugins/datasource/mysql/MySqlResponseParser.ts @@ -0,0 +1,27 @@ +import { uniqBy } from 'lodash'; + +import { DataFrame, MetricFindValue } from '@grafana/data'; + +export default class ResponseParser { + transformMetricFindResponse(frame: DataFrame): MetricFindValue[] { + const values: MetricFindValue[] = []; + const textField = frame.fields.find((f) => f.name === '__text'); + const valueField = frame.fields.find((f) => f.name === '__value'); + + if (textField && 
valueField) { + for (let i = 0; i < textField.values.length; i++) { + values.push({ text: '' + textField.values.get(i), value: '' + valueField.values.get(i) }); + } + } else { + values.push( + ...frame.fields + .flatMap((f) => f.values.toArray()) + .map((v) => ({ + text: v, + })) + ); + } + + return uniqBy(values, 'text'); + } +} diff --git a/public/app/plugins/datasource/mysql/datasource.ts b/public/app/plugins/datasource/mysql/datasource.ts deleted file mode 100644 index 904099254e0..00000000000 --- a/public/app/plugins/datasource/mysql/datasource.ts +++ /dev/null @@ -1,212 +0,0 @@ -import { map as _map } from 'lodash'; -import { lastValueFrom, of } from 'rxjs'; -import { catchError, map, mapTo } from 'rxjs/operators'; - -import { AnnotationEvent, DataSourceInstanceSettings, MetricFindValue, ScopedVars } from '@grafana/data'; -import { BackendDataSourceResponse, DataSourceWithBackend, FetchResponse, getBackendSrv } from '@grafana/runtime'; -import { toTestingStatus } from '@grafana/runtime/src/utils/queryResponse'; -import { getTemplateSrv, TemplateSrv } from 'app/features/templating/template_srv'; -import MySQLQueryModel from 'app/plugins/datasource/mysql/mysql_query_model'; - -import { getSearchFilterScopedVar } from '../../../features/variables/utils'; - -import ResponseParser from './response_parser'; -import { MySQLOptions, MySQLQuery, MysqlQueryForInterpolation } from './types'; - -export class MysqlDatasource extends DataSourceWithBackend { - id: any; - name: any; - responseParser: ResponseParser; - queryModel: MySQLQueryModel; - interval: string; - - constructor( - instanceSettings: DataSourceInstanceSettings, - private readonly templateSrv: TemplateSrv = getTemplateSrv() - ) { - super(instanceSettings); - this.name = instanceSettings.name; - this.id = instanceSettings.id; - this.responseParser = new ResponseParser(); - this.queryModel = new MySQLQueryModel({}); - const settingsData = instanceSettings.jsonData || ({} as MySQLOptions); - this.interval = 
settingsData.timeInterval || '1m'; - } - - interpolateVariable = (value: string | string[] | number, variable: any) => { - if (typeof value === 'string') { - if (variable.multi || variable.includeAll) { - const result = this.queryModel.quoteLiteral(value); - return result; - } else { - return value; - } - } - - if (typeof value === 'number') { - return value; - } - - const quotedValues = _map(value, (v: any) => { - return this.queryModel.quoteLiteral(v); - }); - return quotedValues.join(','); - }; - - interpolateVariablesInQueries( - queries: MysqlQueryForInterpolation[], - scopedVars: ScopedVars - ): MysqlQueryForInterpolation[] { - let expandedQueries = queries; - if (queries && queries.length > 0) { - expandedQueries = queries.map((query) => { - const expandedQuery = { - ...query, - datasource: this.getRef(), - rawSql: this.templateSrv.replace(query.rawSql, scopedVars, this.interpolateVariable), - rawQuery: true, - }; - return expandedQuery; - }); - } - return expandedQueries; - } - - filterQuery(query: MySQLQuery): boolean { - return !query.hide; - } - - applyTemplateVariables(target: MySQLQuery, scopedVars: ScopedVars): Record { - const queryModel = new MySQLQueryModel(target, this.templateSrv, scopedVars); - return { - refId: target.refId, - datasource: this.getRef(), - rawSql: queryModel.render(this.interpolateVariable as any), - format: target.format, - }; - } - - async annotationQuery(options: any): Promise { - if (!options.annotation.rawQuery) { - return Promise.reject({ - message: 'Query missing in annotation definition', - }); - } - - const query = { - refId: options.annotation.name, - datasource: this.getRef(), - rawSql: this.templateSrv.replace(options.annotation.rawQuery, options.scopedVars, this.interpolateVariable), - format: 'table', - }; - - return lastValueFrom( - getBackendSrv() - .fetch({ - url: '/api/ds/query', - method: 'POST', - data: { - from: options.range.from.valueOf().toString(), - to: options.range.to.valueOf().toString(), - queries: 
[query], - }, - requestId: options.annotation.name, - }) - .pipe( - map( - async (res: FetchResponse) => - await this.responseParser.transformAnnotationResponse(options, res.data) - ) - ) - ); - } - - metricFindQuery(query: string, optionalOptions: any): Promise { - let refId = 'tempvar'; - if (optionalOptions && optionalOptions.variable && optionalOptions.variable.name) { - refId = optionalOptions.variable.name; - } - - const rawSql = this.templateSrv.replace( - query, - getSearchFilterScopedVar({ query, wildcardChar: '%', options: optionalOptions }), - this.interpolateVariable - ); - - const interpolatedQuery = { - refId: refId, - datasource: this.getRef(), - rawSql, - format: 'table', - }; - - const range = optionalOptions?.range; - - return lastValueFrom( - getBackendSrv() - .fetch({ - url: '/api/ds/query', - method: 'POST', - data: { - from: range?.from?.valueOf()?.toString(), - to: range?.to?.valueOf()?.toString(), - queries: [interpolatedQuery], - }, - requestId: refId, - }) - .pipe( - map((rsp) => { - return this.responseParser.transformMetricFindResponse(rsp); - }), - catchError((err) => { - return of([]); - }) - ) - ); - } - - testDatasource(): Promise { - return lastValueFrom( - getBackendSrv() - .fetch({ - url: '/api/ds/query', - method: 'POST', - data: { - from: '5m', - to: 'now', - queries: [ - { - refId: 'A', - intervalMs: 1, - maxDataPoints: 1, - datasource: this.getRef(), - rawSql: 'SELECT 1', - format: 'table', - }, - ], - }, - }) - .pipe( - mapTo({ status: 'success', message: 'Database Connection OK' }), - catchError((err) => { - return of(toTestingStatus(err)); - }) - ) - ); - } - - targetContainsTemplate(target: any) { - let rawSql = ''; - - if (target.rawQuery) { - rawSql = target.rawSql; - } else { - const query = new MySQLQueryModel(target); - rawSql = query.buildQuery(); - } - - rawSql = rawSql.replace('$__', ''); - - return this.templateSrv.containsTemplate(rawSql); - } -} diff --git a/public/app/plugins/datasource/mysql/fields.ts 
b/public/app/plugins/datasource/mysql/fields.ts new file mode 100644 index 00000000000..96eb122c35e --- /dev/null +++ b/public/app/plugins/datasource/mysql/fields.ts @@ -0,0 +1,91 @@ +import { RAQBFieldTypes, SQLSelectableValue } from 'app/features/plugins/sql/types'; + +export function mapFieldsToTypes(columns: SQLSelectableValue[]) { + const fields: SQLSelectableValue[] = []; + for (const col of columns) { + let type: RAQBFieldTypes = 'text'; + switch (col.type?.toUpperCase()) { + case 'BOOLEAN': + case 'BOOL': { + type = 'boolean'; + break; + } + case 'BYTES': + case 'VARCHAR': { + type = 'text'; + break; + } + case 'FLOAT': + case 'FLOAT64': + case 'INT': + case 'INTEGER': + case 'INT64': + case 'NUMERIC': + case 'BIGNUMERIC': { + type = 'number'; + break; + } + case 'DATE': { + type = 'date'; + break; + } + case 'DATETIME': { + type = 'datetime'; + break; + } + case 'TIME': { + type = 'time'; + break; + } + case 'TIMESTAMP': { + type = 'datetime'; + break; + } + case 'GEOGRAPHY': { + type = 'text'; + break; + } + default: + break; + } + + fields.push({ ...col, raqbFieldType: type, icon: mapColumnTypeToIcon(col.type!.toUpperCase()) }); + } + return fields; +} + +export function mapColumnTypeToIcon(type: string) { + switch (type) { + case 'TIME': + case 'DATETIME': + case 'TIMESTAMP': + return 'clock-nine'; + case 'BOOLEAN': + return 'toggle-off'; + case 'INTEGER': + case 'FLOAT': + case 'FLOAT64': + case 'INT': + case 'SMALLINT': + case 'BIGINT': + case 'TINYINT': + case 'BYTEINT': + case 'INT64': + case 'NUMERIC': + case 'DECIMAL': + return 'calculator-alt'; + case 'CHAR': + case 'VARCHAR': + case 'STRING': + case 'BYTES': + case 'TEXT': + case 'TINYTEXT': + case 'MEDIUMTEXT': + case 'LONGTEXT': + return 'text'; + case 'GEOGRAPHY': + return 'map'; + default: + return undefined; + } +} diff --git a/public/app/plugins/datasource/mysql/functions.ts b/public/app/plugins/datasource/mysql/functions.ts new file mode 100644 index 00000000000..0ccb8ce0165 --- /dev/null 
+++ b/public/app/plugins/datasource/mysql/functions.ts @@ -0,0 +1,20 @@ +export const FUNCTIONS = [ + { + id: 'STDDEV', + name: 'STDDEV', + description: `STDDEV( + expression + ) + + Returns the standard deviation of non-NULL input values, or NaN if the input contains a NaN.`, + }, + { + id: 'VARIANCE', + name: 'VARIANCE', + description: `VARIANCE( + expression + ) + + Returns the variance of non-NULL input values, or NaN if the input contains a NaN.`, + }, +]; diff --git a/public/app/plugins/datasource/mysql/meta_query.ts b/public/app/plugins/datasource/mysql/meta_query.ts deleted file mode 100644 index eb03dc40f99..00000000000 --- a/public/app/plugins/datasource/mysql/meta_query.ts +++ /dev/null @@ -1,142 +0,0 @@ -export class MysqlMetaQuery { - constructor(private target: any, private queryModel: any) {} - - getOperators(datatype: string) { - switch (datatype) { - case 'double': - case 'float': { - return ['=', '!=', '<', '<=', '>', '>=']; - } - case 'text': - case 'tinytext': - case 'mediumtext': - case 'longtext': - case 'varchar': - case 'char': { - return ['=', '!=', '<', '<=', '>', '>=', 'IN', 'NOT IN', 'LIKE', 'NOT LIKE']; - } - default: { - return ['=', '!=', '<', '<=', '>', '>=', 'IN', 'NOT IN']; - } - } - } - - // quote identifier as literal to use in metadata queries - quoteIdentAsLiteral(value: string) { - return this.queryModel.quoteLiteral(this.queryModel.unquoteIdentifier(value)); - } - - findMetricTable() { - // query that returns first table found that has a timestamp(tz) column and a float column - const query = ` - SELECT - table_name as table_name, - ( SELECT - column_name as column_name - FROM information_schema.columns c - WHERE - c.table_schema = t.table_schema AND - c.table_name = t.table_name AND - c.data_type IN ('timestamp', 'datetime') - ORDER BY ordinal_position LIMIT 1 - ) AS time_column, - ( SELECT - column_name AS column_name - FROM information_schema.columns c - WHERE - c.table_schema = t.table_schema AND - c.table_name = 
t.table_name AND - c.data_type IN('float', 'int', 'bigint') - ORDER BY ordinal_position LIMIT 1 - ) AS value_column - FROM information_schema.tables t - WHERE - t.table_schema = database() AND - EXISTS - ( SELECT 1 - FROM information_schema.columns c - WHERE - c.table_schema = t.table_schema AND - c.table_name = t.table_name AND - c.data_type IN ('timestamp', 'datetime') - ) AND - EXISTS - ( SELECT 1 - FROM information_schema.columns c - WHERE - c.table_schema = t.table_schema AND - c.table_name = t.table_name AND - c.data_type IN('float', 'int', 'bigint') - ) - LIMIT 1 -;`; - return query; - } - - buildTableConstraint(table: string) { - let query = ''; - - // check for schema qualified table - if (table.includes('.')) { - const parts = table.split('.'); - query = 'table_schema = ' + this.quoteIdentAsLiteral(parts[0]); - query += ' AND table_name = ' + this.quoteIdentAsLiteral(parts[1]); - return query; - } else { - query = 'table_schema = database() AND table_name = ' + this.quoteIdentAsLiteral(table); - - return query; - } - } - - buildTableQuery() { - return 'SELECT table_name FROM information_schema.tables WHERE table_schema = database() ORDER BY table_name'; - } - - buildColumnQuery(type?: string) { - let query = 'SELECT column_name FROM information_schema.columns WHERE '; - query += this.buildTableConstraint(this.target.table); - - switch (type) { - case 'time': { - query += " AND data_type IN ('timestamp','datetime','bigint','int','double','float')"; - break; - } - case 'metric': { - query += " AND data_type IN ('text','tinytext','mediumtext','longtext','varchar','char')"; - break; - } - case 'value': { - query += " AND data_type IN ('bigint','int','smallint','mediumint','tinyint','double','decimal','float')"; - query += ' AND column_name <> ' + this.quoteIdentAsLiteral(this.target.timeColumn); - break; - } - case 'group': { - query += " AND data_type IN ('text','tinytext','mediumtext','longtext','varchar','char')"; - break; - } - } - - query += ' ORDER BY 
column_name'; - - return query; - } - - buildValueQuery(column: string) { - let query = 'SELECT DISTINCT QUOTE(' + column + ')'; - query += ' FROM ' + this.target.table; - query += ' WHERE $__timeFilter(' + this.target.timeColumn + ')'; - query += ' ORDER BY 1 LIMIT 100'; - return query; - } - - buildDatatypeQuery(column: string) { - let query = ` -SELECT data_type -FROM information_schema.columns -WHERE `; - query += ' table_name = ' + this.quoteIdentAsLiteral(this.target.table); - query += ' AND column_name = ' + this.quoteIdentAsLiteral(column); - return query; - } -} diff --git a/public/app/plugins/datasource/mysql/module.ts b/public/app/plugins/datasource/mysql/module.ts index 7044b6dd35b..c53f5b5594c 100644 --- a/public/app/plugins/datasource/mysql/module.ts +++ b/public/app/plugins/datasource/mysql/module.ts @@ -1,40 +1,11 @@ import { DataSourcePlugin } from '@grafana/data'; +import { SqlQueryEditor } from 'app/features/plugins/sql/components/QueryEditor'; +import { SQLQuery } from 'app/features/plugins/sql/types'; +import { MySqlDatasource } from './MySqlDatasource'; import { ConfigurationEditor } from './configuration/ConfigurationEditor'; -import { MysqlDatasource } from './datasource'; -import { MysqlQueryCtrl } from './query_ctrl'; -import { MySQLQuery } from './types'; +import { MySQLOptions } from './types'; -const defaultQuery = `SELECT - UNIX_TIMESTAMP() as time_sec, - as text, - as tags - FROM
- WHERE $__timeFilter(time_column) - ORDER BY ASC - LIMIT 100 - `; - -class MysqlAnnotationsQueryCtrl { - static templateUrl = 'partials/annotations.editor.html'; - - declare annotation: any; - - /** @ngInject */ - constructor($scope: any) { - this.annotation = $scope.ctrl.annotation; - this.annotation.rawQuery = this.annotation.rawQuery || defaultQuery; - } -} - -export { - MysqlDatasource, - MysqlDatasource as Datasource, - MysqlQueryCtrl as QueryCtrl, - MysqlAnnotationsQueryCtrl as AnnotationsQueryCtrl, -}; - -export const plugin = new DataSourcePlugin(MysqlDatasource) - .setQueryCtrl(MysqlQueryCtrl) - .setConfigEditor(ConfigurationEditor) - .setAnnotationQueryCtrl(MysqlAnnotationsQueryCtrl); +export const plugin = new DataSourcePlugin(MySqlDatasource) + .setQueryEditor(SqlQueryEditor) + .setConfigEditor(ConfigurationEditor); diff --git a/public/app/plugins/datasource/mysql/mySqlMetaQuery.ts b/public/app/plugins/datasource/mysql/mySqlMetaQuery.ts new file mode 100644 index 00000000000..25d09bcb8e0 --- /dev/null +++ b/public/app/plugins/datasource/mysql/mySqlMetaQuery.ts @@ -0,0 +1,60 @@ +import MySQLQueryModel from './MySqlQueryModel'; + +export function buildTableQuery(dataset?: string) { + const database = dataset !== undefined ? 
`'${dataset}'` : 'database()'; + return `SELECT table_name FROM information_schema.tables WHERE table_schema = ${database} ORDER BY table_name`; +} + +export function showDatabases() { + return `SELECT DISTINCT TABLE_SCHEMA from information_schema.TABLES where TABLE_TYPE != 'SYSTEM VIEW' ORDER BY TABLE_SCHEMA`; +} + +export function buildColumnQuery(queryModel: MySQLQueryModel, table: string, type?: string, timeColumn?: string) { + let query = 'SELECT column_name, data_type FROM information_schema.columns WHERE '; + query += buildTableConstraint(queryModel, table); + + switch (type) { + case 'time': { + query += " AND data_type IN ('timestamp','datetime','bigint','int','double','float')"; + break; + } + case 'metric': { + query += " AND data_type IN ('text','tinytext','mediumtext','longtext','varchar','char')"; + break; + } + case 'value': { + query += " AND data_type IN ('bigint','int','smallint','mediumint','tinyint','double','decimal','float')"; + query += ' AND column_name <> ' + quoteIdentAsLiteral(queryModel, timeColumn!); + break; + } + case 'group': { + query += " AND data_type IN ('text','tinytext','mediumtext','longtext','varchar','char')"; + break; + } + } + + query += ' ORDER BY column_name'; + + return query; +} + +export function buildTableConstraint(queryModel: MySQLQueryModel, table: string) { + let query = ''; + + // check for schema qualified table + if (table.includes('.')) { + const parts = table.split('.'); + query = 'table_schema = ' + quoteIdentAsLiteral(queryModel, parts[0]); + query += ' AND table_name = ' + quoteIdentAsLiteral(queryModel, parts[1]); + return query; + } else { + const database = queryModel.getDatabase() !== undefined ? 
`'${queryModel.getDatabase()}'` : 'database()'; + query = `table_schema = ${database} AND table_name = ` + quoteIdentAsLiteral(queryModel, table); + + return query; + } +} + +export function quoteIdentAsLiteral(queryModel: MySQLQueryModel, value: string) { + return queryModel.quoteLiteral(queryModel.unquoteIdentifier(value)); +} diff --git a/public/app/plugins/datasource/mysql/mysql_query_model.ts b/public/app/plugins/datasource/mysql/mysql_query_model.ts deleted file mode 100644 index 4fc106f9072..00000000000 --- a/public/app/plugins/datasource/mysql/mysql_query_model.ts +++ /dev/null @@ -1,236 +0,0 @@ -import { find, map } from 'lodash'; - -import { ScopedVars } from '@grafana/data'; -import { TemplateSrv } from '@grafana/runtime'; - -export default class MySQLQueryModel { - target: any; - templateSrv: any; - scopedVars: any; - - /** @ngInject */ - constructor(target: any, templateSrv?: TemplateSrv, scopedVars?: ScopedVars) { - this.target = target; - this.templateSrv = templateSrv; - this.scopedVars = scopedVars; - - target.format = target.format || 'time_series'; - target.timeColumn = target.timeColumn || 'time'; - target.metricColumn = target.metricColumn || 'none'; - - target.group = target.group || []; - target.where = target.where || [{ type: 'macro', name: '$__timeFilter', params: [] }]; - target.select = target.select || [[{ type: 'column', params: ['value'] }]]; - - // handle pre query gui panels gracefully - if (!('rawQuery' in this.target)) { - if ('rawSql' in target) { - // pre query gui panel - target.rawQuery = true; - } else { - // new panel - target.rawQuery = false; - } - } - - // give interpolateQueryStr access to this - this.interpolateQueryStr = this.interpolateQueryStr.bind(this); - } - - // remove identifier quoting from identifier to use in metadata queries - unquoteIdentifier(value: string) { - if (value[0] === '"' && value[value.length - 1] === '"') { - return value.substring(1, value.length - 1).replace(/""/g, '"'); - } else { - return 
value; - } - } - - quoteIdentifier(value: string) { - return '"' + value.replace(/"/g, '""') + '"'; - } - - quoteLiteral(value: string) { - return "'" + value.replace(/'/g, "''") + "'"; - } - - escapeLiteral(value: any) { - return String(value).replace(/'/g, "''"); - } - - hasTimeGroup() { - return find(this.target.group, (g: any) => g.type === 'time'); - } - - hasMetricColumn() { - return this.target.metricColumn !== 'none'; - } - - interpolateQueryStr(value: string, variable: { multi: any; includeAll: any }, defaultFormatFn: any) { - // if no multi or include all do not regexEscape - if (!variable.multi && !variable.includeAll) { - return this.escapeLiteral(value); - } - - if (typeof value === 'string') { - return this.quoteLiteral(value); - } - - const escapedValues = map(value, this.quoteLiteral); - return escapedValues.join(','); - } - - render(interpolate?: boolean) { - const target = this.target; - - // new query with no table set yet - if (!this.target.rawQuery && !('table' in this.target)) { - return ''; - } - - if (!target.rawQuery) { - target.rawSql = this.buildQuery(); - } - - if (interpolate) { - return this.templateSrv.replace(target.rawSql, this.scopedVars, this.interpolateQueryStr); - } else { - return target.rawSql; - } - } - - hasUnixEpochTimecolumn() { - return ['int', 'bigint', 'double'].indexOf(this.target.timeColumnType) > -1; - } - - buildTimeColumn(alias = true) { - const timeGroup = this.hasTimeGroup(); - let query; - let macro = '$__timeGroup'; - - if (timeGroup) { - let args; - if (timeGroup.params.length > 1 && timeGroup.params[1] !== 'none') { - args = timeGroup.params.join(','); - } else { - args = timeGroup.params[0]; - } - if (this.hasUnixEpochTimecolumn()) { - macro = '$__unixEpochGroup'; - } - if (alias) { - macro += 'Alias'; - } - query = macro + '(' + this.target.timeColumn + ',' + args + ')'; - } else { - query = this.target.timeColumn; - if (alias) { - query += ' AS "time"'; - } - } - - return query; - } - - buildMetricColumn() 
{ - if (this.hasMetricColumn()) { - return this.target.metricColumn + ' AS metric'; - } - - return ''; - } - - buildValueColumns() { - let query = ''; - for (const column of this.target.select) { - query += ',\n ' + this.buildValueColumn(column); - } - - return query; - } - - buildValueColumn(column: any) { - let query = ''; - - const columnName: any = find(column, (g: any) => g.type === 'column'); - query = columnName.params[0]; - - const aggregate: any = find(column, (g: any) => g.type === 'aggregate'); - - if (aggregate) { - const func = aggregate.params[0]; - query = func + '(' + query + ')'; - } - - const alias: any = find(column, (g: any) => g.type === 'alias'); - if (alias) { - query += ' AS ' + this.quoteIdentifier(alias.params[0]); - } - - return query; - } - - buildWhereClause() { - let query = ''; - const conditions = map(this.target.where, (tag, index) => { - switch (tag.type) { - case 'macro': - return tag.name + '(' + this.target.timeColumn + ')'; - break; - case 'expression': - return tag.params.join(' '); - break; - } - }); - - if (conditions.length > 0) { - query = '\nWHERE\n ' + conditions.join(' AND\n '); - } - - return query; - } - - buildGroupClause() { - let query = ''; - let groupSection = ''; - - for (let i = 0; i < this.target.group.length; i++) { - const part = this.target.group[i]; - if (i > 0) { - groupSection += ', '; - } - if (part.type === 'time') { - groupSection += '1'; - } else { - groupSection += part.params[0]; - } - } - - if (groupSection.length) { - query = '\nGROUP BY ' + groupSection; - if (this.hasMetricColumn()) { - query += ',2'; - } - } - return query; - } - - buildQuery() { - let query = 'SELECT'; - - query += '\n ' + this.buildTimeColumn(); - if (this.hasMetricColumn()) { - query += ',\n ' + this.buildMetricColumn(); - } - query += this.buildValueColumns(); - - query += '\nFROM ' + this.target.table; - - query += this.buildWhereClause(); - query += this.buildGroupClause(); - - query += '\nORDER BY ' + 
this.buildTimeColumn(false); - - return query; - } -} diff --git a/public/app/plugins/datasource/mysql/partials/annotations.editor.html b/public/app/plugins/datasource/mysql/partials/annotations.editor.html deleted file mode 100644 index a9530b03f8e..00000000000 --- a/public/app/plugins/datasource/mysql/partials/annotations.editor.html +++ /dev/null @@ -1,54 +0,0 @@ -
-
-
- -
-
- -
-
- -
-
- -
-
Annotation Query Format
-An annotation is an event that is overlaid on top of graphs. The query can have up to four columns per row, the time or time_sec column is mandatory. Annotation rendering is expensive so it is important to limit the number of rows returned. - -- column with alias: time or time_sec for the annotation event time. Use epoch time or any native date data type. -- column with alias: timeend for the annotation event end time. Use epoch time or any native date data type. -- column with alias: text for the annotation text -- column with alias: tags for annotation tags. This is a comma separated string of tags e.g. 'tag1,tag2' - - -Macros: -- $__time(column) -> UNIX_TIMESTAMP(column) as time (or as time_sec) -- $__timeEpoch(column) -> UNIX_TIMESTAMP(column) as time (or as time_sec) -- $__timeFilter(column) -> column BETWEEN FROM_UNIXTIME(1492750877) AND FROM_UNIXTIME(1492750877) -- $__unixEpochFilter(column) -> time_unix_epoch > 1492750877 AND time_unix_epoch < 1492750877 -- $__unixEpochNanoFilter(column) -> column >= 1494410783152415214 AND column <= 1494497183142514872 - -Or build your own conditionals using these macros which just return the values: -- $__timeFrom() -> FROM_UNIXTIME(1492750877) -- $__timeTo() -> FROM_UNIXTIME(1492750877) -- $__unixEpochFrom() -> 1492750877 -- $__unixEpochTo() -> 1492750877 -- $__unixEpochNanoFrom() -> 1494410783152415214 -- $__unixEpochNanoTo() -> 1494497183142514872 -
-
-
diff --git a/public/app/plugins/datasource/mysql/partials/query.editor.html b/public/app/plugins/datasource/mysql/partials/query.editor.html deleted file mode 100644 index 014135413b6..00000000000 --- a/public/app/plugins/datasource/mysql/partials/query.editor.html +++ /dev/null @@ -1,190 +0,0 @@ - - -
-
-
- - -
-
-
- -
-
-
- - - - - - - - -
- -
-
-
- -
- -
-
- -
- -
- - -
- -
- -
- -
-
-
-
- -
-
- -
- -
- - -
- -
- -
- -
-
-
- -
- -
-
- - - - -
- -
- -
- -
-
-
-
- -
- -
-
- -
- -
-
-
- -
-
- -
-
- -
-
-
-
-
- -
-
Time series:
-- return column named time or time_sec (in UTC), as a unix time stamp or any sql native date data type. You can use the macros below.
-- return column(s) with numeric datatype as values
-Optional:
-  - return column named metric to represent the series name.
-  - If multiple value columns are returned the metric column is used as prefix.
-  - If no column named metric is found the column name of the value column is used as series name
-
-Resultsets of time series queries need to be sorted by time.
-
-Table:
-- return any set of columns
-
-Macros:
-- $__time(column) -> UNIX_TIMESTAMP(column) as time_sec
-- $__timeEpoch(column) -> UNIX_TIMESTAMP(column) as time_sec
-- $__timeFilter(column) -> column BETWEEN FROM_UNIXTIME(1492750877) AND FROM_UNIXTIME(1492750877)
-- $__unixEpochFilter(column) ->  time_unix_epoch > 1492750877 AND time_unix_epoch < 1492750877
-- $__unixEpochNanoFilter(column) ->  column >= 1494410783152415214 AND column <= 1494497183142514872
-- $__timeGroup(column,'5m'[, fillvalue]) -> cast(cast(UNIX_TIMESTAMP(column)/(300) as signed)*300 as signed)
-     by setting fillvalue grafana will fill in missing values according to the interval
-     fillvalue can be either a literal value, NULL or previous; previous will fill in the previous seen value or NULL if none has been seen yet
-- $__timeGroupAlias(column,'5m') -> cast(cast(UNIX_TIMESTAMP(column)/(300) as signed)*300 as signed) AS "time"
-- $__unixEpochGroup(column,'5m') -> column DIV 300 * 300
-- $__unixEpochGroupAlias(column,'5m') -> column DIV 300 * 300 AS "time"
-
-Example of group by and order by with $__timeGroup:
-SELECT
-  $__timeGroupAlias(timestamp_col, '1h'),
-  sum(value_double) as value
-FROM yourtable
-GROUP BY 1
-ORDER BY 1
-
-Or build your own conditionals using these macros which just return the values:
-- $__timeFrom() -> FROM_UNIXTIME(1492750877)
-- $__timeTo() ->  FROM_UNIXTIME(1492750877)
-- $__unixEpochFrom() ->  1492750877
-- $__unixEpochTo() ->  1492750877
-- $__unixEpochNanoFrom() ->  1494410783152415214
-- $__unixEpochNanoTo() ->  1494497183142514872
-    
-
- - - -
-
{{ctrl.lastQueryMeta.executedQueryString}}
-
- -
-
{{ctrl.lastQueryError}}
-
- -
diff --git a/public/app/plugins/datasource/mysql/query_ctrl.ts b/public/app/plugins/datasource/mysql/query_ctrl.ts deleted file mode 100644 index 62a9d69cf93..00000000000 --- a/public/app/plugins/datasource/mysql/query_ctrl.ts +++ /dev/null @@ -1,647 +0,0 @@ -import { auto } from 'angular'; -import { clone, filter, find, findIndex, indexOf, map } from 'lodash'; - -import { PanelEvents, QueryResultMeta } from '@grafana/data'; -import { TemplateSrv } from '@grafana/runtime'; -import { SqlPart } from 'app/angular/components/sql_part/sql_part'; -import appEvents from 'app/core/app_events'; -import { VariableWithMultiSupport } from 'app/features/variables/types'; -import { QueryCtrl } from 'app/plugins/sdk'; - -import { ShowConfirmModalEvent } from '../../../types/events'; - -import { MysqlMetaQuery } from './meta_query'; -import MySQLQueryModel from './mysql_query_model'; -import sqlPart from './sql_part'; - -const defaultQuery = `SELECT - UNIX_TIMESTAMP() as time_sec, - as value, - as metric -FROM
-WHERE $__timeFilter(time_column) -ORDER BY ASC -`; - -export class MysqlQueryCtrl extends QueryCtrl { - static templateUrl = 'partials/query.editor.html'; - - formats: any[]; - lastQueryError?: string; - showHelp!: boolean; - - queryModel: MySQLQueryModel; - metaBuilder: MysqlMetaQuery; - lastQueryMeta?: QueryResultMeta; - tableSegment: any; - whereAdd: any; - timeColumnSegment: any; - metricColumnSegment: any; - selectMenu: any[] = []; - selectParts: SqlPart[][] = []; - groupParts: SqlPart[] = []; - whereParts: SqlPart[] = []; - groupAdd: any; - - /** @ngInject */ - constructor( - $scope: any, - $injector: auto.IInjectorService, - private templateSrv: TemplateSrv, - private uiSegmentSrv: any - ) { - super($scope, $injector); - - this.target = this.target; - this.queryModel = new MySQLQueryModel(this.target, templateSrv, this.panel.scopedVars); - this.metaBuilder = new MysqlMetaQuery(this.target, this.queryModel); - this.updateProjection(); - - this.formats = [ - { text: 'Time series', value: 'time_series' }, - { text: 'Table', value: 'table' }, - ]; - - if (!this.target.rawSql) { - // special handling when in table panel - if (this.panelCtrl.panel.type === 'table') { - this.target.format = 'table'; - this.target.rawSql = 'SELECT 1'; - this.target.rawQuery = true; - } else { - this.target.rawSql = defaultQuery; - this.datasource.metricFindQuery(this.metaBuilder.findMetricTable()).then((result: any) => { - if (result.length > 0) { - this.target.table = result[0].text; - let segment = this.uiSegmentSrv.newSegment(this.target.table); - this.tableSegment.html = segment.html; - this.tableSegment.value = segment.value; - - this.target.timeColumn = result[1].text; - segment = this.uiSegmentSrv.newSegment(this.target.timeColumn); - this.timeColumnSegment.html = segment.html; - this.timeColumnSegment.value = segment.value; - - this.target.timeColumnType = 'timestamp'; - this.target.select = [[{ type: 'column', params: [result[2].text] }]]; - this.updateProjection(); - 
this.updateRawSqlAndRefresh(); - } - }); - } - } - - if (!this.target.table) { - this.tableSegment = uiSegmentSrv.newSegment({ value: 'select table', fake: true }); - } else { - this.tableSegment = uiSegmentSrv.newSegment(this.target.table); - } - - this.timeColumnSegment = uiSegmentSrv.newSegment(this.target.timeColumn); - this.metricColumnSegment = uiSegmentSrv.newSegment(this.target.metricColumn); - - this.buildSelectMenu(); - this.whereAdd = this.uiSegmentSrv.newPlusButton(); - this.groupAdd = this.uiSegmentSrv.newPlusButton(); - - this.panelCtrl.events.on(PanelEvents.dataReceived, this.onDataReceived.bind(this), $scope); - this.panelCtrl.events.on(PanelEvents.dataError, this.onDataError.bind(this), $scope); - } - - updateRawSqlAndRefresh() { - if (!this.target.rawQuery) { - this.target.rawSql = this.queryModel.buildQuery(); - } - - this.panelCtrl.refresh(); - } - - updateProjection() { - this.selectParts = map(this.target.select, (parts: any) => { - return map(parts, sqlPart.create).filter((n) => n); - }); - this.whereParts = map(this.target.where, sqlPart.create).filter((n) => n); - this.groupParts = map(this.target.group, sqlPart.create).filter((n) => n); - } - - updatePersistedParts() { - this.target.select = map(this.selectParts, (selectParts) => { - return map(selectParts, (part: any) => { - return { type: part.def.type, datatype: part.datatype, params: part.params }; - }); - }); - this.target.where = map(this.whereParts, (part: any) => { - return { type: part.def.type, datatype: part.datatype, name: part.name, params: part.params }; - }); - this.target.group = map(this.groupParts, (part: any) => { - return { type: part.def.type, datatype: part.datatype, params: part.params }; - }); - } - - buildSelectMenu() { - const aggregates = { - text: 'Aggregate Functions', - value: 'aggregate', - submenu: [ - { text: 'Average', value: 'avg' }, - { text: 'Count', value: 'count' }, - { text: 'Maximum', value: 'max' }, - { text: 'Minimum', value: 'min' }, - { text: 
'Sum', value: 'sum' }, - { text: 'Standard deviation', value: 'stddev' }, - { text: 'Variance', value: 'variance' }, - ], - }; - - this.selectMenu.push(aggregates); - this.selectMenu.push({ text: 'Alias', value: 'alias' }); - this.selectMenu.push({ text: 'Column', value: 'column' }); - } - - toggleEditorMode() { - if (this.target.rawQuery) { - appEvents.publish( - new ShowConfirmModalEvent({ - title: 'Warning', - text2: 'Switching to query builder may overwrite your raw SQL.', - icon: 'exclamation-triangle', - yesText: 'Switch', - onConfirm: () => { - // This could be called from React, so wrap in $evalAsync. - // Will then either run as part of the current digest cycle or trigger a new one. - this.$scope.$evalAsync(() => { - this.target.rawQuery = !this.target.rawQuery; - }); - }, - }) - ); - } else { - // This could be called from React, so wrap in $evalAsync. - // Will then either run as part of the current digest cycle or trigger a new one. - this.$scope.$evalAsync(() => { - this.target.rawQuery = !this.target.rawQuery; - }); - } - } - - resetPlusButton(button: { html: any; value: any }) { - const plusButton = this.uiSegmentSrv.newPlusButton(); - button.html = plusButton.html; - button.value = plusButton.value; - } - - getTableSegments() { - return this.datasource - .metricFindQuery(this.metaBuilder.buildTableQuery()) - .then(this.transformToSegments({})) - .catch(this.handleQueryError.bind(this)); - } - - tableChanged() { - this.target.table = this.tableSegment.value; - this.target.where = []; - this.target.group = []; - this.updateProjection(); - - const segment = this.uiSegmentSrv.newSegment('none'); - this.metricColumnSegment.html = segment.html; - this.metricColumnSegment.value = segment.value; - this.target.metricColumn = 'none'; - - const task1 = this.datasource.metricFindQuery(this.metaBuilder.buildColumnQuery('time')).then((result: any) => { - // check if time column is still valid - if (result.length > 0 && !find(result, (r: any) => r.text === 
this.target.timeColumn)) { - const segment = this.uiSegmentSrv.newSegment(result[0].text); - this.timeColumnSegment.html = segment.html; - this.timeColumnSegment.value = segment.value; - } - return this.timeColumnChanged(false); - }); - const task2 = this.datasource.metricFindQuery(this.metaBuilder.buildColumnQuery('value')).then((result: any) => { - if (result.length > 0) { - this.target.select = [[{ type: 'column', params: [result[0].text] }]]; - this.updateProjection(); - } - }); - - Promise.all([task1, task2]).then(() => { - this.updateRawSqlAndRefresh(); - }); - } - - getTimeColumnSegments() { - return this.datasource - .metricFindQuery(this.metaBuilder.buildColumnQuery('time')) - .then(this.transformToSegments({})) - .catch(this.handleQueryError.bind(this)); - } - - timeColumnChanged(refresh?: boolean) { - this.target.timeColumn = this.timeColumnSegment.value; - return this.datasource - .metricFindQuery(this.metaBuilder.buildDatatypeQuery(this.target.timeColumn)) - .then((result: any) => { - if (result.length === 1) { - if (this.target.timeColumnType !== result[0].text) { - this.target.timeColumnType = result[0].text; - } - let partModel; - if (this.queryModel.hasUnixEpochTimecolumn()) { - partModel = sqlPart.create({ type: 'macro', name: '$__unixEpochFilter', params: [] }); - } else { - partModel = sqlPart.create({ type: 'macro', name: '$__timeFilter', params: [] }); - } - - if (this.whereParts.length >= 1 && this.whereParts[0].def.type === 'macro') { - // replace current macro - this.whereParts[0] = partModel; - } else { - this.whereParts.splice(0, 0, partModel); - } - } - - this.updatePersistedParts(); - if (refresh !== false) { - this.updateRawSqlAndRefresh(); - } - }); - } - - getMetricColumnSegments() { - return this.datasource - .metricFindQuery(this.metaBuilder.buildColumnQuery('metric')) - .then(this.transformToSegments({ addNone: true })) - .catch(this.handleQueryError.bind(this)); - } - - metricColumnChanged() { - this.target.metricColumn = 
this.metricColumnSegment.value; - this.updateRawSqlAndRefresh(); - } - - onDataReceived(dataList: any) { - this.lastQueryError = undefined; - this.lastQueryMeta = dataList[0]?.meta; - } - - onDataError(err: any) { - if (err.data && err.data.results) { - const queryRes = err.data.results[this.target.refId]; - if (queryRes) { - this.lastQueryError = queryRes.error; - } - } - } - - transformToSegments(config: any) { - return (results: any) => { - const segments = map(results, (segment) => { - return this.uiSegmentSrv.newSegment({ - value: segment.text, - expandable: segment.expandable, - }); - }); - - if (config.addTemplateVars) { - for (const variable of this.templateSrv.getVariables()) { - let value; - value = '$' + variable.name; - if (config.templateQuoter && (variable as unknown as VariableWithMultiSupport).multi === false) { - value = config.templateQuoter(value); - } - - segments.unshift( - this.uiSegmentSrv.newSegment({ - type: 'template', - value: value, - expandable: true, - }) - ); - } - } - - if (config.addNone) { - segments.unshift(this.uiSegmentSrv.newSegment({ type: 'template', value: 'none', expandable: true })); - } - - return segments; - }; - } - - findAggregateIndex(selectParts: any) { - return findIndex(selectParts, (p: any) => p.def.type === 'aggregate' || p.def.type === 'percentile'); - } - - findWindowIndex(selectParts: any) { - return findIndex(selectParts, (p: any) => p.def.type === 'window' || p.def.type === 'moving_window'); - } - - addSelectPart(selectParts: any[], item: { value: any }, subItem: { type: any; value: any }) { - let partType = item.value; - if (subItem && subItem.type) { - partType = subItem.type; - } - let partModel = sqlPart.create({ type: partType }); - if (subItem) { - partModel.params[0] = subItem.value; - } - let addAlias = false; - - switch (partType) { - case 'column': - const parts = map(selectParts, (part: any) => { - return sqlPart.create({ type: part.def.type, params: clone(part.params) }); - }); - 
this.selectParts.push(parts); - break; - case 'percentile': - case 'aggregate': - // add group by if no group by yet - if (this.target.group.length === 0) { - this.addGroup('time', '$__interval'); - } - const aggIndex = this.findAggregateIndex(selectParts); - if (aggIndex !== -1) { - // replace current aggregation - selectParts[aggIndex] = partModel; - } else { - selectParts.splice(1, 0, partModel); - } - if (!find(selectParts, (p: any) => p.def.type === 'alias')) { - addAlias = true; - } - break; - case 'moving_window': - case 'window': - const windowIndex = this.findWindowIndex(selectParts); - if (windowIndex !== -1) { - // replace current window function - selectParts[windowIndex] = partModel; - } else { - const aggIndex = this.findAggregateIndex(selectParts); - if (aggIndex !== -1) { - selectParts.splice(aggIndex + 1, 0, partModel); - } else { - selectParts.splice(1, 0, partModel); - } - } - if (!find(selectParts, (p: any) => p.def.type === 'alias')) { - addAlias = true; - } - break; - case 'alias': - addAlias = true; - break; - } - - if (addAlias) { - // set initial alias name to column name - partModel = sqlPart.create({ type: 'alias', params: [selectParts[0].params[0].replace(/"/g, '')] }); - if (selectParts[selectParts.length - 1].def.type === 'alias') { - selectParts[selectParts.length - 1] = partModel; - } else { - selectParts.push(partModel); - } - } - - this.updatePersistedParts(); - this.updateRawSqlAndRefresh(); - } - - removeSelectPart(selectParts: any, part: { def: { type: string } }) { - if (part.def.type === 'column') { - // remove all parts of column unless its last column - if (this.selectParts.length > 1) { - const modelsIndex = indexOf(this.selectParts, selectParts); - this.selectParts.splice(modelsIndex, 1); - } - } else { - const partIndex = indexOf(selectParts, part); - selectParts.splice(partIndex, 1); - } - - this.updatePersistedParts(); - } - - handleSelectPartEvent(selectParts: any, part: { def: any }, evt: { name: any }) { - switch 
(evt.name) { - case 'get-param-options': { - switch (part.def.type) { - // case 'aggregate': - // return this.datasource - // .metricFindQuery(this.metaBuilder.buildAggregateQuery()) - // .then(this.transformToSegments({})) - // .catch(this.handleQueryError.bind(this)); - case 'column': - return this.datasource - .metricFindQuery(this.metaBuilder.buildColumnQuery('value')) - .then(this.transformToSegments({})) - .catch(this.handleQueryError.bind(this)); - } - } - case 'part-param-changed': { - this.updatePersistedParts(); - this.updateRawSqlAndRefresh(); - break; - } - case 'action': { - this.removeSelectPart(selectParts, part); - this.updateRawSqlAndRefresh(); - break; - } - case 'get-part-actions': { - return Promise.resolve([{ text: 'Remove', value: 'remove-part' }]); - } - } - } - - handleGroupPartEvent(part: any, index: any, evt: { name: any }) { - switch (evt.name) { - case 'get-param-options': { - return this.datasource - .metricFindQuery(this.metaBuilder.buildColumnQuery()) - .then(this.transformToSegments({})) - .catch(this.handleQueryError.bind(this)); - } - case 'part-param-changed': { - this.updatePersistedParts(); - this.updateRawSqlAndRefresh(); - break; - } - case 'action': { - this.removeGroup(part, index); - this.updateRawSqlAndRefresh(); - break; - } - case 'get-part-actions': { - return Promise.resolve([{ text: 'Remove', value: 'remove-part' }]); - } - } - } - - addGroup(partType: string, value: string) { - let params = [value]; - if (partType === 'time') { - params = ['$__interval', 'none']; - } - const partModel = sqlPart.create({ type: partType, params: params }); - - if (partType === 'time') { - // put timeGroup at start - this.groupParts.splice(0, 0, partModel); - } else { - this.groupParts.push(partModel); - } - - // add aggregates when adding group by - for (const selectParts of this.selectParts) { - if (!selectParts.some((part) => part.def.type === 'aggregate')) { - const aggregate = sqlPart.create({ type: 'aggregate', params: ['avg'] }); 
- selectParts.splice(1, 0, aggregate); - if (!selectParts.some((part) => part.def.type === 'alias')) { - const alias = sqlPart.create({ type: 'alias', params: [selectParts[0].part.params[0]] }); - selectParts.push(alias); - } - } - } - - this.updatePersistedParts(); - } - - removeGroup(part: { def: { type: string } }, index: number) { - if (part.def.type === 'time') { - // remove aggregations - this.selectParts = map(this.selectParts, (s: any) => { - return filter(s, (part: any) => { - if (part.def.type === 'aggregate' || part.def.type === 'percentile') { - return false; - } - return true; - }); - }); - } - - this.groupParts.splice(index, 1); - this.updatePersistedParts(); - } - - handleWherePartEvent(whereParts: any, part: any, evt: any, index: any) { - switch (evt.name) { - case 'get-param-options': { - switch (evt.param.name) { - case 'left': - return this.datasource - .metricFindQuery(this.metaBuilder.buildColumnQuery()) - .then(this.transformToSegments({})) - .catch(this.handleQueryError.bind(this)); - case 'right': - if (['int', 'bigint', 'double', 'datetime'].indexOf(part.datatype) > -1) { - // don't do value lookups for numerical fields - return Promise.resolve([]); - } else { - return this.datasource - .metricFindQuery(this.metaBuilder.buildValueQuery(part.params[0])) - .then( - this.transformToSegments({ - addTemplateVars: true, - templateQuoter: (v: string) => { - return this.queryModel.quoteLiteral(v); - }, - }) - ) - .catch(this.handleQueryError.bind(this)); - } - case 'op': - return Promise.resolve(this.uiSegmentSrv.newOperators(this.metaBuilder.getOperators(part.datatype))); - default: - return Promise.resolve([]); - } - } - case 'part-param-changed': { - this.updatePersistedParts(); - this.datasource.metricFindQuery(this.metaBuilder.buildDatatypeQuery(part.params[0])).then((d: any) => { - if (d.length === 1) { - part.datatype = d[0].text; - } - }); - this.updateRawSqlAndRefresh(); - break; - } - case 'action': { - // remove element - 
whereParts.splice(index, 1); - this.updatePersistedParts(); - this.updateRawSqlAndRefresh(); - break; - } - case 'get-part-actions': { - return Promise.resolve([{ text: 'Remove', value: 'remove-part' }]); - } - } - } - - getWhereOptions() { - const options = []; - if (this.queryModel.hasUnixEpochTimecolumn()) { - options.push(this.uiSegmentSrv.newSegment({ type: 'macro', value: '$__unixEpochFilter' })); - } else { - options.push(this.uiSegmentSrv.newSegment({ type: 'macro', value: '$__timeFilter' })); - } - options.push(this.uiSegmentSrv.newSegment({ type: 'expression', value: 'Expression' })); - return Promise.resolve(options); - } - - addWhereAction(part: any, index: number) { - switch (this.whereAdd.type) { - case 'macro': { - const partModel = sqlPart.create({ type: 'macro', name: this.whereAdd.value, params: [] }); - if (this.whereParts.length >= 1 && this.whereParts[0].def.type === 'macro') { - // replace current macro - this.whereParts[0] = partModel; - } else { - this.whereParts.splice(0, 0, partModel); - } - break; - } - default: { - this.whereParts.push(sqlPart.create({ type: 'expression', params: ['value', '=', 'value'] })); - } - } - - this.updatePersistedParts(); - this.resetPlusButton(this.whereAdd); - this.updateRawSqlAndRefresh(); - } - - getGroupOptions() { - return this.datasource - .metricFindQuery(this.metaBuilder.buildColumnQuery('group')) - .then((tags: any) => { - const options = []; - if (!this.queryModel.hasTimeGroup()) { - options.push(this.uiSegmentSrv.newSegment({ type: 'time', value: 'time($__interval,none)' })); - } - for (const tag of tags) { - options.push(this.uiSegmentSrv.newSegment({ type: 'column', value: tag.text })); - } - return options; - }) - .catch(this.handleQueryError.bind(this)); - } - - addGroupAction() { - switch (this.groupAdd.value) { - default: { - this.addGroup(this.groupAdd.type, this.groupAdd.value); - } - } - - this.resetPlusButton(this.groupAdd); - this.updateRawSqlAndRefresh(); - } - - handleQueryError(err: 
any): any[] { - this.error = err.message || 'Failed to issue metric query'; - return []; - } -} diff --git a/public/app/plugins/datasource/mysql/response_parser.ts b/public/app/plugins/datasource/mysql/response_parser.ts deleted file mode 100644 index ff16bd60551..00000000000 --- a/public/app/plugins/datasource/mysql/response_parser.ts +++ /dev/null @@ -1,77 +0,0 @@ -import { uniqBy } from 'lodash'; - -import { AnnotationEvent, DataFrame, MetricFindValue } from '@grafana/data'; -import { BackendDataSourceResponse, FetchResponse, toDataQueryResponse } from '@grafana/runtime'; - -export default class ResponseParser { - transformMetricFindResponse(raw: FetchResponse): MetricFindValue[] { - const frames = toDataQueryResponse(raw).data as DataFrame[]; - - if (!frames || !frames.length) { - return []; - } - - const frame = frames[0]; - - const values: MetricFindValue[] = []; - const textField = frame.fields.find((f) => f.name === '__text'); - const valueField = frame.fields.find((f) => f.name === '__value'); - - if (textField && valueField) { - for (let i = 0; i < textField.values.length; i++) { - values.push({ text: '' + textField.values.get(i), value: '' + valueField.values.get(i) }); - } - } else { - values.push( - ...frame.fields - .flatMap((f) => f.values.toArray()) - .map((v) => ({ - text: v, - })) - ); - } - - return uniqBy(values, 'text'); - } - - async transformAnnotationResponse(options: any, data: BackendDataSourceResponse): Promise { - const frames = toDataQueryResponse({ data: data }).data as DataFrame[]; - if (!frames || !frames.length) { - return []; - } - const frame = frames[0]; - const timeField = frame.fields.find((f) => f.name === 'time' || f.name === 'time_sec'); - - if (!timeField) { - throw new Error('Missing mandatory time column (with time column alias) in annotation query'); - } - - if (frame.fields.find((f) => f.name === 'title')) { - throw new Error('The title column for annotations is deprecated, now only a column named text is returned'); - 
} - - const timeEndField = frame.fields.find((f) => f.name === 'timeend'); - const textField = frame.fields.find((f) => f.name === 'text'); - const tagsField = frame.fields.find((f) => f.name === 'tags'); - - const list: AnnotationEvent[] = []; - for (let i = 0; i < frame.length; i++) { - const timeEnd = timeEndField && timeEndField.values.get(i) ? Math.floor(timeEndField.values.get(i)) : undefined; - list.push({ - annotation: options.annotation, - time: Math.floor(timeField.values.get(i)), - timeEnd, - text: textField && textField.values.get(i) ? textField.values.get(i) : '', - tags: - tagsField && tagsField.values.get(i) - ? tagsField.values - .get(i) - .trim() - .split(/\s*,\s*/) - : [], - }); - } - - return list; - } -} diff --git a/public/app/plugins/datasource/mysql/specs/datasource.test.ts b/public/app/plugins/datasource/mysql/specs/datasource.test.ts index 54ab5575079..6e977681d00 100644 --- a/public/app/plugins/datasource/mysql/specs/datasource.test.ts +++ b/public/app/plugins/datasource/mysql/specs/datasource.test.ts @@ -9,12 +9,12 @@ import { } from '@grafana/data'; import { FetchResponse, setBackendSrv } from '@grafana/runtime'; import { backendSrv } from 'app/core/services/backend_srv'; // will use the version in __mocks__ +import { SQLQuery } from 'app/features/plugins/sql/types'; import { TemplateSrv } from 'app/features/templating/template_srv'; +import { initialCustomVariableModelState } from 'app/features/variables/custom/reducer'; -import { initialCustomVariableModelState } from '../../../../features/variables/custom/reducer'; -import { MysqlDatasource } from '../datasource'; - -import { MySQLOptions, MySQLQuery } from './../types'; +import { MySqlDatasource } from '../MySqlDatasource'; +import { MySQLOptions } from '../types'; describe('MySQLDatasource', () => { const setupTextContext = (response: any) => { @@ -30,7 +30,8 @@ describe('MySQLDatasource', () => { const variable = { ...initialCustomVariableModelState }; 
fetchMock.mockImplementation((options) => of(createFetchResponse(response))); - const ds = new MysqlDatasource(instanceSettings, templateSrv); + const ds = new MySqlDatasource(instanceSettings); + Reflect.set(ds, 'templateSrv', templateSrv); return { ds, variable, templateSrv, fetchMock }; }; @@ -52,7 +53,7 @@ describe('MySQLDatasource', () => { hide: true, }, ], - } as unknown as DataQueryRequest; + } as unknown as DataQueryRequest; const { ds, fetchMock } = setupTextContext({}); @@ -63,54 +64,6 @@ describe('MySQLDatasource', () => { }); }); - describe('When performing annotationQuery', () => { - let results: any; - const annotationName = 'MyAnno'; - const options = { - annotation: { - name: annotationName, - rawQuery: 'select time_sec, text, tags from table;', - }, - range: { - from: dateTime(1432288354), - to: dateTime(1432288401), - }, - }; - const response = { - results: { - MyAnno: { - frames: [ - dataFrameToJSON( - new MutableDataFrame({ - fields: [ - { name: 'time_sec', values: [1432288355, 1432288390, 1432288400] }, - { name: 'text', values: ['some text', 'some text2', 'some text3'] }, - { name: 'tags', values: ['TagA,TagB', ' TagB , TagC', null] }, - ], - }) - ), - ], - }, - }, - }; - - beforeEach(async () => { - const { ds } = setupTextContext(response); - const data = await ds.annotationQuery(options); - results = data; - }); - - it('should return annotation list', async () => { - expect(results.length).toBe(3); - expect(results[0].text).toBe('some text'); - expect(results[0].tags[0]).toBe('TagA'); - expect(results[0].tags[1]).toBe('TagB'); - expect(results[1].tags[0]).toBe('TagB'); - expect(results[1].tags[1]).toBe('TagC'); - expect(results[2].tags.length).toBe(0); - }); - }); - describe('When performing metricFindQuery that returns multiple string fields', () => { const query = 'select * from atable'; const response = { @@ -376,6 +329,7 @@ describe('MySQLDatasource', () => { grafana_metric WHERE $__timeFilter(createdAt) AND + foo = 'bar' AND 
measurement = 'logins.count' AND hostname IN($host) GROUP BY 1, 3 @@ -383,6 +337,7 @@ describe('MySQLDatasource', () => { const query = { rawSql, rawQuery: true, + refId: '', }; templateSrv.init([ { type: 'query', name: 'summarize', current: { value: '1m' } }, @@ -407,6 +362,7 @@ describe('MySQLDatasource', () => { const query = { rawSql, rawQuery: true, + refId: '', }; templateSrv.init([ { type: 'query', name: 'summarize', current: { value: '1m' } }, diff --git a/public/app/plugins/datasource/mysql/sqlCompletionProvider.ts b/public/app/plugins/datasource/mysql/sqlCompletionProvider.ts new file mode 100644 index 00000000000..59adea66426 --- /dev/null +++ b/public/app/plugins/datasource/mysql/sqlCompletionProvider.ts @@ -0,0 +1,281 @@ +import { + ColumnDefinition, + CompletionItemKind, + CompletionItemPriority, + LanguageCompletionProvider, + LinkedToken, + StatementPlacementProvider, + StatementPosition, + SuggestionKindProvider, + TableDefinition, + TokenType, +} from '@grafana/experimental'; +import { PositionContext } from '@grafana/experimental/dist/sql-editor/types'; +import { AGGREGATE_FNS, OPERATORS } from 'app/features/plugins/sql/constants'; +import { Aggregate, DB, MetaDefinition, SQLQuery } from 'app/features/plugins/sql/types'; + +import { FUNCTIONS } from './functions'; + +interface CompletionProviderGetterArgs { + getColumns: React.MutableRefObject<(t: SQLQuery) => Promise>; + getTables: React.MutableRefObject<(d?: string) => Promise>; + fetchMeta: React.MutableRefObject<(d?: string) => Promise>; + getFunctions: React.MutableRefObject<(d?: string) => Aggregate[]>; +} + +export const getSqlCompletionProvider: (args: CompletionProviderGetterArgs) => LanguageCompletionProvider = + ({ getColumns, getTables, fetchMeta, getFunctions }) => + () => ({ + triggerCharacters: ['.', ' ', '$', ',', '(', "'"], + supportedFunctions: () => getFunctions.current(), + supportedOperators: () => OPERATORS, + customSuggestionKinds: customSuggestionKinds(getTables, 
getColumns, fetchMeta), + customStatementPlacement, + }); + +export enum CustomStatementPlacement { + AfterDataset = 'afterDataset', + AfterFrom = 'afterFrom', + AfterSelect = 'afterSelect', +} + +export enum CustomSuggestionKind { + TablesWithinDataset = 'tablesWithinDataset', +} + +export enum Direction { + Next = 'next', + Previous = 'previous', +} + +const TRIGGER_SUGGEST = 'editor.action.triggerSuggest'; + +enum Keyword { + Select = 'SELECT', + Where = 'WHERE', + From = 'FROM', +} + +export const customStatementPlacement: StatementPlacementProvider = () => [ + { + id: CustomStatementPlacement.AfterDataset, + resolve: (currentToken, previousKeyword) => { + return Boolean( + currentToken?.is(TokenType.Delimiter, '.') || + (currentToken?.is(TokenType.Whitespace) && currentToken?.previous?.is(TokenType.Delimiter, '.')) + ); + }, + }, + { + id: CustomStatementPlacement.AfterFrom, + resolve: (currentToken, previousKeyword) => { + return Boolean(isAfterFrom(currentToken)); + }, + }, + { + id: CustomStatementPlacement.AfterSelect, + resolve: (token, previousKeyword) => { + const is = + isDirectlyAfter(token, Keyword.Select) || + (isAfterSelect(token) && token?.previous?.is(TokenType.Delimiter, ',')); + return Boolean(is); + }, + }, +]; + +export const customSuggestionKinds: ( + getTables: CompletionProviderGetterArgs['getTables'], + getFields: CompletionProviderGetterArgs['getColumns'], + fetchMeta: CompletionProviderGetterArgs['fetchMeta'] +) => SuggestionKindProvider = (getTables, _, fetchMeta) => () => + [ + { + id: CustomSuggestionKind.TablesWithinDataset, + applyTo: [CustomStatementPlacement.AfterDataset], + suggestionsResolver: async (ctx) => { + const tablePath = ctx.currentToken ? getTablePath(ctx.currentToken) : ''; + const t = await getTables.current(tablePath); + return t.map((table) => suggestion(table.name, table.completion ?? 
table.name, CompletionItemKind.Field, ctx)); + }, + }, + { + id: 'metaAfterSelect', + applyTo: [CustomStatementPlacement.AfterSelect], + suggestionsResolver: async (ctx) => { + const path = getPath(ctx.currentToken, Direction.Next); + const t = await fetchMeta.current(path); + return t.map((meta) => { + const completion = meta.kind === CompletionItemKind.Class ? `${meta.completion}.` : meta.completion; + return suggestion(meta.name, completion!, meta.kind, ctx); + }); + }, + }, + { + id: 'metaAfterSelectFuncArg', + applyTo: [StatementPosition.AfterSelectFuncFirstArgument], + suggestionsResolver: async (ctx) => { + const path = getPath(ctx.currentToken, Direction.Next); + const t = await fetchMeta.current(path); + return t.map((meta) => { + const completion = meta.kind === CompletionItemKind.Class ? `${meta.completion}.` : meta.completion; + return suggestion(meta.name, completion!, meta.kind, ctx); + }); + }, + }, + { + id: 'metaAfterFrom', + applyTo: [CustomStatementPlacement.AfterFrom], + suggestionsResolver: async (ctx) => { + // TODO: why is this triggering when isAfterFrom is false + if (!isAfterFrom(ctx.currentToken)) { + return []; + } + const path = ctx.currentToken?.value || ''; + const t = await fetchMeta.current(path); + return t.map((meta) => suggestion(meta.name, meta.completion!, meta.kind, ctx)); + }, + }, + { + id: `MYSQL${StatementPosition.WhereKeyword}`, + applyTo: [StatementPosition.WhereKeyword], + suggestionsResolver: async (ctx) => { + const path = getPath(ctx.currentToken, Direction.Previous); + const t = await fetchMeta.current(path); + return t.map((meta) => { + const completion = meta.kind === CompletionItemKind.Class ? 
`${meta.completion}.` : meta.completion; + return suggestion(meta.name, completion!, meta.kind, ctx); + }); + }, + }, + { + id: StatementPosition.WhereComparisonOperator, + applyTo: [StatementPosition.WhereComparisonOperator], + suggestionsResolver: async (ctx) => { + if (!isAfterWhere(ctx.currentToken)) { + return []; + } + const path = getPath(ctx.currentToken, Direction.Previous); + const t = await fetchMeta.current(path); + const sugg = t.map((meta) => { + const completion = meta.kind === CompletionItemKind.Class ? `${meta.completion}.` : meta.completion; + return suggestion(meta.name, completion!, meta.kind, ctx); + }); + return sugg; + }, + }, + ]; + +function getPath(token: LinkedToken | null, direction: Direction) { + let path = token?.value || ''; + const fromValue = keywordValue(token, Keyword.From, direction); + if (fromValue) { + path = fromValue; + } + return path; +} + +export function getTablePath(token: LinkedToken) { + let processedToken = token; + let tablePath = ''; + while (processedToken?.previous && !processedToken.previous.isWhiteSpace()) { + processedToken = processedToken.previous; + tablePath = processedToken.value + tablePath; + } + + tablePath = tablePath.trim(); + return tablePath; +} + +function suggestion(label: string, completion: string, kind: CompletionItemKind, ctx: PositionContext) { + return { + label, + insertText: completion, + command: { id: TRIGGER_SUGGEST, title: '' }, + kind, + sortText: CompletionItemPriority.High, + range: { + ...ctx.range, + startColumn: ctx.range.endColumn, + endColumn: ctx.range.endColumn, + }, + }; +} + +function isAfterSelect(token: LinkedToken | null) { + return isAfterKeyword(token, Keyword.Select); +} + +function isAfterFrom(token: LinkedToken | null) { + return isDirectlyAfter(token, Keyword.From); +} + +function isAfterWhere(token: LinkedToken | null) { + return isAfterKeyword(token, Keyword.Where); +} + +function isAfterKeyword(token: LinkedToken | null, keyword: string) { + if 
(!token?.is(TokenType.Keyword)) { + let curToken = token; + while (true) { + if (!curToken) { + return false; + } + if (curToken.is(TokenType.Keyword, keyword)) { + return true; + } + if (curToken.isKeyword()) { + return false; + } + curToken = curToken?.previous || null; + } + } + return false; +} + +function isDirectlyAfter(token: LinkedToken | null, keyword: string) { + return token?.is(TokenType.Whitespace) && token?.previous?.is(TokenType.Keyword, keyword); +} + +function keywordValue(token: LinkedToken | null, keyword: Keyword, direction: Direction) { + let next = token; + while (next) { + if (next.is(TokenType.Keyword, keyword)) { + return tokenValue(next); + } + next = next[direction]; + } + return false; +} + +function tokenValue(token: LinkedToken | null): string | undefined { + const ws = token?.next; + if (ws?.isWhiteSpace()) { + const v = ws.next; + const delim = v?.next; + if (!delim?.is(TokenType.Delimiter)) { + return v?.value; + } + return `${v?.value}${delim?.value}${delim.next?.value}`; + } + return undefined; +} + +export async function fetchColumns(db: DB, q: SQLQuery) { + const cols = await db.fields(q); + if (cols.length > 0) { + return cols.map((c) => { + return { name: c.value, type: c.value, description: c.value }; + }); + } else { + return []; + } +} + +export async function fetchTables(db: DB, q: Partial) { + const tables = await db.lookup(q.dataset); + return tables; +} + +export function getFunctions(): Aggregate[] { + return [...AGGREGATE_FNS, ...FUNCTIONS]; +} diff --git a/public/app/plugins/datasource/mysql/sql_part.ts b/public/app/plugins/datasource/mysql/sql_part.ts deleted file mode 100644 index 7506431c3d0..00000000000 --- a/public/app/plugins/datasource/mysql/sql_part.ts +++ /dev/null @@ -1,86 +0,0 @@ -import { SqlPartDef, SqlPart } from 'app/angular/components/sql_part/sql_part'; - -const index: any[] = []; - -function createPart(part: any): any { - const def = index[part.type]; - if (!def) { - return null; - } - - return new 
SqlPart(part, def); -} - -function register(options: any) { - index[options.type] = new SqlPartDef(options); -} - -register({ - type: 'column', - style: 'label', - params: [{ type: 'column', dynamicLookup: true }], - defaultParams: ['value'], -}); - -register({ - type: 'expression', - style: 'expression', - label: 'Expr:', - params: [ - { name: 'left', type: 'string', dynamicLookup: true }, - { name: 'op', type: 'string', dynamicLookup: true }, - { name: 'right', type: 'string', dynamicLookup: true }, - ], - defaultParams: ['value', '=', 'value'], -}); - -register({ - type: 'macro', - style: 'label', - label: 'Macro:', - params: [], - defaultParams: [], -}); - -register({ - type: 'aggregate', - style: 'label', - params: [ - { - name: 'name', - type: 'string', - options: ['avg', 'count', 'min', 'max', 'sum', 'stddev', 'variance'], - }, - ], - defaultParams: ['avg'], -}); - -register({ - type: 'alias', - style: 'label', - params: [{ name: 'name', type: 'string', quote: 'double' }], - defaultParams: ['alias'], -}); - -register({ - type: 'time', - style: 'function', - label: 'time', - params: [ - { - name: 'interval', - type: 'interval', - options: ['$__interval', '1s', '10s', '1m', '5m', '10m', '15m', '1h'], - }, - { - name: 'fill', - type: 'string', - options: ['none', 'NULL', 'previous', '0'], - }, - ], - defaultParams: ['$__interval', 'none'], -}); - -export default { - create: createPart, -}; diff --git a/public/app/plugins/datasource/mysql/types.ts b/public/app/plugins/datasource/mysql/types.ts index aadcaa1f84f..dc1702c8b51 100644 --- a/public/app/plugins/datasource/mysql/types.ts +++ b/public/app/plugins/datasource/mysql/types.ts @@ -1,5 +1,5 @@ -import { DataQuery, DataSourceJsonData } from '@grafana/data'; -import { SQLConnectionLimits } from 'app/features/plugins/sql/types'; +import { SQLOptions, SQLQuery } from 'app/features/plugins/sql/types'; + export interface MysqlQueryForInterpolation { alias?: any; format?: any; @@ -8,21 +8,6 @@ export interface 
MysqlQueryForInterpolation { hide?: any; } -export interface MySQLOptions extends DataSourceJsonData, SQLConnectionLimits { - tlsAuth: boolean; - tlsAuthWithCACert: boolean; - timezone: string; - tlsSkipVerify: boolean; - user: string; - database: string; - url: string; - timeInterval: string; -} +export interface MySQLOptions extends SQLOptions {} -export type ResultFormat = 'time_series' | 'table'; - -export interface MySQLQuery extends DataQuery { - alias?: string; - format?: ResultFormat; - rawSql?: any; -} +export interface MySQLQuery extends SQLQuery {} From 1834973cc677f6df9886d380dc086f489b6fb410 Mon Sep 17 00:00:00 2001 From: Christopher Moyer <35463610+chri2547@users.noreply.github.com> Date: Fri, 15 Jul 2022 16:28:02 -0500 Subject: [PATCH 018/116] Docs: removes old whats new docs (#52188) * removes old whats new docs * adds v7 whats new back in --- docs/sources/setup-grafana/upgrade-grafana.md | 2 +- docs/sources/whatsnew/_index.md | 73 ++++-- docs/sources/whatsnew/whats-new-in-v2-0.md | 185 -------------- docs/sources/whatsnew/whats-new-in-v2-1.md | 155 ------------ docs/sources/whatsnew/whats-new-in-v2-5.md | 121 --------- docs/sources/whatsnew/whats-new-in-v2-6.md | 144 ----------- docs/sources/whatsnew/whats-new-in-v3-0.md | 236 ------------------ docs/sources/whatsnew/whats-new-in-v3-1.md | 68 ----- docs/sources/whatsnew/whats-new-in-v4-0.md | 184 -------------- docs/sources/whatsnew/whats-new-in-v4-1.md | 77 ------ docs/sources/whatsnew/whats-new-in-v4-2.md | 93 ------- docs/sources/whatsnew/whats-new-in-v4-3.md | 109 -------- docs/sources/whatsnew/whats-new-in-v4-4.md | 55 ---- docs/sources/whatsnew/whats-new-in-v4-5.md | 77 ------ docs/sources/whatsnew/whats-new-in-v4-6.md | 80 ------ docs/sources/whatsnew/whats-new-in-v5-0.md | 155 ------------ docs/sources/whatsnew/whats-new-in-v5-1.md | 129 ---------- docs/sources/whatsnew/whats-new-in-v5-2.md | 106 -------- docs/sources/whatsnew/whats-new-in-v5-3.md | 97 ------- 
docs/sources/whatsnew/whats-new-in-v5-4.md | 88 ------- docs/sources/whatsnew/whats-new-in-v6-0.md | 181 -------------- docs/sources/whatsnew/whats-new-in-v6-1.md | 64 ----- docs/sources/whatsnew/whats-new-in-v6-2.md | 101 -------- docs/sources/whatsnew/whats-new-in-v6-3.md | 152 ----------- docs/sources/whatsnew/whats-new-in-v6-4.md | 152 ----------- docs/sources/whatsnew/whats-new-in-v6-5.md | 216 ---------------- docs/sources/whatsnew/whats-new-in-v6-6.md | 225 ----------------- docs/sources/whatsnew/whats-new-in-v6-7.md | 105 -------- 28 files changed, 54 insertions(+), 3376 deletions(-) delete mode 100644 docs/sources/whatsnew/whats-new-in-v2-0.md delete mode 100644 docs/sources/whatsnew/whats-new-in-v2-1.md delete mode 100644 docs/sources/whatsnew/whats-new-in-v2-5.md delete mode 100644 docs/sources/whatsnew/whats-new-in-v2-6.md delete mode 100644 docs/sources/whatsnew/whats-new-in-v3-0.md delete mode 100644 docs/sources/whatsnew/whats-new-in-v3-1.md delete mode 100644 docs/sources/whatsnew/whats-new-in-v4-0.md delete mode 100644 docs/sources/whatsnew/whats-new-in-v4-1.md delete mode 100644 docs/sources/whatsnew/whats-new-in-v4-2.md delete mode 100644 docs/sources/whatsnew/whats-new-in-v4-3.md delete mode 100644 docs/sources/whatsnew/whats-new-in-v4-4.md delete mode 100644 docs/sources/whatsnew/whats-new-in-v4-5.md delete mode 100644 docs/sources/whatsnew/whats-new-in-v4-6.md delete mode 100644 docs/sources/whatsnew/whats-new-in-v5-0.md delete mode 100644 docs/sources/whatsnew/whats-new-in-v5-1.md delete mode 100644 docs/sources/whatsnew/whats-new-in-v5-2.md delete mode 100644 docs/sources/whatsnew/whats-new-in-v5-3.md delete mode 100644 docs/sources/whatsnew/whats-new-in-v5-4.md delete mode 100644 docs/sources/whatsnew/whats-new-in-v6-0.md delete mode 100644 docs/sources/whatsnew/whats-new-in-v6-1.md delete mode 100644 docs/sources/whatsnew/whats-new-in-v6-2.md delete mode 100644 docs/sources/whatsnew/whats-new-in-v6-3.md delete mode 100644 
docs/sources/whatsnew/whats-new-in-v6-4.md delete mode 100644 docs/sources/whatsnew/whats-new-in-v6-5.md delete mode 100644 docs/sources/whatsnew/whats-new-in-v6-6.md delete mode 100644 docs/sources/whatsnew/whats-new-in-v6-7.md diff --git a/docs/sources/setup-grafana/upgrade-grafana.md b/docs/sources/setup-grafana/upgrade-grafana.md index 56c5d583696..f6c48ddeaf1 100644 --- a/docs/sources/setup-grafana/upgrade-grafana.md +++ b/docs/sources/setup-grafana/upgrade-grafana.md @@ -246,7 +246,7 @@ change the `[security]` setting `cookie_secure` to `true` and use HTTPS when `co ### PhantomJS removed -PhantomJS was deprecated in [Grafana v6.4]({{< relref "../whatsnew/whats-new-in-v6-4/#phantomjs-deprecation" >}}) and starting from Grafana v7.0.0, all PhantomJS support has been removed. This means that Grafana no longer ships with a built-in image renderer, and we advise you to install the [Grafana Image Renderer plugin](https://grafana.com/grafana/plugins/grafana-image-renderer). +PhantomJS was deprecated in Grafana v6.4 and starting from Grafana v7.0.0, all PhantomJS support has been removed. This means that Grafana no longer ships with a built-in image renderer, and we advise you to install the [Grafana Image Renderer plugin](https://grafana.com/grafana/plugins/grafana-image-renderer). 
### Dashboard minimum refresh interval enforced diff --git a/docs/sources/whatsnew/_index.md b/docs/sources/whatsnew/_index.md index 37a8c807c8e..34970d95552 100644 --- a/docs/sources/whatsnew/_index.md +++ b/docs/sources/whatsnew/_index.md @@ -2,11 +2,63 @@ aliases: - /docs/grafana/latest/guides/ - /docs/grafana/latest/whatsnew/ + - /docs/grafana/latest/guides/whats-new-in-v2/ + - /docs/grafana/latest/whatsnew/whats-new-in-v2-0/ + - /docs/grafana/latest/guides/whats-new-in-v2-1/ + - /docs/grafana/latest/whatsnew/whats-new-in-v2-1/ + - /docs/grafana/latest/guides/whats-new-in-v2-5/ + - /docs/grafana/latest/whatsnew/whats-new-in-v2-5/ + - /docs/grafana/latest/guides/whats-new-in-v2-6/ + - /docs/grafana/latest/whatsnew/whats-new-in-v2-6/ + - /docs/grafana/latest/guides/whats-new-in-v3/ + - /docs/grafana/latest/whatsnew/whats-new-in-v3-0/ + - /docs/grafana/latest/guides/whats-new-in-v3-1/ + - /docs/grafana/latest/whatsnew/whats-new-in-v3-1/ + - /docs/grafana/latest/guides/whats-new-in-v4/ + - /docs/grafana/latest/whatsnew/whats-new-in-v4-0/ + - /docs/grafana/latest/guides/whats-new-in-v4-1/ + - /docs/grafana/latest/whatsnew/whats-new-in-v4-1/ + - /docs/grafana/latest/guides/whats-new-in-v4-2/ + - /docs/grafana/latest/whatsnew/whats-new-in-v4-2/ + - /docs/grafana/latest/guides/whats-new-in-v4-3/ + - /docs/grafana/latest/whatsnew/whats-new-in-v4-3/ + - /docs/grafana/latest/guides/whats-new-in-v4-4/ + - /docs/grafana/latest/whatsnew/whats-new-in-v4-4/ + - /docs/grafana/latest/guides/whats-new-in-v4-5/ + - /docs/grafana/latest/whatsnew/whats-new-in-v4-5/ + - /docs/grafana/latest/guides/whats-new-in-v4-6/ + - /docs/grafana/latest/whatsnew/whats-new-in-v4-6/ + - /docs/grafana/latest/guides/whats-new-in-v5/ + - /docs/grafana/latest/whatsnew/whats-new-in-v5-0/ + - /docs/grafana/latest/guides/whats-new-in-v5-1/ + - /docs/grafana/latest/whatsnew/whats-new-in-v5-1/ + - /docs/grafana/latest/guides/whats-new-in-v5-2/ + - /docs/grafana/latest/whatsnew/whats-new-in-v5-2/ + - 
/docs/grafana/latest/guides/whats-new-in-v5-3/ + - /docs/grafana/latest/whatsnew/whats-new-in-v5-3/ + - /docs/grafana/latest/guides/whats-new-in-v5-4/ + - /docs/grafana/latest/whatsnew/whats-new-in-v5-4/ + - /docs/grafana/latest/guides/whats-new-in-v6-0/ + - /docs/grafana/latest/whatsnew/whats-new-in-v6-0/ + - /docs/grafana/latest/guides/whats-new-in-v6-1/ + - /docs/grafana/latest/whatsnew/whats-new-in-v6-1/ + - /docs/grafana/latest/guides/whats-new-in-v6-2/ + - /docs/grafana/latest/whatsnew/whats-new-in-v6-2/ + - /docs/grafana/latest/guides/whats-new-in-v6-3/ + - /docs/grafana/latest/whatsnew/whats-new-in-v6-3/ + - /docs/grafana/latest/guides/whats-new-in-v6-4/ + - /docs/grafana/latest/whatsnew/whats-new-in-v6-4/ + - /docs/grafana/latest/guides/whats-new-in-v6-5/ + - /docs/grafana/latest/whatsnew/whats-new-in-v6-5/ + - /docs/grafana/latest/guides/whats-new-in-v6-6/ + - /docs/grafana/latest/whatsnew/whats-new-in-v6-6/ + - /docs/grafana/latest/guides/whats-new-in-v6-7/ + - /docs/grafana/latest/whatsnew/whats-new-in-v6-7/ title: What's new weight: 1 --- -# What's new Grafana +# What's new in Grafana Grafana is changing all the time. For release highlights checkout links below, if you want a complete list of every change, as well as info on deprecations, breaking changes and plugin development read the [release notes]({{< relref "../release-notes/" >}}). 
@@ -32,22 +84,3 @@ as info on deprecations, breaking changes and plugin development read the [relea - [What's new in 7.2]({{< relref "whats-new-in-v7-2/" >}}) - [What's new in 7.1]({{< relref "whats-new-in-v7-1/" >}}) - [What's new in 7.0]({{< relref "whats-new-in-v7-0/" >}}) - -## Grafana 6 - -- [What's new in 6.7]({{< relref "whats-new-in-v6-7/" >}}) -- [What's new in 6.6]({{< relref "whats-new-in-v6-6/" >}}) -- [What's new in 6.5]({{< relref "whats-new-in-v6-5/" >}}) -- [What's new in 6.4]({{< relref "whats-new-in-v6-4/" >}}) -- [What's new in 6.3]({{< relref "whats-new-in-v6-3/" >}}) -- [What's new in 6.2]({{< relref "whats-new-in-v6-2/" >}}) -- [What's new in 6.1]({{< relref "whats-new-in-v6-1/" >}}) -- [What's new in 6.0]({{< relref "whats-new-in-v6-0/" >}}) - -## Grafana 5 - -- [What's new in 5.4]({{< relref "whats-new-in-v5-4/" >}}) -- [What's new in 5.3]({{< relref "whats-new-in-v5-3/" >}}) -- [What's new in 5.2]({{< relref "whats-new-in-v5-2/" >}}) -- [What's new in 5.1]({{< relref "whats-new-in-v5-1/" >}}) -- [What's new in 5.0]({{< relref "whats-new-in-v5-0/" >}}) diff --git a/docs/sources/whatsnew/whats-new-in-v2-0.md b/docs/sources/whatsnew/whats-new-in-v2-0.md deleted file mode 100644 index ad6dab1fc8c..00000000000 --- a/docs/sources/whatsnew/whats-new-in-v2-0.md +++ /dev/null @@ -1,185 +0,0 @@ ---- -_build: - list: false -aliases: - - /docs/grafana/latest/guides/whats-new-in-v2/ - - /docs/grafana/latest/whatsnew/whats-new-in-v2-0/ -description: Feature and improvement highlights for Grafana v2.0 -keywords: - - grafana - - new - - documentation - - '2.0' - - release notes -title: What's new in Grafana v2.0 -weight: -1 ---- - -# What's new in Grafana v2.0 - -Grafana 2.0 represents months of work by the Grafana team and the community. We are pleased to be able to -release the Grafana 2.0 beta. This is a guide that describes some of changes and new features that can -be found in Grafana V2.0. 
- -If you are interested in how to migrate from Grafana V1.x to V2.0, please read our [Migration Guide](../installation/migrating_to2.md) - -## New backend server - -Grafana now ships with its own required backend server. Also completely open-source, it's written in Go and has a full HTTP API. - -In addition to new features, the backend server makes it much easier to set up and enjoy Grafana. Grafana 2.0 now ships as cross platform binaries with no dependencies. Authentication is built in, and Grafana is now capable of proxying connections to Data Sources. There are no longer any CORS (Cross Origin Resource Sharing) issues requiring messy workarounds. Elasticsearch is no longer required just to store dashboards. - -## User and Organization permissions - -All Dashboards and Data Sources are linked to an Organization (not to a User). Users are linked to -Organizations via a role. That role can be: - -- `Viewer`: Can only view dashboards, not save / create them. -- `Editor`: Can view, update and create dashboards. -- `Admin`: Everything an Editor can plus edit and add data sources and organization users. - -> **Note:** A `Viewer` can still view all metrics exposed through a data source, not only -> the metrics used in already existing dashboards. That is because there are not -> per series permissions in Graphite, InfluxDB or OpenTSDB. - -There are currently no permissions on individual dashboards. - -Read more about Grafana's new user model on the [Admin section](../reference/admin/) - -## Dashboard Snapshot sharing - -A Dashboard Snapshot is an easy way to create and share a URL for a stripped down, point-in-time version of any Dashboard. -You can give this URL to anyone or everyone, and they can view the Snapshot even if they're not a User of your Grafana instance. - -You can set an expiration time for any Snapshots you create. When you create a Snapshot, we strip sensitive data, like -panel metric queries, annotation and template queries and panel links. 
The data points displayed on -screen for that specific time period in your Dashboard is saved in the JSON of the Snapshot itself. - -Sharing a Snapshot is similar to sharing a link to a screenshot of your dashboard, only way better (they'll look great at any screen resolution, you can hover over series, -even zoom in). Also they are fast to load as they aren't actually connected to any live Data Sources in any way. - -They're a great way to communicate about a particular incident with specific people who aren't users of your Grafana instance. You can also use them to show off your dashboards over the Internet. - -![](/static/img/docs/v2/dashboard_snapshot_dialog.png) - -### Publish snapshots - -You can publish snapshots locally or to [snapshots.raintank.io](https://snapshots.raintank.io). snapshots.raintank.io is a free service provided by [Grafana Labs](https://grafana.com) for hosting external Grafana snapshots. - -Either way, anyone with the link (and access to your Grafana instance for local snapshots) can view it. - -## Panel time overrides and timeshift - -In Grafana v2.x you can now override the relative time range for individual panels, causing them to be different than what is selected in the Dashboard time picker in the upper right. You can also add a time shift to individual panels. This allows you to show metrics from different time periods or days at the same time. - -![](/static/img/docs/v2/panel_time_override.jpg) - -You control these overrides in panel editor mode and the new tab `Time Range`. - -![](/static/img/docs/v2/time_range_tab.jpg) - -When you zoom or change the Dashboard time to a custom absolute time range, all panel overrides will be disabled. The panel relative time override is only active when the dashboard time is also relative. The panel timeshift override however is always active, even when the dashboard time is absolute. 
- -The `Hide time override info` option allows you to hide the override info text that is by default shown in the -upper right of a panel when overridden time range options. - -Currently you can only override the dashboard time with relative time ranges, not absolute time ranges. - -## Panel iframe embedding - -You can embed a single panel on another web page or your own application using the panel share dialog. - -Below you should see an iframe with a graph panel (taken from a Dashboard snapshot at [snapshots.raintank.io](http://snapshots.raintank.io). - -Try hovering or zooming on the panel below! - - - -This feature makes it easy to include interactive visualizations from your Grafana instance anywhere you want. - -## New dashboard top header - -The top header has gotten a major streamlining in Grafana V2.0. - - - -1. `Side menubar toggle` Toggle the side menubar on or off. This allows you to focus on the data presented on the Dashboard. The side menubar provides access to features unrelated to a Dashboard such as Users, Organizations, and Data Sources. -1. `Dashboard dropdown` The main dropdown shows you which Dashboard you are currently viewing, and allows you to easily switch to a new Dashboard. From here you can also create a new Dashboard, Import existing Dashboards, and manage the Playlist. -1. `Star Dashboard`: Star (or un-star) the current Dashboard. Starred Dashboards will show up on your own Home Dashboard by default, and are a convenient way to mark Dashboards that you're interested in. -1. `Share Dashboard`: Share the current dashboard by creating a link or create a static Snapshot of it. Make sure the Dashboard is saved before sharing. -1. `Save dashboard`: Save the current Dashboard with the current name. -1. `Settings`: Manage Dashboard settings and features such as Templating, Annotations and the name. - -> **Note:** In Grafana v2.0 when you change the title of a dashboard and then save it, it will no -> longer create a new Dashboard. 
It will just change the name for the current Dashboard. -> To change name and create a new Dashboard use the `Save As...` menu option - -### New Side menubar - -The new side menubar provides access to features such as User Preferences, Organizations, and Data Sources. - -If you have multiple Organizations, you can easily switch between them here. - -The side menubar will become more useful as we build out additional functionality in Grafana 2.x - -You can easily collapse or re-open the side menubar at any time by clicking the Grafana icon in the top left. We never want to get in the way of the data. - -## New search view and starring dashboards - -![](/static/img/docs/v2/dashboard_search.jpg) - -The dashboard search view has gotten a big overhaul. You can now see and filter by which dashboard you have personally starred. - -## Logarithmic scale - -The Graph panel now supports 3 logarithmic scales, `log base 10`, `log base 32`, `log base 1024`. Logarithmic y-axis scales are very useful when rendering many series of different order of magnitude on the same scale (eg. -latency, network traffic, and storage) - -![](/static/img/docs/v2/graph_logbase10_ms.png) - -## Dashlist panel - -![](/static/img/docs/v2/dashlist_starred.png) - -The dashlist is a new panel in Grafana v2.0. It allows you to show your personal starred dashboards, as well as do custom searches based on search strings or tags. - -dashlist is used on the new Grafana Home screen. It is included as a reference Panel and is useful to provide basic linking between Dashboards. - -## Data Source proxy and admin views - -Data sources in Grafana v2.0 are no longer defined in a config file. Instead, they are added through the UI or the HTTP API. - -The backend can now proxy data from Data Sources, which means that it is a lot easier to get started using Grafana with Graphite or OpenTSDB without having to spend time with CORS (Cross origin resource sharing) work-arounds. 
- -In addition, connections to Data Sources can be better controlled and secured, and authentication information no longer needs to be exposed to the browser. - -## Dashboard "now delay" - -A commonly reported problem has been graphs dipping to zero at the end, because metric data for the last interval has yet to be written to the Data Source. These graphs then "self correct" once the data comes in, but can look deceiving or alarming at times. - -You can avoid this problem by adding a `now delay` in `Dashboard Settings` > `Time Picker` tab. This new feature will cause Grafana to ignore the most recent data up to the set delay. -![](/static/img/docs/v2/timepicker_now_delay.jpg) - -The delay that may be necessary depends on how much latency you have in your collection pipeline. - -## Dashboard overwrite protection - -Grafana v2.0 protects Users from accidentally overwriting each others Dashboard changes. Similar protections are in place if you try to create a new Dashboard with the same name as an existing one. - -![](/static/img/docs/v2/overwrite_protection.jpg) - -These protections are only the first step; we will be building out additional capabilities around dashboard versioning and management in future versions of Grafana. - -## User preferences - -If you open side menu (by clicking on the Grafana icon in the top header) you can access your Profile Page. - -Here you can update your user details, UI Theme, and change your password. - -## Server-side Panel rendering - -Grafana now supports server-side PNG rendering. From the Panel share dialog you now have access to a link that will render a particular Panel to a PNG image. - -> **Note:** This requires that your Data Source is accessible from your Grafana instance. 
- -![](/static/img/docs/v2/share_dialog_image_highlight.jpg) diff --git a/docs/sources/whatsnew/whats-new-in-v2-1.md b/docs/sources/whatsnew/whats-new-in-v2-1.md deleted file mode 100644 index 043e4e4910b..00000000000 --- a/docs/sources/whatsnew/whats-new-in-v2-1.md +++ /dev/null @@ -1,155 +0,0 @@ ---- -_build: - list: false -aliases: - - /docs/grafana/latest/guides/whats-new-in-v2-1/ - - /docs/grafana/latest/whatsnew/whats-new-in-v2-1/ -description: Feature and improvement highlights for Grafana v2.1 -keywords: - - grafana - - new - - documentation - - '2.1' - - release notes -title: What's new in Grafana v2.1 -weight: -2 ---- - -# What's new in Grafana v2.1 - -Grafana 2.1 brings improvements in three core areas: dashboarding, authentication, and data sources. -As with every Grafana release, there is a whole slew of new features, enhancements, and bug fixes. - -## More Dynamic Dashboards - -The Templating system is one of the most powerful and well-used features of Grafana. -The 2.1 release brings numerous improvements that make dashboards more dynamic than ever before. - -### Multi-Value Template Variables - -A template variable with Multi-Value enabled allows for the selection of multiple values at the same time. -These variables can then be used in any Panel to make them more dynamic, and to give you the perfect view of your data. -Multi-Value variables are also enabling the new `row repeat` and `panel repeat` feature described below. - -![Multi-Value Select](/static/img/docs/v2/multi-select.gif 'Multi-Value Select') -

- -### Repeating Rows and Panels - -It’s now possible to create a dashboard that automatically adds (or removes) both rows and panels based -on selected variable values. Any row or any panel can be configured to repeat (duplicate itself) based -on a multi-value template variable.

- -![Repeating Rows and Panels](/static/img/docs/v2/panel-row-repeat.gif 'Repeating Rows and Panels') -

- -### Dashboard Links and Navigation - -To support better navigation between dashboards, it's now possible to create custom and dynamic links from individual -panels to appropriate Dashboards. You also have the ability to create flexible top-level links on any -given dashboard thanks to the new dashboard navigation bar feature. - -![Dashboard Links](/static/img/docs/v2/dash_links.png 'Dashboard Links') - -Dashboard links can be added under dashboard settings. Either defined as static URLs with a custom icon or as dynamic -dashboard links or dropdowns based on custom dashboard search query. These links appear in the same -row under the top menu where template variables appear. - ---- - -### Better local Dashboard support - -Grafana can now index Dashboards saved locally as JSON from a given directory. These file based dashboards -will appear in the regular dashboard search along regular DB dashboards. - -> **Note:** Saving local dashboards back the folder is not supported; this feature is meant for statically generated JSON dashboards. - ---- - -## New Authentication Options - -New authentication methods add numerous options to manage users, roles and organizations. - -### LDAP support - -This highly requested feature now allows your Grafana users to login with their LDAP credentials. -You can also specify mappings between LDAP group memberships and Grafana Organization user roles. - -### Basic Auth Support - -You can now authenticate against the Grafana API utilizing a simple username and password with basic HTTP authentication. - -> **Note:** This can be useful for provisioning and configuring management systems that need -> to utilize the API without having to create an API key. - -### Auth Proxy Support - -You can now authenticate utilizing a header (eg. X-Authenticated-User, or X-WEBAUTH-USER) - -> **Note:** this can be useful in situations with reverse proxies. 
- -### New “Read-only Editor” User Role - -There is a new User role available in this version of Grafana: “Read-only Editor”. This role behaves just -like the Viewer role does in Grafana 2.0. That is you can edit graphs and queries but not save dashboards. -The Viewer role has been modified in Grafana 2.1 so that users assigned this role can no longer edit panels. - ---- - -## Data source Improvements - -### InfluxDB 0.9 Support - -Grafana 2.1 now comes with full support for InfluxDB 0.9. There is a new query editor designed from scratch -for the new features InfluxDB 0.9 enables. - -![InfluxDB Editor](/static/img/docs/v2/influx_09_editor_anim.gif 'InfluxDB Editor') - -
- -### OpenTSDB Improvements - -Grafana OpenTSDB data source now supports template variable values queries. This means you can create -template variables that fetches the values from OpenTSDB (for example metric names, tag names, or tag values). -The query editor is also enhanced to limiting tags by metric. - -> **Note:** OpenTSDB config option tsd.core.meta.enable_realtime_ts must enabled for OpenTSDB lookup API) - -### New Data Source: KairosDB - -The Cassandra backed time series database KairosDB is now supported in Grafana out of the box. Thank you to -masaori335 for his hard work in getting it to this point. - ---- - -## Panel Improvements - -Grafana 2.1 gives you even more flexibility customizing how individual panels render. -Overriding the colors of specific series using regular expressions, changing how series stack, -and allowing string values will help you better understand your data at a glance. - -### Graph Panel - -Define series color using regex rule. This is useful when you have templated graphs with series names -that change depending selected template variables. Using a regex style override rule you could -for example make all series that contain the word **CPU** `red` and assigned to the second y axis. - -![Define series color using regex rule](/static/img/docs/v2/regex_color_override.png 'Define series color using regex rule') - -New series style override, negative-y transform and stack groups. Negative y transform is -very useful if you want to plot a series on the negative y scale without affecting the legend values like min or max or -the values shown in the hover tooltip. - -![Negative-y Transform](/static/img/docs/v2/negative-y.png 'Negative-y Transform') - -![Negative-y Transform](/static/img/docs/v2/negative-y-form.png 'Negative-y Transform') - -### Singlestat Panel - -Now support string values. Useful for time series database like InfluxDB that supports -string values. 
- -### Changelog - -For a detailed list and link to github issues for everything included in the 2.1 release please -view the [CHANGELOG.md](https://github.com/grafana/grafana/blob/master/CHANGELOG.md) file. diff --git a/docs/sources/whatsnew/whats-new-in-v2-5.md b/docs/sources/whatsnew/whats-new-in-v2-5.md deleted file mode 100644 index 599e1fbff75..00000000000 --- a/docs/sources/whatsnew/whats-new-in-v2-5.md +++ /dev/null @@ -1,121 +0,0 @@ ---- -_build: - list: false -aliases: - - /docs/grafana/latest/guides/whats-new-in-v2-5/ - - /docs/grafana/latest/whatsnew/whats-new-in-v2-5/ -description: Feature and improvement highlights for Grafana v2.5 -keywords: - - grafana - - new - - documentation - - '2.5' - - release notes -title: What's new in Grafana v2.5 -weight: -3 ---- - -# What's new in Grafana v2.5 - -## Release highlights - -This is an exciting release, and we want to share some of the highlights. The release includes many -fixes and enhancements to all areas of Grafana, like new Data Sources, a new and improved timepicker, user invites, panel -resize handles and improved InfluxDB and OpenTSDB support. - -### New time range controls - -New Time picker - -A new timepicker with room for more quick ranges as well as new types of relative ranges, like `Today`, -`The day so far` and `This day last week`. Also an improved time and calendar picker that now works -correctly in UTC mode. - -### Elasticsearch - -Elasticsearch example -
- -This release brings a fully featured query editor for Elasticsearch. You will now be able to visualize -logs or any kind of data stored in Elasticsearch. The query editor allows you to build both simple -and complex queries for logs or metrics. - -- Compute metrics from your documents, supported Elasticsearch aggregations: - - Count, Avg, Min, Max, Sum - - Percentiles, Std Dev, etc. -- Group by multiple terms or filters - - Specify group by options like Top 5 based on Avg @value -- Auto completion for field names -- Query only relevant indices based on time pattern -- Alias patterns for short readable series names - -Try the new Elasticsearch query editor on the [play.grafana.org](https://play.grafana.org/dashboard/db/elasticsearch-metrics) site. - -### CloudWatch - -Cloudwatch editor - -Grafana 2.5 ships with a new CloudWatch data source that will allow you to query and visualize CloudWatch -metrics directly from Grafana. - -- Rich editor with auto completion for metric names, namespaces and dimensions -- Templating queries for generic dashboards -- Alias patterns for short readable series names - -### Prometheus - -Prometheus editor - -Grafana 2.5 ships with a new Prometheus data source that will allow you to query and visualize data -stored in Prometheus. - -### Mix different data sources - -Mix data sources in the same dashboard or in the same graph! - -In previous releases you have been able to mix different data sources on the same dashboard. In v2.5 you -will be able to mix then on the same graph! You can enable this by selecting the built in `-- Mixed --` data source. -When selected this will allow you to specify data source on a per query basis. This will, for example, allow you -to plot metrics from different Graphite servers on the same Graph or plot data from Elasticsearch alongside -data from Prometheus. Mixing different data sources on the same graph works for any data source, even custom ones. 
- -### Panel Resize handles - - - -This release adds resize handles to the bottom right corners of panels making it easy to resize both width and height. - -### User invites - - - -This version also brings some new features for user management. - -- Organization admins can now invite new users (via email or manually via invite link) -- Users can signup using invite link and get automatically added to invited organization -- User signup workflow can (if enabled) contain an email verification step. -- Check out [#2353](https://github.com/grafana/grafana/issues/2353) for more info. - -### Miscellaneous improvements - -- InfluxDB query editor now supports math and AS expressions -- InfluxDB query editor now supports custom group by interval -- Panel drilldown link is easier to reach -- LDAP improvements (can now search for group membership if your LDAP server does not support memberOf attribute) -- More units for graph and singlestat panel (Length, Volume, Temperature, Pressure, Currency) -- Admin page for all organizations (remove / edit) - -### Breaking changes - -There have been some changes to the data source plugin API. If you are using a custom plugin check that there is an update for it before you upgrade. Also -the new time picker does not currently support custom quick ranges like the last one did. This will likely be added in a -future release. - -### Changelog - -For a detailed list and link to github issues for everything included in the 2.5 release please -view the [CHANGELOG.md](https://github.com/grafana/grafana/blob/master/CHANGELOG.md) file. 
- ---- - -### Download Grafana 2.5 now diff --git a/docs/sources/whatsnew/whats-new-in-v2-6.md b/docs/sources/whatsnew/whats-new-in-v2-6.md deleted file mode 100644 index 880ee853bf5..00000000000 --- a/docs/sources/whatsnew/whats-new-in-v2-6.md +++ /dev/null @@ -1,144 +0,0 @@ ---- -_build: - list: false -aliases: - - /docs/grafana/latest/guides/whats-new-in-v2-6/ - - /docs/grafana/latest/whatsnew/whats-new-in-v2-6/ -description: Feature and improvement highlights for Grafana v2.6 -keywords: - - grafana - - new - - documentation - - '2.6' - - release notes -title: What's new in Grafana v2.6 -weight: -4 ---- - -# What's new in Grafana v2.6 - -## Release highlights - -The release includes a new Table panel, a new InfluxDB query editor, support for Elasticsearch Pipeline Metrics and -support for multiple Cloudwatch credentials. - -## Table Panel - - - -The new table panel is very flexible, supporting both multiple modes for time series as well as for -table, annotation and raw JSON data. It also provides date formatting and value formatting and coloring options. - -### Time series to rows - -In the most simple mode you can turn time series to rows. This means you get a `Time`, `Metric` and a `Value` column. -Where `Metric` is the name of the time series. - - - -### Table Transform - -Above you see the options tab for the **Table Panel**. The most important option is the `To Table Transform`. -This option controls how the result of the metric/data query is turned into a table. - -### Column Styles - -The column styles allow you control how dates and numbers are formatted. - -### Time series to columns - -This transform allows you to take multiple time series and group them by time. Which will result in a `Time` column -and a column for each time series. - - - -In the screenshot above you can see how the same time series query as in the previous example can be transformed into -a different table by changing the `To Table Transform` to `Time series to columns`. 
- -### Time series to aggregations - -This transform works very similar to the legend values in the Graph panel. Each series gets its own row. In the Options -tab you can select which aggregations you want using the plus button the Columns section. - - - -You have to think about how accurate the aggregations will be. It depends on what aggregation is used in the time series query, -how many data points are fetched, etc. The time series aggregations are calculated by Grafana after aggregation is performed -by the time series database. - -### Raw logs queries - -If you want to show documents from Elasticsearch pick `Raw Document` as the first metric. - - - -This in combination with the `JSON Data` table transform will allow you to pick which fields in the document -you want to show in the table. - - - -### Elasticsearch aggregations - -You can also make Elasticsearch aggregation queries without a `Date Histogram`. This allows you to -use Elasticsearch metric aggregations to get accurate aggregations for the selected time range. - - - -### Annotations - -The table can also show any annotations you have enabled in the dashboard. - - - -## The New InfluxDB Editor - -The new InfluxDB editor is a lot more flexible and powerful. It supports nested functions, like `derivative`. -It also uses the same technique as the Graphite query editor in that it presents nested functions as chain of function -transformations. It tries to simplify and unify the complicated nature of InfluxDB's query language. - - - -In the `SELECT` row you can specify what fields and functions you want to use. If you have a -group by time you need an aggregation function. Some functions like derivative require an aggregation function. - -The editor tries simplify and unify this part of the query. For example: - -![](/static/img/docs/influxdb/select_editor.png) - -The above will generate the following InfluxDB `SELECT` clause: - -```sql -SELECT derivative(mean("value"), 10s) /10 AS "REQ/s" FROM .... 
-``` - -### Select multiple fields - -Use the plus button and select Field > field to add another SELECT clause. You can also -specify an asterix `*` to select all fields. - -### Group By - -To group by a tag click the plus icon at the end of the GROUP BY row. Pick a tag from the dropdown that appears. -You can remove the group by by clicking on the `tag` and then click on the x icon. - -The new editor also allows you to remove group by time and select `raw` table data. Which is very useful -in combination with the new Table panel to show raw log data stored in InfluxDB. - - - -## Pipeline metrics - -If you have Elasticsearch 2.x and Grafana 2.6 or above then you can use pipeline metric aggregations like -**Moving Average** and **Derivative**. Elasticsearch pipeline metrics require another metric to be based on. Use the eye icon next to the metric -to hide metrics from appearing in the graph. - -![](/static/img/docs/elasticsearch/pipeline_metrics_editor.png) - -## Changelog - -For a detailed list and link to github issues for everything included in the 2.6 release please -view the [CHANGELOG.md](https://github.com/grafana/grafana/blob/master/CHANGELOG.md) file. - ---- - -Download Grafana 2.6 now diff --git a/docs/sources/whatsnew/whats-new-in-v3-0.md b/docs/sources/whatsnew/whats-new-in-v3-0.md deleted file mode 100644 index a0bb5c8a1e9..00000000000 --- a/docs/sources/whatsnew/whats-new-in-v3-0.md +++ /dev/null @@ -1,236 +0,0 @@ ---- -_build: - list: false -aliases: - - /docs/grafana/latest/guides/whats-new-in-v3/ - - /docs/grafana/latest/whatsnew/whats-new-in-v3-0/ -description: Feature and improvement highlights for Grafana v3.0 -keywords: - - grafana - - new - - documentation - - '3.0' - - release notes -title: What's new in Grafana v3.0 -weight: -5 ---- - -# What's new in Grafana v3.0 - -## Commercial Support - -Commercial Support subscriptions for Grafana are now [generally available](https://grafana.com/support/plans/). 
- -Raintank is committed to a 100% open-source strategy for Grafana. We -do not want to go down the “open core” route. If your organization -finds Grafana valuable, please consider purchasing a subscription. Get -direct support, bug fixes, and training from the core Grafana team. - -## Plugins - -With the popularity of Grafana continuing to accelerate, it has been -challenging to keep up with all the requests for new features, new -panels, new data sources, and new functionality. Saying “no” so often -has been frustrating, especially for an open source project with such -a vibrant community. - -The team felt that it was time to dramatically improve extensibility -through plugin support. Grafana 3.0 comes with a completely revamped -plugin SDK / API. - -We’ve refactored our **Data Source** plugin architecture and added -two new plugin types: - -- **Panel** plugins let you add new panel types for your Dashboards. -- **App** plugins bundle **Panels** plugins, **Data Sources** plugins, - Dashboards, and Grafana **Pages**. Apps are a great way to provide an - entire experience right within Grafana. - -## Grafana.com - - - -[Grafana.com](https://grafana.com) offers a central repository where the community can come together to discover, create and -share plugins (data sources, panels, apps) and dashboards. - -We are also working on a hosted Graphite-compatible data source that will be optimized for use with Grafana. -It’ll be easy to combine your existing data source(s) with this OpenSaaS option. Finally, Grafana.com can -also be a hub to manage all your Grafana instances. You’ll be able to monitor their health and availability, -perform dashboard backups, and more. - -We are also working on a hosted Graphite-compatible Data Source that -will be optimized for use with Grafana. It’ll be easy to combine your -existing Data Source(s) with this OpenSaaS option. - -Finally, Grafana.com will also be a hub to manage all your Grafana -instances. 
You’ll be able to monitor their health and availability, -perform Dashboard backups, and more. - -Grafana.net will officially launch along with the stable version of -Grafana 3.0, but go to and check out the preview -and sign up for an account in the meantime. - -## grafana-cli - -Grafana 3.0 comes with a new command line tool called grafana-cli. You -can easily install plugins from Grafana.net with it. For -example: - -``` -grafana-cli install grafana-pie-chart-panel -``` - -## Personalization and Preferences - -The home dashboard, timezone and theme can now be customized on Organization -and user Profile level. Grafana can also track recently viewed dashboards, which -can then be displayed in the dashboard list panel. - -## Improved Playlists - -You can now save Playlists, and start them by using a Playlist URL. If -you update a running Playlist, it will update after its next cycle. - -This is powerful as it allows you to remote control Grafana. If you -have a big TV display showing Grafana in your company lobby, create a -playlist named Lobby, and start it on the computer connected to the -Lobby TV. - -You can now change the Lobby playlist and have the dashboards shown in -the Lobby update accordingly, automatically. - -The playlist does not even have to contain multiple Dashboards; you -can use this feature to reload the whole Dashboard (and Grafana) -periodically and remotely. - -You can also make Playlists dynamic by using Dashboard **tags** to -define the Playlist. - - - -## Improved UI - -We’ve always tried to focus on a good looking, usable, and responsive -UI. We’ve continued to pay a lot of attention to these areas in this -release. - -Grafana 3.0 has a dramatically updated UI that not only looks better -but also has a number of usability improvements. The side menu now -works as a dropdown that you can pin to the side. The Organization / -Profile / Sign out side menu links have been combined into an on hover -slide out menu. 
- -In addition, all the forms and the layouts of all pages have been -updated to look and flow better, and be much more consistent. There -are literally hundreds of UI improvements and refinements. - -Here’s the new side menu in action: - - - -And here's the new look for Dashboard settings: - - - -Check out the Play -Site to get a feel for some of the UI changes. - -## Improved Annotations - -It is now possible to define a link in each annotation. You can hover -over the link and click the annotation text. This feature is very -useful for linking to particular commits or tickets where more -detailed information can be presented to the user. - - - -## Data source variables - -This has been a top requested feature for very long we are excited to finally provide -this feature. You can now add a new `Data source` type variable. That will -automatically be filled with instance names of your data sources. - - - -You can then use this variable as the panel data source: - - - -This will allow you to quickly change data source server and reuse the -same dashboard for different instances of your metrics backend. For example -you might have Graphite running in multiple data centers or environments. - -## Prometheus, InfluxDB, and OpenTSDB improvements - -All three of these popular included Data Sources have seen a variety -of improvements in this release. Here are some highlights: - -### Prometheus - -The Prometheus Data Source now supports annotations. - -### InfluxDB - -You can now select the InfluxDB policy from the query editor. - - -Grafana 3.0 also comes with support for InfluxDB 0.11 and InfluxDB 0.12. - -### OpenTSDB - -OpenTSDB 2.2 is better supported and now supports millisecond precision. - -## Breaking changes - -Dashboards from v2.6 are compatible; no manual updates should be necessary. There could -be some edge case scenarios where dashboards using templating could stop working. 
-If that is the case just enter the edit view for the template variable and hit Update button. -This is due to a simplification of the variable format system where template variables are -now stored without any formatting (glob/regex/etc), this is done on the fly when the -variable is interpolated. - -- Plugin API: The plugin API has changed so if you are using a custom - data source (or panel) they need to be updated as well. - -- InfluxDB 0.8: This data source is no longer included in releases, - you can still install manually from [Grafana.com](https://grafana.com) - -- KairosDB: This data source has also no longer shipped with Grafana, - you can install it manually from [Grafana.com](https://grafana.com) - -## Plugin showcase - -Discovering and installing plugins is very quick and easy with Grafana 3.0 and [Grafana.com](https://grafana.com). Here -are a couple that I encourage you try! - -#### [Clock Panel](https://grafana.com/plugins/grafana-clock-panel) - -Support's both current time and count down mode. - - -#### [Pie Chart Panel](https://grafana.com/plugins/grafana-piechart-panel) - -A simple pie chart panel is now available as an external plugin. - - -#### [WorldPing App](https://grafana.com/plugins/raintank-worldping-app) - -This is full blown Grafana App that adds new panels, data sources and pages to give -feature rich global performance monitoring directly from your on-prem Grafana. - - - -#### [Zabbix App](https://grafana.com/plugins/alexanderzobnin-zabbix-app) - -This app contains the already very pouplar Zabbix data source plugin, 2 dashboards and a triggers panel. It is -created and maintained by [Alexander Zobnin](https://github.com/alexanderzobnin/grafana-zabbix). 
- - - -Check out the full list of plugins on [Grafana.com](https://grafana.com/plugins) - -## CHANGELOG - -For a detailed list and link to github issues for everything included -in the 3.0 release please view the -[CHANGELOG.md](https://github.com/grafana/grafana/blob/master/CHANGELOG.md) file. diff --git a/docs/sources/whatsnew/whats-new-in-v3-1.md b/docs/sources/whatsnew/whats-new-in-v3-1.md deleted file mode 100644 index ba9c07cfbf7..00000000000 --- a/docs/sources/whatsnew/whats-new-in-v3-1.md +++ /dev/null @@ -1,68 +0,0 @@ ---- -_build: - list: false -aliases: - - /docs/grafana/latest/guides/whats-new-in-v3-1/ - - /docs/grafana/latest/whatsnew/whats-new-in-v3-1/ -description: Feature and improvement highlights for Grafana v3.1 -keywords: - - grafana - - new - - documentation - - '3.1' - - release notes -title: What's new in Grafana v3.1 -weight: -6 ---- - -# What's new in Grafana v3.1 - -## Dashboard Export and Import - -The export feature is now accessed from the share menu. - - - -Dashboards exported from Grafana 3.1 are now more portable and easier for others to import than before. The export process extracts information data source types used by panels and adds these to a new `inputs` section in the dashboard json. So when you or another person tries to import the dashboard they will be asked to select data source and optional metric prefix options. - - - -The above screenshot shows the new import modal that gives you 3 options for how to import a dashboard. One notable new addition here is the ability to import directly from Dashboards shared on [Grafana.com](https://grafana.com). - -The next step in the import process: - - - -Here you can change the name of the dashboard and also pick what data sources you want the dashboard to use. The above screenshot shows a CollectD dashboard for Graphite that requires a metric prefix be specified. - -## Discover Dashboards - -On [Grafana.com](https://grafana.com) you can now browse and search for dashboards. 
We have already added a few but more are being uploaded every day. To import a dashboard just copy the dashboard URL and head back to Grafana, then Dashboard Search -> Import -> Paste Grafana.com Dashboard URL. - - - -## Constant template variables - -We added a new template variable named constant that makes it easier to share and export dashboard that have custom prefixes. - -## Dashboard URLs - -Having current time range and template variable value always sync with the URL makes it possible to always copy your current Grafana URL to share with a colleague without having to use the Share modal. - -## Internal metrics - -Do you want metrics about viewing metrics? Of course you do! In this release we added support for sending metrics about Grafana to graphite. You can configure interval and server in the config file. - -## Logging - -Switched logging framework to log15 to enable key value per logging and filtering based on different log levels. It's now possible to configure different log levels for different modules. - -### Breaking changes - -- **Logging** format have been changed to improve log filtering. -- **Graphite PNG** Graphite PNG support dropped from Graph panel (use Grafana native PNG instead). -- **Migration** No longer possible to migrate dashboards from 1.x (Stored in ES or Influx 0.8). - -## CHANGELOG - -For a detailed list and link to github issues for everything included in the 3.1 release please view the [CHANGELOG.md](https://github.com/grafana/grafana/blob/master/CHANGELOG.md) file. 
diff --git a/docs/sources/whatsnew/whats-new-in-v4-0.md b/docs/sources/whatsnew/whats-new-in-v4-0.md deleted file mode 100644 index 8d175914f15..00000000000 --- a/docs/sources/whatsnew/whats-new-in-v4-0.md +++ /dev/null @@ -1,184 +0,0 @@ ---- -_build: - list: false -aliases: - - /docs/grafana/latest/guides/whats-new-in-v4/ - - /docs/grafana/latest/whatsnew/whats-new-in-v4-0/ -description: Feature and improvement highlights for Grafana v4.0 -keywords: - - grafana - - new - - documentation - - '4.0' - - release notes -title: What's new in Grafana v4.0 -weight: -7 ---- - -# What's new in Grafana v4.0 - -As usual this release contains a ton of minor new features, fixes and improved UX. But on top of the usual new goodies -is a core new feature: Alerting! Read on below for a detailed description of what's new in v4.0. - -## Alerting - -{{< figure class="float-right" max-width="40%" src="/static/img/docs/v4/drag_handles_gif.gif" caption="Alerting overview" >}} - -Alerting is a really revolutionary feature for Grafana. It transforms Grafana from a -visualization tool into a truly mission critical monitoring tool. The alert rules are very easy to -configure using your existing graph panels and threshold levels can be set simply by dragging handles to -the right side of the graph. The rules will continually be evaluated by grafana-server and -notifications will be sent out when the rule conditions are met. - -This feature has been worked on for over a year with many iterations and rewrites -just to make sure the foundations are really solid. We are really proud to finally release it! -Since the alerting execution is processed in the backend not all data source plugins are supported. -Right now Graphite, Prometheus, InfluxDB and OpenTSDB are supported. Elasticsearch is being worked -on but will be not ready for v4 release. - -
- -### Rules - -{{< figure class="float-right" max-width="40%" src="/static/img/docs/v4/alerting_conditions.png" caption="Alerting Conditions" >}} - -The rule configuration allows you to specify a name, how often the rule should be evaluated and a series -of conditions that all need to be true for the alert to fire. - -Currently the only condition type that exists is a `Query` condition that allows you to -specify a query letter, time range and an aggregation function. The letter refers to -a query you already have added in the **Metrics** tab. The result from the -query and the aggregation function is a single value that is then used in the threshold check. - -We plan to add other condition types in the future, like `Other Alert`, where you can include the state -of another alert in your conditions, and `Time Of Day`. - -### Notifications - -{{< figure class="float-right" max-width="40%" src="/static/img/docs/v4/slack_notification.png" caption="Alerting Slack Notification" >}} - -Alerting would not be very useful if there was no way to send notifications when rules trigger and change state. You -can set up notifications of different types. We currently have `Slack`, `PagerDuty`, `Email` and `Webhook` with more in the -pipe that will be added during beta period. The notifications can then be added to your alert rules. -If you have configured an external image store in the grafana.ini config file (s3, webdav, and azure_blob options available) -you can get very rich notifications with an image of the graph and the metric -values all included in the notification. - -### Annotations - -Alert state changes are recorded in a new annotation store that is built into Grafana. This store -currently only supports storing annotations in Grafana's own internal database (mysql, postgres or sqlite). 
-The Grafana annotation storage is currently only used for alert state changes but we hope to add the ability for users -to add graph comments in the form of annotations directly from within Grafana in a future release. - -### Alert List Panel - -{{< figure class="float-right" max-width="30%" src="/static/img/docs/v4/alert_list_panel.png" caption="Alert List Panel" >}} - -This new panel allows you to show alert rules or a history of alert rule state changes. You can filter based on states you are -interested in. This panel is very useful for overview style dashboards. - -
- -## Ad-hoc filter variable - -{{< figure class="float-right" max-width="30%" src="/static/img/docs/v4/adhoc_filters.gif" caption="Ad-hoc filters variable" >}} - -This is a new and very different type of template variable. It will allow you to create new key/value filters on the fly -with autocomplete for both key and values. The filter condition will be automatically applied to all -queries that use that data source. This feature opens up more exploratory dashboards. In the gif animation to the right -you have a dashboard for Elasticsearch log data. It uses one query variable that allow you to quickly change how the data -is grouped, and an interval variable for controlling the granularity of the time buckets. What was missing -was a way to dynamically apply filters to the log query. With the `Ad-Hoc Filters` variable you can -dynamically add filters to any log property! - -## UX Improvements - -We always try to bring some UX/UI refinements and polish in every release. - -### TV-mode and Kiosk mode - -
-
-

- Grafana is so often used on wall mounted TVs that we figured a clean TV mode would be - really nice. In TV mode the top navbar, row and panel controls will all fade to transparent. -

- -

- This happens automatically after one minute of user inactivity but can also be toggled manually - with the d v sequence shortcut. Any mouse movement or keyboard action will - restore navbar and controls. -

- -

- Another feature is the kiosk mode. This can be enabled with d k - shortcut or by adding &kiosk to the URL when you load a dashboard. - In kiosk mode the navbar is completely hidden/removed from view. -

- -
-
- {{< figure src="/static/img/docs/v4/tvmode.png" caption="TV mode" >}} - -
-
- -### New row menu and add panel experience - -{{< figure class="float-right" max-width="50%" src="/static/img/docs/v4/add_panel.gif" caption="Add Panel flow" >}} - -We spent a lot of time improving the dashboard building experience to make it both -more efficient and easier for beginners. After many good but not great experiments -with a `build mode` we eventually decided to just improve the green row menu and -continue work on a `build mode` for a future release. - -The new row menu automatically slides out when you mouse over the edge of the row. You no longer need -to hover over the small green icon and then click it to expand the row menu. - -There are some minor improvements to drag and drop behavior. Now when dragging a panel from one row -to another you will insert the panel and Grafana will automatically make room for it. -When you drag a panel within a row you will simply reorder the panels. - -If you look at the animation to the right you can see that you can drag and drop a new panel. This is not -required, you can also just click the panel type and it will be inserted at the end of the row -automatically. Dragging a new panel has an advantage in that you can insert a new panel where ever you want -not just at the end of the row. - -We plan to further improve dashboard building in the future with a more rich grid and layout system. - -### Keyboard shortcuts - -{{< figure class="float-right" max-width="40%" src="/static/img/docs/v4/shortcuts.png" caption="Shortcuts" >}} - -Grafana v4 introduces a number of really powerful keyboard shortcuts. You can now focus a panel -by hovering over it with your mouse. With a panel focused you can simply hit `e` to toggle panel -edit mode, or `v` to toggle fullscreen mode. `p r` removes the panel. `p s` opens share -modal. - -Some nice navigation shortcuts are: - -- `g h` for go to home dashboard -- `s s` open search with starred pre-selected -- `s t` open search in tags list view - -
- -## Upgrade and Breaking changes - -There are no breaking changes. Old dashboards and features should work the same. Grafana-server will automatically upgrade its db -schema on restart. It's advisable to do a backup of Grafana's database before updating. - -If you are using plugins make sure to update your plugins as some might not work perfectly v4. - -You can update plugins using grafana-cli - - grafana-cli plugins update-all - -## Changelog - -Check out the [CHANGELOG.md](https://github.com/grafana/grafana/blob/master/CHANGELOG.md) file for a complete list -of new features, changes, and bug fixes. diff --git a/docs/sources/whatsnew/whats-new-in-v4-1.md b/docs/sources/whatsnew/whats-new-in-v4-1.md deleted file mode 100644 index da51f5185bb..00000000000 --- a/docs/sources/whatsnew/whats-new-in-v4-1.md +++ /dev/null @@ -1,77 +0,0 @@ ---- -_build: - list: false -aliases: - - /docs/grafana/latest/guides/whats-new-in-v4-1/ - - /docs/grafana/latest/whatsnew/whats-new-in-v4-1/ -description: Feature and improvement highlights for Grafana v4.1 -keywords: - - grafana - - new - - documentation - - 4.1.0 - - release notes -title: What's new in Grafana v4.1 -weight: -8 ---- - -## What's new in Grafana v4.1 - -- **Graph**: Support for shared tooltip on all graphs as you hover over one graph. 
[#1578](https://github.com/grafana/grafana/pull/1578), [#6274](https://github.com/grafana/grafana/pull/6274)
-- **Victorops**: Add VictorOps notification integration [#6411](https://github.com/grafana/grafana/issues/6411), thx [@ichekrygin](https://github.com/ichekrygin)
-- **Opsgenie**: Add OpsGenie notification integration [#6687](https://github.com/grafana/grafana/issues/6687), thx [@kylemcc](https://github.com/kylemcc)
-- **Cloudwatch**: Make it possible to specify access and secret key on the data source configuration page [#6697](https://github.com/grafana/grafana/issues/6697)
-- **Elasticsearch**: Added support for Elasticsearch 5.x [#5740](https://github.com/grafana/grafana/issues/5740), thx [@lpic10](https://github.com/lpic10)
-- **Panel**: Added help text for panels. [#4079](https://github.com/grafana/grafana/issues/4079), thx [@utkarshcmu](https://github.com/utkarshcmu)
-- [Full changelog](https://github.com/grafana/grafana/blob/master/CHANGELOG.md)
-
-### Shared tooltip
-
-{{< figure class="float-right" max-width="60%" src="/static/img/docs/v41/shared_tooltip.gif" caption="Shared tooltip" >}}
-
-Showing the tooltip on all panels at the same time has been a long-standing request in Grafana and we are really happy to finally be able to release it.
-You can enable/disable the shared tooltip from the dashboard settings menu or cycle between default, shared tooltip and shared crosshair by pressing Ctrl/Cmd+O.
-
-<div class="clearfix"></div>
-
-### Help text for panel
-
-{{< figure class="float-right" max-width="60%" src="/static/img/docs/v41/helptext_for_panel_settings.png" caption="Hovering help text" >}}
-
-You can set a help text in the general tab on any panel. The help text uses Markdown to enable better formatting and linking to other sites that can provide more information.
-
-<div class="clearfix"></div>
-
-{{< figure class="float-right" max-width="60%" src="/static/img/docs/v41/helptext_hover.png" caption="Hovering help text" >}}
-
-Panels with a help text available have a little indicator in the top left corner. You can show the help text by hovering over the icon.
-
-<div class="clearfix"></div>
- -### Easier Cloudwatch configuration - -{{< figure class="float-right" max-width="60%" src="/static/img/docs/v41/cloudwatch_settings.png" caption="Cloudwatch configuration" >}} - -In Grafana 4.1.0 you can configure your Cloudwatch data source with `access key` and `secret key` directly in the data source configuration page. -This enables people to use the Cloudwatch data source without having access to the filesystem where Grafana is running. - -Once the `access key` and `secret key` have been saved the user will no longer be able to view them. - -
- -## Upgrade and Breaking changes - -Elasticsearch 1.x is no longer supported. Please upgrade to Elasticsearch 2.x or 5.x. Otherwise Grafana 4.1.0 contains no breaking changes. - -## Changelog - -Check out the [CHANGELOG.md](https://github.com/grafana/grafana/blob/master/CHANGELOG.md) file for a complete list -of new features, changes, and bug fixes. - -## Download - -Head to [v4.1 download page](/download/4_1_0/) for download links and instructions. - -## Thanks - -A big thanks to all the Grafana users who contribute by submitting PRs, bug reports and feedback! diff --git a/docs/sources/whatsnew/whats-new-in-v4-2.md b/docs/sources/whatsnew/whats-new-in-v4-2.md deleted file mode 100644 index f9edad7d673..00000000000 --- a/docs/sources/whatsnew/whats-new-in-v4-2.md +++ /dev/null @@ -1,93 +0,0 @@ ---- -_build: - list: false -aliases: - - /docs/grafana/latest/guides/whats-new-in-v4-2/ - - /docs/grafana/latest/whatsnew/whats-new-in-v4-2/ -description: Feature and improvement highlights for Grafana v4.2 -keywords: - - grafana - - new - - documentation - - 4.2.0 - - release notes -title: What's new in Grafana v4.2 -weight: -9 ---- - -## What's new in Grafana v4.2 - -Grafana v4.2 Beta is now [available for download](https://grafana.com/grafana/download/4.2.0). -Just like the last release this one contains lots bug fixes and minor improvements. -We are very happy to say that 27 of 40 issues was closed by pull requests from the community. -Big thumbs up! 
-
-## Release Highlights
-
-- **Hipchat**: Adds support for sending alert notifications to hipchat [#6451](https://github.com/grafana/grafana/issues/6451), thx [@jregovic](https://github.com/jregovic)
-- **Telegram**: Added Telegram alert notifier [#7098](https://github.com/grafana/grafana/pull/7098), thx [@leonoff](https://github.com/leonoff)
-- **LINE**: Add LINE as alerting notification channel [#7301](https://github.com/grafana/grafana/pull/7301), thx [@huydx](https://github.com/huydx)
-- **Templating**: Make $**interval and $**interval_ms global built-in variables that can be used by any data source (in panel queries), closes [#7190](https://github.com/grafana/grafana/issues/7190), closes [#6582](https://github.com/grafana/grafana/issues/6582)
-- **Alerting**: Adds deduping of alert notifications [#7632](https://github.com/grafana/grafana/pull/7632)
-- **Alerting**: Better information about why an alert triggered [#7035](https://github.com/grafana/grafana/issues/7035)
-- **Orgs**: Sharing dashboards using Grafana share feature will now redirect to correct org. [#6948](https://github.com/grafana/grafana/issues/6948)
-- [Full changelog](https://github.com/grafana/grafana/blob/master/CHANGELOG.md)
-
-### New alert notification channels
-
-This release adds **five** new alert notification channels, all of them contributed by the community.
-
-- Hipchat
-- Telegram
-- LINE
-- Pushover
-- Threema
-
-### Templating
-
-We added two new global built-in variables in Grafana. `$__interval` and `$__interval_ms` are now reserved template names in Grafana and can be used by any data source.
-We might add more global built-in variables in the future and if we do we will prefix them with `$__`. So please avoid using that in your template variables.
-
-### Dedupe alert notifications when running multiple servers
-
-In this release we will dedupe alert notifications when you are running multiple servers.
-This makes it possible to run alerting on multiple servers and only get one notification.
-
-We currently solve this with SQL transactions, which puts some limitations on how many servers you can use to execute the same rules.
-3-5 servers should not be a problem but as always, it depends on how many alerts you have and how frequently they execute.
-
-Next up for a better HA situation is to add support for workload balancing between Grafana servers.
-
-### Alerting more info
-
-You can now see the reason why an alert triggered in the alert history. It's also easier to detect when an alert is set to `alerting` due to the `no_data` option.
-
-### Improved support for multi-org setup
-
-When loading dashboards we now set a query parameter called orgId. So we can detect from which org a user shared a dashboard.
-This makes it possible for users to share dashboards between orgs without changing org first.
-
-We aim to introduce [dashboard groups](https://github.com/grafana/grafana/issues/1611) sometime in the future which will introduce access control and user groups within one org.
-Making it possible to have users in multiple groups and have detailed access control.
-
-## Upgrade and Breaking changes
-
-If you're using HTTPS in Grafana we now force you to use TLS 1.2 and the most secure ciphers.
-We think it's better to be secure by default rather than making it configurable.
-If you want to run HTTPS with lower versions of TLS we suggest you put a reverse proxy in front of Grafana.
-
-If you have template variables named `$__interval` or `$__interval_ms` they will no longer work since these keywords
-are reserved as global built-in variables. We might add more global built-in variables in the future and if we do, we will prefix them with `$__`. So please avoid using that in your template variables.
- -## Changelog - -Check out the [CHANGELOG.md](https://github.com/grafana/grafana/blob/master/CHANGELOG.md) file for a complete list -of new features, changes, and bug fixes. - -## Download - -Head to [v4.2-beta download page](/download/4_2_0/) for download links and instructions. - -## Thanks - -A big thanks to all the Grafana users who contribute by submitting PRs, bug reports and feedback! diff --git a/docs/sources/whatsnew/whats-new-in-v4-3.md b/docs/sources/whatsnew/whats-new-in-v4-3.md deleted file mode 100644 index 00ce76ba40c..00000000000 --- a/docs/sources/whatsnew/whats-new-in-v4-3.md +++ /dev/null @@ -1,109 +0,0 @@ ---- -_build: - list: false -aliases: - - /docs/grafana/latest/guides/whats-new-in-v4-3/ - - /docs/grafana/latest/whatsnew/whats-new-in-v4-3/ -description: Feature and improvement highlights for Grafana v4.3 -keywords: - - grafana - - new - - documentation - - 4.3.0 - - release notes -title: What's new in Grafana v4.3 -weight: -10 ---- - -## What's new in Grafana v4.3 - -Grafana v4.3 Beta is now [available for download](https://grafana.com/grafana/download/4.3.0-beta1). - -## Release Highlights - -- New [Heatmap Panel](http://docs.grafana.org/features/panels/heatmap/) -- Graph Panel Histogram Mode -- Elasticsearch Histogram Aggregation -- Prometheus Table data format -- New [MySQL Data Source](http://docs.grafana.org/features/datasources/mysql/) (alpha version to get some early feedback) -- 60+ small fixes and improvements, most of them contributed by our fantastic community! - -Check out the [New Features in v4.3 Dashboard](https://play.grafana.org/dashboard/db/new-features-in-v4-3?orgId=1) on the Grafana Play site for a showcase of these new features. - -## Histogram Support - -A Histogram is a kind of bar chart that groups numbers into ranges, often called buckets or bins. Taller bars show that more data falls in that range. - -The Graph Panel now supports Histograms. 
- -![](/static/img/docs/v43/heatmap_histogram.png) - -## Histogram Aggregation Support for Elasticsearch - -Elasticsearch is the only supported data source that can return pre-bucketed data (data that is already grouped into ranges). With other data sources there is a risk of returning inaccurate data in a histogram due to using already aggregated data rather than raw data. This release adds support for Elasticsearch pre-bucketed data that can be visualized with the new [Heatmap Panel](http://docs.grafana.org/features/panels/heatmap/). - -## Heatmap Panel - -The Histogram support in the Graph Panel does not show changes over time - it aggregates all the data together for the chosen time range. To visualize a histogram over time, we have built a new [Heatmap Panel](http://docs.grafana.org/features/panels/heatmap/). - -Every column in a Heatmap is a histogram snapshot. Instead of visualizing higher values with higher bars, a heatmap visualizes higher values with color. The histogram shown above is equivalent to one column in the heatmap shown below. - -![](/static/img/docs/v43/heatmap_histogram_over_time.png) - -The Heatmap panel also works with Elasticsearch Histogram Aggregations for more accurate server side bucketing. - -![](/assets/img/blog/v4/elastic_heatmap.jpg) - -## MySQL Data Source (alpha) - -This release includes a [new core data source for MySQL](http://docs.grafana.org/features/datasources/mysql/). You can write any possible MySQL query and format it as either Time Series or Table Data allowing it be used with the Graph Panel, Table Panel and SingleStat Panel. - -We are still working on the MySQL data source. As it's missing some important features, like templating and macros and future changes could be breaking, we are -labeling the state of the data source as Alpha. Instead of holding up the release of v4.3 we are including it in its current shape to get some early feedback. 
So please try it out and let us know what you think on [twitter](https://twitter.com/intent/tweet?text=.%40grafana&source=4_3_beta_blog&related=blog) or on our [community forum](https://community.grafana.com/c/releases). Is this a feature that you would use? How can we make it better? - -**The query editor can show the generated and interpolated SQL that is sent to the MySQL server.** - -![](/static/img/docs/v43/mysql_table_query.png) - -**The query editor will also show any errors that resulted from running the query (very useful when you have a syntax error!).** - -![](/static/img/docs/v43/mysql_query_error.png) - -## Health Check Endpoint - -Now you can monitor the monitoring with the Health Check Endpoint! The new `/api/health` endpoint returns HTTP 200 OK if everything is up and HTTP 503 Error if the Grafana database cannot be pinged. - -## Lazy Load Panels - -Grafana now delays loading panels until they become visible (scrolled into view). This means panels out of view are not sending requests thereby reducing the load on your time series database. - -## Prometheus - Table Data (column per label) - -The Prometheus data source now supports the Table Data format by automatically assigning a column to a label. This makes it really easy to browse data in the table panel. 
- -![](/static/img/docs/v43/prom_table_cols_as_labels.png) - -## Other Highlights From The Changelog - -Changes: - -- **Table**: Support to change column header text [#3551](https://github.com/grafana/grafana/issues/3551) -- **InfluxDB**: influxdb query builder support for ORDER BY and LIMIT (allows TOPN queries) [#6065](https://github.com/grafana/grafana/issues/6065) Support influxdb's SLIMIT Feature [#7232](https://github.com/grafana/grafana/issues/7232) thx [@thuck](https://github.com/thuck) -- **Graph**: Support auto grid min/max when using log scale [#3090](https://github.com/grafana/grafana/issues/3090), thx [@bigbenhur](https://github.com/bigbenhur) -- **Prometheus**: Make Prometheus query field a textarea [#7663](https://github.com/grafana/grafana/issues/7663), thx [@hagen1778](https://github.com/hagen1778) -- **Server**: Support listening on a Unix socket [#4030](https://github.com/grafana/grafana/issues/4030), thx [@mitjaziv](https://github.com/mitjaziv) - -Fixes: - -- **MySQL**: 4-byte UTF8 not supported when using MySQL database (allows Emojis in Dashboard Names) [#7958](https://github.com/grafana/grafana/issues/7958) -- **Dashboard**: Description tooltip is not fully displayed [#7970](https://github.com/grafana/grafana/issues/7970) - -Lots more enhancements and fixes can be found in the [Changelog](https://github.com/grafana/grafana/blob/master/CHANGELOG.md). - -## Download - -Head to the [v4.3 download page](https://grafana.com/grafana/download) for download links and instructions. - -## Thanks - -A big thanks to all the Grafana users who contribute by submitting PRs, bug reports, helping out on our [community site](https://community.grafana.com/) and providing feedback! 
diff --git a/docs/sources/whatsnew/whats-new-in-v4-4.md b/docs/sources/whatsnew/whats-new-in-v4-4.md deleted file mode 100644 index e2bc015c9ae..00000000000 --- a/docs/sources/whatsnew/whats-new-in-v4-4.md +++ /dev/null @@ -1,55 +0,0 @@ ---- -_build: - list: false -aliases: - - /docs/grafana/latest/guides/whats-new-in-v4-4/ - - /docs/grafana/latest/whatsnew/whats-new-in-v4-4/ -description: Feature and improvement highlights for Grafana v4.4 -keywords: - - grafana - - new - - documentation - - 4.4.0 - - release notes -title: What's new in Grafana v4.4 -weight: -11 ---- - -## What's new in Grafana v4.4 - -Grafana v4.4 is now [available for download](https://grafana.com/grafana/download/4.4.0). - -**Highlights**: - -- Dashboard History - version control for dashboards. - -## New Features - -**Dashboard History**: View dashboard version history, compare any two versions (summary and json diffs), restore to old version. This big feature -was contributed by **Walmart Labs**. Big thanks to them for this massive contribution! 
-Initial feature request: [#4638](https://github.com/grafana/grafana/issues/4638) -Pull Request: [#8472](https://github.com/grafana/grafana/pull/8472) - -## Enhancements - -- **Elasticsearch**: Added filter aggregation label [#8420](https://github.com/grafana/grafana/pull/8420), thx [@tianzk](github.com/tianzk) -- **Sensu**: Added option for source and handler [#8405](https://github.com/grafana/grafana/pull/8405), thx [@joemiller](github.com/joemiller) -- **CSV**: Configurable csv export datetime format [#8058](https://github.com/grafana/grafana/issues/8058), thx [@cederigo](github.com/cederigo) -- **Table Panel**: Column style that preserves formatting/indentation (like pre tag) [#6617](https://github.com/grafana/grafana/issues/6617) -- **DingDing**: Add DingDing Alert Notifier [#8473](https://github.com/grafana/grafana/pull/8473) thx [@jiamliang](https://github.com/jiamliang) - -## Minor Enhancements - -- **Elasticsearch**: Add option for result set size in raw_document [#3426](https://github.com/grafana/grafana/issues/3426) [#8527](https://github.com/grafana/grafana/pull/8527), thx [@mk-dhia](github.com/mk-dhia) - -## Bug Fixes - -- **Graph**: Bug fix for negative values in histogram mode [#8628](https://github.com/grafana/grafana/issues/8628) - -## Download - -Head to the [v4.4 download page](https://grafana.com/grafana/download) for download links and instructions. - -## Thanks - -A big thanks to all the Grafana users who contribute by submitting PRs, bug reports, helping out on our [community site](https://community.grafana.com/) and providing feedback! 
diff --git a/docs/sources/whatsnew/whats-new-in-v4-5.md b/docs/sources/whatsnew/whats-new-in-v4-5.md deleted file mode 100644 index d62a0c88fdc..00000000000 --- a/docs/sources/whatsnew/whats-new-in-v4-5.md +++ /dev/null @@ -1,77 +0,0 @@ ---- -_build: - list: false -aliases: - - /docs/grafana/latest/guides/whats-new-in-v4-5/ - - /docs/grafana/latest/whatsnew/whats-new-in-v4-5/ -description: Feature and improvement highlights for Grafana v4.5 -keywords: - - grafana - - new - - documentation - - '4.5' - - release notes -title: What's new in Grafana v4.5 -weight: -12 ---- - -# What's new in Grafana v4.5 - -## Highlights - -### New prometheus query editor - -The new query editor has full syntax highlighting. As well as auto complete for metrics, functions, and range vectors. There are also integrated function docs right from the query editor! - -{{< figure src="/static/img/docs/v45/prometheus_query_editor_still.png" class="docs-image--block" animated-gif="/static/img/docs/v45/prometheus_query_editor.gif" >}} - -### Elasticsearch: Add ad-hoc filters from the table panel - -{{< figure src="/static/img/docs/v45/elastic_ad_hoc_filters.png" class="docs-image--block" >}} - -### Table cell links! - -Create column styles that turn cells into links that use the value in the cell (or other row values) to generate a URL to another dashboard or system: -![](/static/img/docs/v45/table_links.jpg) - -### Query Inspector - -Query Inspector is a new feature that shows query requests and responses. This can be helpful if a graph is not shown or shows something very different than what you expected. -For more information about query inspector, refer to [using grafanas query inspector to troubleshoot issues](https://community.grafana.com/t/using-grafanas-query-inspector-to-troubleshoot-issues/2630). 
-![](/static/img/docs/v45/query_inspector.png) - -## Changelog - -### New Features - -- **Table panel**: Render cell values as links that can have an URL template that uses variables from current table row. [#3754](https://github.com/grafana/grafana/issues/3754) -- **Elasticsearch**: Add ad hoc filters directly by clicking values in table panel [#8052](https://github.com/grafana/grafana/issues/8052). -- **MySQL**: New rich query editor with syntax highlighting -- **Prometheus**: New rich query editor with syntax highlighting, metric and range auto complete and integrated function docs. [#5117](https://github.com/grafana/grafana/issues/5117) - -### Enhancements - -- **GitHub OAuth**: Support for GitHub organizations with 100+ teams. [#8846](https://github.com/grafana/grafana/issues/8846), thx [@skwashd](https://github.com/skwashd) -- **Graphite**: Calls to Graphite API /metrics/find now include panel or dashboard time range (from and until) in most cases, [#8055](https://github.com/grafana/grafana/issues/8055) -- **Graphite**: Added new graphite 1.0 functions, available if you set version to 1.0.x in data source settings. New Functions: mapSeries, reduceSeries, isNonNull, groupByNodes, offsetToZero, grep, weightedAverage, removeEmptySeries, aggregateLine, averageOutsidePercentile, delay, exponentialMovingAverage, fallbackSeries, integralByInterval, interpolate, invert, linearRegression, movingMin, movingMax, movingSum, multiplySeriesWithWildcards, pow, powSeries, removeBetweenPercentile, squareRoot, timeSlice, closes [#8261](https://github.com/grafana/grafana/issues/8261) -- **Elasticsearch**: Ad-hoc filters now use query phrase match filters instead of term filters, works on non keyword/raw fields [#9095](https://github.com/grafana/grafana/issues/9095). 
- -### Breaking change - -- **InfluxDB/Elasticsearch**: The panel and data source option named "Group by time interval" is now named "Min time interval" and does now always define a lower limit for the auto group by time. Without having to use `>` prefix (that prefix still works). This should in theory have close to zero actual impact on existing dashboards. It does mean that if you used this setting to define a hard group by time interval of, say "1d", if you zoomed to a time range wide enough the time range could increase above the "1d" range as the setting is now always considered a lower limit. - -This option is now renamed (and moved to Options sub section above your queries): -![image|519x120](upload://ySjHOVpavV6yk9LHQxL9nq2HIsT.png) - -Data source selection and options and help are now above your metric queries. -![image|690x179](upload://5kNDxKgMz1BycOKgG3iWYLsEVXv.png) - -### Minor Changes - -- **InfluxDB**: Change time range filter for absolute time ranges to be inclusive instead of exclusive [#8319](https://github.com/grafana/grafana/issues/8319), thx [@Oxydros](https://github.com/Oxydros) -- **InfluxDB**: Added parenthesis around tag filters in queries [#9131](https://github.com/grafana/grafana/pull/9131) - -## Bug Fixes - -- **Modals**: Maintain scroll position after opening/leaving modal [#8800](https://github.com/grafana/grafana/issues/8800) -- **Templating**: You cannot select data source variables as data source for other template variables [#7510](https://github.com/grafana/grafana/issues/7510) diff --git a/docs/sources/whatsnew/whats-new-in-v4-6.md b/docs/sources/whatsnew/whats-new-in-v4-6.md deleted file mode 100644 index aadce439fe9..00000000000 --- a/docs/sources/whatsnew/whats-new-in-v4-6.md +++ /dev/null @@ -1,80 +0,0 @@ ---- -_build: - list: false -aliases: - - /docs/grafana/latest/guides/whats-new-in-v4-6/ - - /docs/grafana/latest/whatsnew/whats-new-in-v4-6/ -description: Feature and improvement highlights for Grafana v4.6 -keywords: - - 
grafana - - new - - documentation - - '4.6' - - release notes -title: What's new in Grafana v4.6 -weight: -13 ---- - -# What's new in Grafana v4.6 - -Grafana v4.6 brings many enhancements to Annotations, Cloudwatch and Prometheus. It also adds support for Postgres as metric and table data source! - -### Annotations - -{{< figure src="/static/img/docs/v46/add_annotation_region.png" max-width= "800px" >}} - -You can now add annotation events and regions right from the graph panel! Just hold Ctrl/Cmd+Click or drag region to open the **Add Annotation** view. The -[Annotations]({{< relref "../dashboards/annotations/" >}}) documentation is updated to include details on this new exciting feature. - -### Cloudwatch - -Cloudwatch now supports alerting. Set up alert rules for any Cloudwatch metric! - -{{< figure src="/static/img/docs/v46/cloudwatch_alerting.png" max-width= "800px" >}} - -### Postgres - -Grafana v4.6 now ships with a built-in data source plugin for Postgres. Have logs or metric data in Postgres? You can now visualize that data and -define alert rules on it like any of our other data sources. - -{{< figure src="/static/img/docs/v46/postgres_table_query.png" max-width= "800px" >}} - -### Prometheus - -New enhancements include support for **instant queries** and improvements to query editor in the form of autocomplete for label names and label values. -This makes exploring and filtering Prometheus data much easier. - -## Changelog - -### New Features - -- **GCS**: Adds support for Google Cloud Storage [#8370](https://github.com/grafana/grafana/issues/8370) thx [@chuhlomin](https://github.com/chuhlomin) -- **Prometheus**: Adds /metrics endpoint for exposing Grafana metrics. [#9187](https://github.com/grafana/grafana/pull/9187) -- **Graph**: Add support for local formatting in axis. [#1395](https://github.com/grafana/grafana/issues/1395), thx [@m0nhawk](https://github.com/m0nhawk) -- **Jaeger**: Add support for open tracing using jaeger in Grafana. 
[#9213](https://github.com/grafana/grafana/pull/9213) -- **Unit types**: New date and time unit types added, useful in singlestat to show dates and times. [#3678](https://github.com/grafana/grafana/issues/3678), [#6710](https://github.com/grafana/grafana/issues/6710), [#2764](https://github.com/grafana/grafana/issues/2764) -- **CLI**: Make it possible to install plugins from any URL [#5873](https://github.com/grafana/grafana/issues/5873) -- **Prometheus**: Add support for instant queries [#5765](https://github.com/grafana/grafana/issues/5765), thx [@mtanda](https://github.com/mtanda) -- **Cloudwatch**: Add support for alerting using the cloudwatch data source [#8050](https://github.com/grafana/grafana/pull/8050), thx [@mtanda](https://github.com/mtanda) -- **Pagerduty**: Include triggering series in pagerduty notification [#8479](https://github.com/grafana/grafana/issues/8479), thx [@rickymoorhouse](https://github.com/rickymoorhouse) -- **Timezone**: Time ranges like Today and Yesterday now work correctly when timezone setting is set to UTC [#8916](https://github.com/grafana/grafana/issues/8916), thx [@ctide](https://github.com/ctide) -- **Prometheus**: Align $\_\_interval with the step parameters. [#9226](https://github.com/grafana/grafana/pull/9226), thx [@alin-amana](https://github.com/alin-amana) -- **Prometheus**: Autocomplete for label name and label value [#9208](https://github.com/grafana/grafana/pull/9208), thx [@mtanda](https://github.com/mtanda) -- **Postgres**: New Postgres data source [#9209](https://github.com/grafana/grafana/pull/9209), thx [@svenklemm](https://github.com/svenklemm) -- **Data sources**: closes [#9371](https://github.com/grafana/grafana/issues/9371), [#5334](https://github.com/grafana/grafana/issues/5334), [#8812](https://github.com/grafana/grafana/issues/8812), thx [@mattbostock](https://github.com/mattbostock) - -### Minor Changes - -- **SMTP**: Make it possible to set specific EHLO for SMTP client. 
[#9319](https://github.com/grafana/grafana/issues/9319) -- **Dataproxy**: Allow Grafana to renegotiate TLS connection [#9250](https://github.com/grafana/grafana/issues/9250) -- **HTTP**: set net.Dialer.DualStack to true for all HTTP clients [#9367](https://github.com/grafana/grafana/pull/9367) -- **Alerting**: Add diff and percent diff as series reducers [#9386](https://github.com/grafana/grafana/pull/9386), thx [@shanhuhai5739](https://github.com/shanhuhai5739) -- **Slack**: Allow images to be uploaded to slack when Token is present [#7175](https://github.com/grafana/grafana/issues/7175), thx [@xginn8](https://github.com/xginn8) -- **Opsgenie**: Use their latest API instead of old version [#9399](https://github.com/grafana/grafana/pull/9399), thx [@cglrkn](https://github.com/cglrkn) -- **Table**: Add support for displaying the timestamp with milliseconds [#9429](https://github.com/grafana/grafana/pull/9429), thx [@s1061123](https://github.com/s1061123) -- **Hipchat**: Add metrics, message and image to hipchat notifications [#9110](https://github.com/grafana/grafana/issues/9110), thx [@eloo](https://github.com/eloo) -- **Postgres**: modify group by time macro so it can be used in select clause [#9527](https://github.com/grafana/grafana/pull/9527), thanks [@svenklemm](https://github.com/svenklemm) - -### Tech - -- **Go**: Grafana is now built using golang 1.9 diff --git a/docs/sources/whatsnew/whats-new-in-v5-0.md b/docs/sources/whatsnew/whats-new-in-v5-0.md deleted file mode 100644 index 9f28601180d..00000000000 --- a/docs/sources/whatsnew/whats-new-in-v5-0.md +++ /dev/null @@ -1,155 +0,0 @@ ---- -_build: - list: false -aliases: - - /docs/grafana/latest/guides/whats-new-in-v5/ - - /docs/grafana/latest/whatsnew/whats-new-in-v5-0/ -description: Feature and improvement highlights for Grafana v5.0 -keywords: - - grafana - - new - - documentation - - '5.0' - - release notes -title: What's new in Grafana v5.0 -weight: -14 ---- - -# What's new in Grafana v5.0 - -This is 
the most substantial update that Grafana has ever seen. This article will detail the major new features and enhancements. - -- [New Dashboard Layout Engine]({{< relref "#new-dashboard-layout-engine" >}}) enables a much easier drag, drop and resize experience and new types of layouts. -- [New UX]({{< relref "#new-ux-layout-engine" >}}). The UI has big improvements in both look and function. -- [New Light Theme]({{< relref "#new-light-theme" >}}) is now looking really nice. -- [Dashboard Folders]({{< relref "#dashboard-folders" >}}) helps you keep your dashboards organized. -- [Permissions]({{< relref "#dashboard-folders" >}}) on folders and dashboards helps manage larger Grafana installations. -- [Group users into teams]({{< relref "#teams" >}}) and use them in the new permission system. -- [Data source provisioning]({{< relref "#data-sources" >}}) makes it possible to set up data sources via config files. -- [Dashboard provisioning]({{< relref "#dashboards" >}}) makes it possible to set up dashboards via config files. -- [Persistent dashboard URL's]({{< relref "#dashboard-model-persistent-url-s-and-api-changes" >}}) makes it possible to rename dashboards without breaking links. -- [Graphite Tags and Integrated Function Docs]({{< relref "#graphite-tags-integrated-function-docs" >}}). - -### Video showing new features - - -
- -## New Dashboard Layout Engine - -{{< figure src="/static/img/docs/v50/new_grid.png" max-width="1000px" class="docs-image--right">}} - -The new dashboard layout engine allows for much easier movement and sizing of panels, as other panels now move out of the way in -a very intuitive way. Panels are sized independently, so rows are no longer necessary to create layouts. This opens -up many new types of layouts where panels of different heights can be aligned easily. Check out the new grid in the video -above or on the [play site](https://play.grafana.org). All your existing dashboards will automatically migrate to the -new position system and look close to identical. The new panel position makes dashboards saved in v5.0 incompatible -with older versions of Grafana. - -
- -## New UX - -{{< figure src="/static/img/docs/v50/new_ux_nav.png" max-width="1000px" class="docs-image--right" >}} - -Almost every page has seen significant UX improvements. All pages (except dashboard pages) have a new tab-based layout that improves navigation between pages. The side menu has also changed quite a bit. You can still hide the side menu completely if you click on the Grafana logo. - -
- -## Dashboard Settings - -{{< figure src="/static/img/docs/v50/dashboard_settings.png" max-width="1000px" class="docs-image--right" >}} -Dashboard pages have a new header toolbar where buttons and actions are now all moved to the right. All the dashboard -settings views have been combined with a side nav which allows you to easily move between different setting categories. - -
- -## New Light Theme - -{{< figure src="/static/img/docs/v50/new_white_theme.png" max-width="1000px" class="docs-image--right" >}} - -This theme has not seen a lot of love in recent years and we felt it was time to give it a major overhaul. We are very happy with the result. - -
- -## Dashboard Folders - -{{< figure src="/static/img/docs/v50/new_search.png" max-width="1000px" class="docs-image--right" >}} - -The big new feature that comes with Grafana v5.0 is dashboard folders. Now you can organize your dashboards in folders, -which is very useful if you have a lot of dashboards or multiple teams. - -- New search design adds expandable sections for each folder, starred and recently viewed dashboards. -- New manage dashboard pages enable batch actions and views for folder settings and permissions. -- Set permissions on folders and have dashboards inherit the permissions. - -## Teams - -A team is a new concept in Grafana v5. They are simply a group of users that can be used in the new permission system for dashboards and folders. Only an admin can create teams. -We hope to do more with teams in future releases like integration with LDAP and a team landing page. - -## Permissions - -{{< figure src="/static/img/docs/v50/folder_permissions.png" max-width="1000px" class="docs-image--right" >}} - -You can assign permissions to folders and dashboards. The default user role-based permissions can be removed and -replaced with specific teams or users enabling more control over what a user can see and edit. - -Dashboard permissions only limits what dashboards and folders a user can view and edit not which -data sources a user can access nor what queries a user can issue. - -
-
-## Provisioning from configuration
-
-In previous versions of Grafana, you could only use the API for provisioning data sources and dashboards.
-But that required the service to be running before you started creating dashboards and you also needed to
-set up credentials for the HTTP API. In v5.0 we decided to improve this experience by adding a new active
-provisioning system that uses config files. This will make GitOps more natural as data sources and dashboards can
-be defined via files that can be version controlled. We hope to extend this system to later add support for users, orgs
-and alerts as well.
-
-### Data sources
-
-Data sources can now be set up using config files. These data sources are by default not editable from the Grafana GUI.
-It's also possible to update and delete data sources from the config file. More info in the [data source provisioning docs](/administration/provisioning/#datasources).
-
-### Dashboards
-
-We also deprecated the `[dashboard.json]` in favor of our new dashboard provisioner that keeps dashboards on disk
-in sync with dashboards in Grafana's database. The dashboard provisioner has multiple advantages over the old
-`[dashboard.json]` feature. Instead of storing the dashboard in memory we now insert the dashboard into the database,
-which makes it possible to star them, use one as the home dashboard, set permissions and other features in Grafana that
-expect the dashboards to exist in the database. More info in the [dashboard provisioning docs]({{< relref "../administration/provisioning/" >}}).
-
-## Graphite Tags and Integrated Function Docs
-
-{{< figure src="/static/img/docs/v50/graphite_tags.png" max-width="1000px" class="docs-image--right" >}}
-
-The Graphite query editor has been updated to support the latest Graphite version (v1.2) that adds
-many new functions and support for querying by tags. You can now also view function documentation right in the query editor!
- -Read more on [Graphite Tag Support](http://graphite.readthedocs.io/en/latest/tags.html?highlight=tags). - -
-
-## Dashboard model, persistent URLs and API changes
-
-We are introducing a new unique identifier (`uid`) in the dashboard JSON model. It's automatically
-generated if not provided when creating a dashboard and will have a length of 9-12 characters.
-
-The unique identifier allows having persistent URLs for accessing dashboards, sharing them
-between instances and when using [dashboard provisioning](/administration/provisioning/#reusable-dashboard-urls). This means that dashboards can
-be renamed without breaking any links. We're changing the URL format for dashboards
-from `/dashboard/db/:slug` to `/d/:uid/:slug`. We'll keep supporting the old slug-based URLs for dashboards
-and redirect to the new one for backward compatibility. Please note that the old slug-based URLs
-have been deprecated and will be removed in a future release.
-
-Sharing dashboards between instances becomes much easier since the `uid` is unique (unique enough).
-This might seem like a small change, but we are incredibly excited about it since it will make it
-much easier to manage, collaborate and navigate between dashboards.
-
-### API changes
-
-New uid-based routes in the dashboard API have been introduced to retrieve and delete dashboards.
-The corresponding slug-based routes have been deprecated and will be removed in a future release.
diff --git a/docs/sources/whatsnew/whats-new-in-v5-1.md b/docs/sources/whatsnew/whats-new-in-v5-1.md deleted file mode 100644 index 0076840701e..00000000000 --- a/docs/sources/whatsnew/whats-new-in-v5-1.md +++ /dev/null @@ -1,129 +0,0 @@ ---- -_build: - list: false -aliases: - - /docs/grafana/latest/guides/whats-new-in-v5-1/ - - /docs/grafana/latest/whatsnew/whats-new-in-v5-1/ -description: Feature and improvement highlights for Grafana v5.1 -keywords: - - grafana - - new - - documentation - - '5.1' - - release notes -title: What's new in Grafana v5.1 -weight: -15 ---- - -# What's new in Grafana v5.1 - -Grafana v5.1 brings new features, many enhancements and bug fixes. This article will detail the major new features and enhancements. - -- [Improved scrolling experience]({{< relref "#improved-scrolling-experience" >}}) -- [Improved docker image]({{< relref "#improved-docker-image-breaking-change" >}}) with a breaking change! -- [Heatmap support for Prometheus]({{< relref "#prometheus" >}}) -- [Microsoft SQL Server]({{< relref "#microsoft-sql-server" >}}) as metric and table data source! -- [Dashboards and Panels]({{< relref "#dashboards-panels" >}}) Improved adding panels to dashboards and enhancements to Graph and Table panels. -- [New variable interpolation syntax]({{< relref "#new-variable-interpolation-syntax" >}}) -- [Improved workflow for provisioned dashboards]({{< relref "#improved-workflow-for-provisioned-dashboards" >}}) - -## Improved scrolling experience - -In Grafana v5.0 we introduced a new scrollbar component. Unfortunately this introduced a lot of issues and in some scenarios removed -the native scrolling functionality. Grafana v5.1 ships with a native scrollbar for all pages together with a scrollbar component for -the dashboard grid and panels that's not overriding the native scrolling functionality. We hope that these changes and improvements should -make the Grafana user experience much better! 
- -## Improved Docker image (breaking change) - -Grafana v5.1 brings an improved official docker image which should make it easier to run and use the Grafana docker image and at the same time give more control to the user how to use/run it. - -We've switched the id of the grafana user running Grafana inside a docker container. Unfortunately this means that files created prior to 5.1 won't have the correct permissions for later versions and thereby this introduces a breaking change. -We made this change so that it would be easier for you to control what user Grafana is executed as (see examples below). - -| Version | User | User ID | -| ------- | ------- | ------- | -| < 5.1 | grafana | 104 | -| >= 5.1 | grafana | 472 | - -Please read the [updated documentation](/installation/docker/#migrate-to-v51-or-later) which includes migration instructions and more information. - -## Prometheus - -{{< figure src="/static/img/docs/v51/prometheus_heatmap.png" max-width="800px" class="docs-image--right" >}} - -The Prometheus data source now support transforming Prometheus histograms to the heatmap panel. Prometheus histogram is a powerful feature, and we're -really happy to finally allow our users to render those as heatmaps. Please read [Heatmap panel documentation](/features/panels/heatmap/#pre-bucketed-data) -for more information on how to use it. - -Prometheus query editor also got support for autocomplete of template variables. More information in the [Prometheus data source documentation]({{< relref "../datasources/prometheus/" >}}). - -
- -## Microsoft SQL Server - -{{< figure src="/static/img/docs/v51/mssql_query_editor_showcase.png" max-width= "800px" class="docs-image--right" >}} - -Grafana v5.1 now ships with a built-in Microsoft SQL Server (MSSQL) data source plugin that allows you to query and visualize data from any -Microsoft SQL Server 2005 or newer, including Microsoft Azure SQL Database. Do you have metric or log data in MSSQL? You can now visualize -that data and define alert rules on it like with any of Grafana's other core data sources. - -Please read [Using Microsoft SQL Server in Grafana documentation]({{< relref "../datasources/mssql/" >}}) for more detailed information on how to get started and use it. - -
- -## Dashboards and Panels - -### Adding new panels to dashboards - -{{< figure src="/static/img/docs/v51/dashboard_add_panel.png" max-width= "800px" class="docs-image--right" >}} - -The control for adding new panels to dashboards have got some enhancements and now includes functionality to search for the type of panel -you want to add. Further, the control has tabs separating functionality for adding new panels and pasting -copied panels. - -By copying a panel in a dashboard it will be displayed in the `Paste` tab in _any_ dashboard and allows you to paste the -copied panel into the current dashboard. - -{{< figure src="/static/img/docs/v51/dashboard_panel_copy.png" max-width= "300px" >}} - -
-
-### Graph Panel
-
-New enhancements include support for multiple series stacking in histogram mode, thresholds for right Y axis, aligning left and right Y-axes to one level and additional units. More information in the [Graph panel documentation]({{< relref "../visualizations/graph-panel/" >}}).
-
-### Table Panel
-
-New enhancements include support for mapping a numeric value/range to text and additional units. More information in the [Table panel documentation](/features/panels/table_panel/#string).
-
-## New variable interpolation syntax
-
-We now support a new option for rendering variables that gives the user full control of how the value(s) should be rendered.
-In the table below you can see some examples and you can find all different options in the [Variables documentation](http://docs.grafana.org/variables/templates-and-variables/#advanced-formatting-options).
-
-| Filter Option | Example          | Raw                | Interpolated      | Description                                               |
-| ------------- | ---------------- | ------------------ | ----------------- | --------------------------------------------------------- |
-| `glob`        | ${servers:glob}  | `'test1', 'test2'` | `{test1,test2}`   | Formats multi-value variable into a glob                  |
-| `regex`       | ${servers:regex} | `'test.', 'test2'` | `(test\.\|test2)` | Formats multi-value variable into a regex string          |
-| `pipe`        | ${servers:pipe}  | `'test.', 'test2'` | `test.\|test2`    | Formats multi-value variable into a pipe-separated string |
-| `csv`         | ${servers:csv}   | `'test1', 'test2'` | `test1,test2`     | Formats multi-value variable as a comma-separated string  |
-
-## Improved workflow for provisioned dashboards
-
-{{< figure src="/static/img/docs/v51/provisioning_cannot_save_dashboard.png" max-width="800px" class="docs-image--right" >}}
-
-Grafana v5.1 brings an improved workflow for provisioned dashboards:
-
-- A populated `id` property in JSON is now automatically removed when provisioning dashboards.
-- When making changes to a provisioned dashboard you can `Save` the dashboard which now will bring up a _Cannot save provisioned dashboard_ dialog like seen in the screenshot to the right. - -Available options in the dialog will let you `Copy JSON to Clipboard` and/or `Save JSON to file` which can help you synchronize your dashboard changes back to the provisioning source. -More information in the [Provisioning documentation](/administration/provisioning/). - -
- -## Changelog - -Check out the [CHANGELOG.md](https://github.com/grafana/grafana/blob/master/CHANGELOG.md) file for a complete list -of new features, changes, and bug fixes. diff --git a/docs/sources/whatsnew/whats-new-in-v5-2.md b/docs/sources/whatsnew/whats-new-in-v5-2.md deleted file mode 100644 index 37847ba8837..00000000000 --- a/docs/sources/whatsnew/whats-new-in-v5-2.md +++ /dev/null @@ -1,106 +0,0 @@ ---- -_build: - list: false -aliases: - - /docs/grafana/latest/guides/whats-new-in-v5-2/ - - /docs/grafana/latest/whatsnew/whats-new-in-v5-2/ -description: Feature and improvement highlights for Grafana v5.2 -keywords: - - grafana - - new - - documentation - - '5.2' - - release notes -title: What's new in Grafana v5.2 -weight: -16 ---- - -# What's new in Grafana v5.2 - -Grafana v5.2 brings new features, many enhancements and bug fixes. This article will detail the major new features and enhancements. - -- [Elasticsearch alerting]({{< relref "#elasticsearch-alerting" >}}) it's finally here! -- [Native builds for ARM]({{< relref "#native-builds-for-arm" >}}) native builds of Grafana for many more platforms! -- [Improved Docker image]({{< relref "#improved-docker-image" >}}) with support for docker secrets -- [Security]({{< relref "#security" >}}) make your Grafana instance more secure -- [Prometheus]({{< relref "#prometheus" >}}) with alignment enhancements -- [InfluxDB]({{< relref "#influxdb" >}}) now supports the `mode` function -- [Alerting]({{< relref "#alerting" >}}) with alert notification channel type for Discord -- [Dashboards and Panels]({{< relref "#dashboards-panels" >}}) with save and import enhancements - -## Elasticsearch alerting - -{{< figure src="/static/img/docs/v52/elasticsearch_alerting.png" max-width="800px" class="docs-image--right" >}} - -Grafana v5.2 ships with an updated Elasticsearch data source with support for alerting. 
Alerting support for Elasticsearch has been one of -the most requested features by our community and now it's finally here. Please try it out and let us know what you think. - -
- -## Native builds for ARM - -Grafana v5.2 brings an improved build pipeline with cross-platform support. This enables native builds of Grafana for ARMv7 (x32) and ARM64 (x64). -We've been longing for native ARM build support for ages. With the help from our amazing community this is now finally available. -Please try it out and let us know what you think. - -Another great addition with the improved build pipeline is that binaries for macOS/Darwin (x64) and Windows (x64) are now automatically built and -published for both stable and nightly builds. - -## Improved Docker image - -The Grafana docker image adds support for Docker secrets which enables you to supply Grafana with configuration through files. More -information in the [Installing using Docker documentation](/installation/docker/#reading-secrets-from-files-support-for-docker-secrets). - -## Security - -{{< figure src="/static/img/docs/v52/login_change_password.png" max-width="800px" class="docs-image--right" >}} - -Starting from Grafana v5.2, when you login with the administrator account using the default password you'll be presented with a form to change the password. -We hope this encourages users to follow Grafana's best practices and change the default administrator password. - -
- -## Prometheus - -The Prometheus data source now aligns the start/end of the query sent to Prometheus with the step, which ensures PromQL expressions with _rate_ -functions get consistent results, and thus avoids graphs jumping around on reload. - -## InfluxDB - -The InfluxDB data source now includes support for the _mode_ function which returns the most frequent value in a list of field values. - -## Alerting - -By popular demand Grafana now includes support for an alert notification channel type for [Discord](https://discordapp.com/). - -## Dashboards and Panels - -### Modified time range and variables are no longer saved by default - -{{< figure src="/static/img/docs/v52/dashboard_save_modal.png" max-width="800px" class="docs-image--right" >}} - -Starting from Grafana v5.2, a modified time range or variable are no longer saved by default. To save a modified -time range or variable, you'll need to actively select that when saving a dashboard, see screenshot. -This should hopefully make it easier to have same defaults for time and variables in dashboards and make it more explicit -when you actually want to overwrite those settings. - -
- -### Import dashboard enhancements - -{{< figure src="/static/img/docs/v52/dashboard_import.png" max-width="800px" class="docs-image--right" >}} - -Grafana v5.2 adds support for specifying an existing folder or creating a new one when importing a dashboard - a long-awaited feature since -Grafana v5.0 introduced support for dashboard folders and permissions. The import dashboard page has also got some general improvements -and should now make it more clear if a possible import will overwrite an existing dashboard, or not. - -This release also adds some improvements for those users only having editor or admin permissions in certain folders. The links to -_Create Dashboard_ and _Import Dashboard_ are now available in the side navigation, in dashboard search and on the manage dashboards/folder page for a -user that has editor role in an organization or the edit permission in at least one folder. - -
- -## Changelog - -Check out the [CHANGELOG.md](https://github.com/grafana/grafana/blob/master/CHANGELOG.md) file for a complete list -of new features, changes, and bug fixes. diff --git a/docs/sources/whatsnew/whats-new-in-v5-3.md b/docs/sources/whatsnew/whats-new-in-v5-3.md deleted file mode 100644 index 260f06a1e0b..00000000000 --- a/docs/sources/whatsnew/whats-new-in-v5-3.md +++ /dev/null @@ -1,97 +0,0 @@ ---- -_build: - list: false -aliases: - - /docs/grafana/latest/guides/whats-new-in-v5-3/ - - /docs/grafana/latest/whatsnew/whats-new-in-v5-3/ -description: Feature and improvement highlights for Grafana v5.3 -keywords: - - grafana - - new - - documentation - - '5.3' - - release notes -title: What's new in Grafana v5.3 -weight: -17 ---- - -# What's new in Grafana v5.3 - -Grafana v5.3 brings new features, many enhancements and bug fixes. This article will detail the major new features and enhancements. - -- [Google Stackdriver]({{< relref "#google-stackdriver" >}}) as a core data source! -- [TV mode]({{< relref "#tv-and-kiosk-mode" >}}) is improved and more accessible -- [Alerting]({{< relref "#notification-reminders" >}}) with notification reminders -- [Postgres]({{< relref "#postgres-query-builder" >}}) gets a new query builder! -- [OAuth]({{< relref "#improved-oauth-support-for-gitlab" >}}) support for GitLab is improved -- [Annotations]({{< relref "#annotations" >}}) with template variable filtering -- [Variables]({{< relref "#variables" >}}) with free text support - -## Google Stackdriver - -{{< figure src="/static/img/docs/v53/stackdriver-with-heatmap.png" max-width= "600px" class="docs-image--no-shadow docs-image--right" >}} - -Grafana v5.3 ships with built-in support for [Google Stackdriver](https://cloud.google.com/stackdriver/) and enables you to visualize your Stackdriver metrics in Grafana. - -Getting started with the plugin is easy. 
Simply create a GCE Service account that has access to the Stackdriver API scope, download the Service Account key file from Google and upload it on the Stackdriver data source configuration page in Grafana and you should have a secure server-to-server authentication setup. Like other core plugins, Stackdriver has built-in support for alerting. It also comes with support for heatmaps and basic variables. - -If you're already accustomed to the Stackdriver Metrics Explorer UI, you'll notice that there are a lot of similarities to the query editor in Grafana. It is possible to add filters using wildcards and regular expressions. You can do Group By, Primary Aggregation and Alignment. - -Alias By allows you to format the legend the way you want, and it's a feature that is not yet present in the Metrics Explorer. Two other features that are only supported in the Grafana plugin are the abilities to manually set the Alignment Period in the query editor and to add Annotations queries. - -The Grafana Stackdriver plugin comes with support for automatic unit detection. Grafana will try to map the Stackdriver unit type to a corresponding unit type in Grafana, and if successful the panel Y-axes will be updated accordingly to display the correct unit of measure. This is the first core plugin to provide support for unit detection, and it is our intention to provide support for this in other core plugins in the near future. - -The data source is still in the `beta` phase, meaning it's currently in active development and is still missing one important feature - templating queries. -Please try it out, but be aware of that it might be subject to changes and possible bugs. We would love to hear your feedback. - -Refer to [Using Google Stackdriver in Grafana]({{< relref "../datasources/google-cloud-monitoring/" >}}) for more detailed information on how to get started and use it. 
- -## TV and Kiosk Mode - -{{< figure src="/static/img/docs/v53/tv_mode_still.png" max-width="600px" class="docs-image--no-shadow docs-image--right" animated-gif="/static/img/docs/v53/tv_mode.gif" >}} - -We've improved the TV and kiosk mode to make it easier to use. There's now an icon in the top bar that will let you cycle through the different view modes. - -1. In the first view mode, the sidebar and most of the buttons in the top bar will be hidden. -1. In the second view mode, the top bar is completely hidden so that only the dashboard itself is shown. -1. Hit the escape key to go back to the default view mode. - -When switching view modes, the URL will be updated to reflect the view mode selected. This allows a dashboard to be opened with a -certain view mode enabled. Additionally, this also enables [playlists](/dashboards/playlist) to be started with a certain view mode enabled. - -
- -## Notification Reminders - -Do you use Grafana Alerting and have some notifications that are more important than others? Then it's possible to set reminders so that you continue to be alerted until the problem is fixed. This is done on the notification channel itself and will affect all alerts that use that channel. -For additional examples of why reminders might be useful for you, see [multiple series](/alerting/alerts-overview/#multiple-series). - -For more information about how to enable and configure reminders, refer to [alerting reminders](/alerting/notifications/#send-reminders). - -## Postgres Query Builder - -Grafana 5.3 comes with a new graphical query builder for Postgres. This brings Postgres integration more in line with some of the other data sources and makes it easier for both advanced users and beginners to work with timeseries in Postgres. For more information about Postgres graphical query builder, refer to [query editor]({{< relref "../datasources/postgres/#query-editor" >}}). - -{{< figure src="/static/img/docs/v53/postgres_query_still.png" class="docs-image--no-shadow" animated-gif="/static/img/docs/v53/postgres_query.gif" >}} - -## Improved OAuth Support for GitLab - -Grafana 5.3 comes with a new OAuth integration for GitLab that enables configuration to only allow users that are a member of certain GitLab groups to authenticate. This makes it possible to use GitLab OAuth with Grafana in a shared environment without giving everyone access to Grafana. -For more information about how to enable and configure OAuth, refer to [Gitlab OAuth](/auth/gitlab/). - -## Annotations - -Grafana 5.3 brings improved support for [native annotations](/dashboards/annotations/#native-annotations) and makes it possible to use template variables when filtering by tags. -For more information about native annotation, refer to [query by tag](/dashboards/annotations/#query-by-tag). 
- -{{< figure src="/static/img/docs/v53/annotation_tag_filter_variable.png" max-width="600px" >}} - -## Variables - -Grafana 5.3 ships with a brand new variable type named `Text box` which makes it easier and more convenient to provide free text input to a variable. -This new variable type will display as a free text input field with an optional prefilled default value. - -## Changelog - -Check out the [CHANGELOG.md](https://github.com/grafana/grafana/blob/master/CHANGELOG.md) file for a complete list -of new features, changes, and bug fixes. diff --git a/docs/sources/whatsnew/whats-new-in-v5-4.md b/docs/sources/whatsnew/whats-new-in-v5-4.md deleted file mode 100644 index 8630cbdfbd1..00000000000 --- a/docs/sources/whatsnew/whats-new-in-v5-4.md +++ /dev/null @@ -1,88 +0,0 @@ ---- -_build: - list: false -aliases: - - /docs/grafana/latest/guides/whats-new-in-v5-4/ - - /docs/grafana/latest/whatsnew/whats-new-in-v5-4/ -description: Feature and improvement highlights for Grafana v5.4 -keywords: - - grafana - - new - - documentation - - '5.4' - - release notes -title: What's new in Grafana v5.4 -weight: -18 ---- - -# What's new in Grafana v5.4 - -Grafana v5.4 brings new features, many enhancements and bug fixes. This article will detail the major new features and enhancements. - -- [Alerting]({{< relref "#alerting" >}}) Limit false positives with the new `For` setting -- [Google Stackdriver]({{< relref "#google-stackdriver" >}}) Now with support for templating queries -- [MySQL]({{< relref "#mysql-query-builder" >}}) gets a new query builder! -- [Graph Panel]({{< relref "#graph-panel-enhancements" >}}) Highlight time regions and more -- [Team Preferences]({{< relref "#team-preferences" >}}) Give your teams their own home dashboard - -## Alerting - -{{< figure src="/static/img/docs/v54/alerting-for-dark-theme.png" max-width="600px" class="docs-image--right" >}} - -Grafana v5.4 ships with a new alert rule setting named `For` which is great for removing false positives. 
If an alert rule has a configured `For` and the query violates the configured threshold, it will first go from `OK` to `Pending`. Going from `OK` to `Pending` Grafana will not send any notifications. Once the alert rule has been firing for more than the `For` duration, it will change to `Alerting` and send alert notifications. Typically, it's always a good idea to use this setting since it's often worse to get a false positive than to wait a few minutes before the alert notification triggers.
-
-In the screenshot you can see an example timeline of an alert using the `For` setting. At ~16:04 the alert state changes to `Pending` and after 4 minutes it changes to `Alerting`, which is when alert notifications are sent. Once the series falls back to normal the alert rule goes back to `OK`. [Learn more](/alerting/alerts-overview/#for).
-
-Additionally, there's now support for disabling the sending of `OK` alert notifications. [Learn more](/alerting/notifications/#disable-resolve-message).
-
- -## Google Stackdriver - -{{< figure src="/static/img/docs/v54/stackdriver_template_query.png" max-width="600px" class="docs-image--right" >}} - -Grafana v5.3 included built-in support for [Google Stackdriver](https://cloud.google.com/stackdriver/) which enables you to visualize your Stackdriver metrics in Grafana. -One important feature missing was support for templating queries. This is now included together with a brand new templating query editor for Stackdriver. - -The Stackdriver templating query editor lets you choose from a set of different Query Types. This will in turn reveal additional drop-downs to help you -find, filter and select the templating values you're interested in, see screenshot for details. The templating query editor also supports chaining multiple variables -making it easy to define variables that's dependent on other variables. - -Stackdriver is the first data source which has support for a custom templating query editor. But starting from Grafana v5.4 it's now possible for all data sources, including plugin data sources, to -create their very own templating query editor. - -Additionally, if Grafana is running on a Google Compute Engine (GCE) virtual machine, it is now possible for Grafana to automatically retrieve default credentials from the metadata server. -This has the advantage of not needing to generate a private key file for the service account and also not having to upload the file to Grafana. [Learn more]({{< relref "../datasources/google-cloud-monitoring/#using-gce-default-service-account" >}}). - -Please read [Using Google Stackdriver in Grafana]({{< relref "../datasources/google-cloud-monitoring/" >}}) for more detailed information on how to get started and use it. - -
- -## MySQL Query Builder - -Grafana v5.4 comes with a new graphical query builder for MySQL. This brings MySQL integration more in line with some of the other data sources and makes it easier for both advanced users and beginners to work with timeseries in MySQL. For more information about MySQL graphical query builder, refer to [query editor]({{< relref "../datasources/mysql/#query-editor" >}}). - -{{< figure src="/static/img/docs/v54/mysql_query_still.png" animated-gif="/static/img/docs/v54/mysql_query.gif" >}} - -## Graph Panel Enhancements - -Grafana v5.4 adds support for highlighting weekdays and/or certain timespans in the graph panel. This should make it easier to compare for example weekends, business hours and/or off work hours. - -{{< figure src="/static/img/docs/v54/graph_time_regions.png" max-width= "800px" >}} - -Additionally, when rendering series as lines in the graph panel, should there be only one data point available for one series so that a connecting line cannot be established, a point will -automatically be rendered for that data point. This should make it easier to understand what's going on when only receiving a single data point. - -{{< figure src="/static/img/docs/v54/graph_dot_single_point.png" max-width= "800px" >}} - -## Team Preferences - -Grafana v5.4 adds support for customizing home dashboard, timezone and theme for teams, in addition to the existing customization on Organization and user Profile level. - -1. Specifying a preference on User Profile level will override preference on Team and/or Organization level -1. Specifying a preference on Team level will override preference on Organization level. - -## Changelog - -Check out the [CHANGELOG.md](https://github.com/grafana/grafana/blob/master/CHANGELOG.md) file for a complete list -of new features, changes, and bug fixes. 
diff --git a/docs/sources/whatsnew/whats-new-in-v6-0.md b/docs/sources/whatsnew/whats-new-in-v6-0.md deleted file mode 100644 index d6068ed4680..00000000000 --- a/docs/sources/whatsnew/whats-new-in-v6-0.md +++ /dev/null @@ -1,181 +0,0 @@ ---- -_build: - list: false -aliases: - - /docs/grafana/latest/guides/whats-new-in-v6-0/ - - /docs/grafana/latest/whatsnew/whats-new-in-v6-0/ -description: Feature and improvement highlights for Grafana v6.0 -keywords: - - grafana - - new - - documentation - - '6.0' - - release notes -title: What's new in Grafana v6.0 -weight: -19 ---- - -# What's new in Grafana v6.0 - -This update to Grafana introduces a new way of exploring your data, support for log data, and tons of other features. - -The main highlights are: - -- [Explore]({{< relref "#explore" >}}) - A new query focused workflow for ad-hoc data exploration and troubleshooting. -- [Grafana Loki]({{< relref "#explore-and-grafana-loki" >}}) - Integration with the new open source log aggregation system from Grafana Labs. -- [Gauge Panel]({{< relref "#gauge-panel" >}}) - A new standalone panel for gauges. -- [New Panel Editor UX]({{< relref "#new-panel-editor" >}}) improves panel editing - and enables easy switching between different visualizations. -- [Google Stackdriver data source]({{< relref "#google-stackdriver-data-source" >}}) is out of beta and is officially released. -- [Azure Monitor]({{< relref "#azure-monitor-data-source" >}}) plugin is ported from being an external plugin to be a core data source -- [React Plugin]({{< relref "#react-panels-query-editors" >}}) support enables an easier way to build plugins. -- [Named Colors]({{< relref "#named-colors" >}}) in our new improved color picker. -- [Removal of user session storage]({{< relref "#easier-to-deploy-improved-security" >}}) makes Grafana easier to deploy and improves security. 
- -## Explore - -{{< figure src="/static/img/docs/v60/explore_prometheus.png" max-width="800px" class="docs-image--right" caption="Screenshot of the new Explore option in the panel menu" >}} - -Grafana's dashboard UI is all about building dashboards for visualization. **Explore** strips away all the dashboard and panel options so that you can focus on the query and metric exploration. Iterate until you have a working query and then think about building a dashboard. You can also jump from a dashboard panel into **Explore** and from there do some ad-hoc query exploration with the panel queries as a starting point. - -For infrastructure monitoring and incident response, you no longer need to switch to other tools to debug what went wrong. **Explore** allows you to dig deeper into your metrics and logs to find the cause. Grafana's new logging data source, [Loki](https://github.com/grafana/loki) is tightly integrated into Explore and allows you to correlate metrics and logs by viewing them side-by-side. - -**Explore** is a new paradigm for Grafana. It creates a new interactive debugging workflow that integrates two pillars -of observability—metrics and logs. Explore works with every data source but for Prometheus we have customized the -query editor and the experience to provide the best possible exploration UX. - -### Explore and Prometheus - -Explore features a new [Prometheus query editor](/explore/#prometheus-specific-features). This new editor has improved autocomplete, metric tree selector, -integrations with the Explore table view for easy label filtering, and useful query hints that can automatically apply -functions to your query. There is also integration between Prometheus and Grafana Loki (see more about Loki below) that -enabled jumping between metrics query and logs query with preserved label filters. - -### Explore splits - -Explore supports splitting the view so you can compare different queries, different data sources and metrics and logs side by side! 
- -{{< figure src="/static/img/docs/v60/explore_split.png" max-width="800px" caption="Screenshot of the new Explore option in the panel menu" >}} - -
- -### Explore and Grafana Loki - -The log exploration and visualization features in Explore are available to any data source but are currently only implemented by the new open source log -aggregation system from Grafana Lab called [Grafana Loki](https://github.com/grafana/loki). - -Loki is a horizontally-scalable, highly-available, multi-tenant log aggregation system inspired by Prometheus. It is designed to be very cost effective, as it does not index the contents of the logs, but rather a set of labels for each log stream. The logs from Loki are queried in a similar way to querying with label selectors in Prometheus. It uses labels to group log streams which can be made to match up with your Prometheus labels. - -For more information about Grafana Loki, refer to [Github Grafana Loki](https://github.com/grafana/loki) or [Grafana Labs hosted Loki](https://grafana.com/loki). - -The Explore feature allows you to query logs and features a new log panel. In the near future, we will be adding support -for other log sources to Explore and the next planned integration is Elasticsearch. - -
- -
- -
- -## New Panel Editor - -Grafana v6.0 has a completely redesigned UX around editing panels. You can now resize the visualization area if you want -more space for queries/options and vice versa. You can now also change visualization (panel type) from within the new -panel edit mode. No need to add a new panel to try out different visualizations! Check out the -video below to see the new Panel Editor in action. - -
- -
- -
- -### Gauge Panel - -We have created a new separate Gauge panel as we felt having this visualization be a hidden option in the Singlestat panel -was not ideal. When it supports 100% of the Singlestat Gauge features, we plan to add a migration so all -singlestats that use it become Gauge panels instead. This new panel contains a new **Threshold** editor that we will -continue to refine and start using in other panels. - -{{< figure src="/static/img/docs/v60/gauge_panel.png" max-width="600px" caption="Gauge Panel" >}} - -
- -### React Panels and Query Editors - -A major part of all the work that has gone into Grafana v6.0 has been on the migration to React. This investment -is part of the future-proofing of Grafana's code base and ecosystem. Starting in v6.0 **Panels** and **Data -source** plugins can be written in React using our published `@grafana/ui` sdk library. More information on this -will be shared soon. - -{{< figure src="/static/img/docs/v60/react_panels.png" max-width="600px" caption="React Panel" >}} -
- -## Google Stackdriver data source - -Built-in support for [Google Stackdriver](https://cloud.google.com/stackdriver/) is officially released in Grafana 6.0. Beta support was added in Grafana 5.3 and we have added lots of improvements since then. - -To get started read the guide: [Using Google Stackdriver in Grafana]({{< relref "../datasources/google-cloud-monitoring/" >}}). - -## Azure Monitor data source - -One of the goals of the Grafana v6.0 release is to add support for the three major clouds. Amazon CloudWatch has been a core data source for years and Google Stackdriver is also now supported. We developed an external plugin for Azure Monitor last year and for this release the [plugin](https://grafana.com/plugins/grafana-azure-monitor-datasource) is being moved into Grafana to be one of the built-in data sources. For users of the external plugin, Grafana will automatically start using the built-in version. As a core data source, the Azure Monitor data source is able to get alerting support, in the 6.0 release alerting is supported for the Azure Monitor service, with the rest to follow. - -The Azure Monitor data source integrates four Azure services with Grafana - Azure Monitor, Azure Log Analytics, Azure Application Insights and Azure Application Insights Analytics. - -Please read [Using Azure Monitor in Grafana documentation]({{< relref "../datasources/azuremonitor/" >}}) for more detailed information on how to get started and use it. - -## Provisioning support for alert notifiers - -Grafana now has support for provisioning alert notifiers from configuration files, allowing operators to provision notifiers without using the UI or the API. A new field called `uid` has been introduced which is a string identifier that the administrator can set themselves. This is the same kind of identifier used for dashboards since v5.0. 
This feature makes it possible to use the same notifier configuration in multiple environments and refer to notifiers in dashboard json by a string identifier instead of the numeric id which depends on insert order and how many notifiers exist in the instance. - -## Easier to deploy and improved security - -Grafana 6.0 removes the need to configure and set up additional storage for [user sessions](/tutorials/ha_setup/#user-sessions). This should make it easier to deploy and operate Grafana in a -high availability setup and/or if you're using a stateless user session store like Redis, Memcache, Postgres or MySQL. - -Instead of user sessions, we've implemented a solution based on short-lived tokens that are rotated frequently. This also replaces the old "remember me cookie" -solution, which allowed a user to be logged in between browser sessions and which have been subject to several security holes throughout the years. -For more information about the short-lived token solution and how to configure it, refer to [short lived token](/auth/overview/#login-and-short-lived-tokens). - -> Please note that due to these changes, all users will be required to login upon next visit after upgrade. - -Besides these changes we have also made security improvements regarding Cross-Site Request Forgery (CSRF) and Cross-site Scripting (XSS) vulnerabilities: - -- Cookies are per default using the [SameSite](/administration/configuration/#cookie-samesite) attribute to protect against CSRF attacks -- Script tags in text panels are per default [disabled](/administration/configuration/#disable-sanitize-html) to protect against XSS attacks - -> **Note:** If you're using [Auth Proxy Authentication](/auth/auth-proxy/) you still need to have user sessions set up and configured -> but our goal is to remove this requirement in the near future. 
- -## Named Colors - -{{< figure src="/static/img/docs/v60/named_colors.png" max-width="400px" class="docs-image--right" caption="Named Colors" >}} - -We have updated the color picker to show named colors and primary colors. We hope this will improve accessibility and -helps making colors more consistent across dashboards. We hope to do more in this color picker in the future, like showing -colors used in the dashboard. - -Named colors also enables Grafana to adapt colors to the current theme. - -
- -## Other features - -- The ElasticSearch data source now supports [bucket script pipeline aggregations](https://www.elastic.co/guide/en/elasticsearch/reference/current/search-aggregations-pipeline-bucket-script-aggregation.html). This gives the ability to do per-bucket computations like the difference or ratio between two metrics. -- Support for Google Hangouts Chat alert notifications -- New built in template variables for the current time range in `$__from` and `$__to` - -## Upgrading - -See [upgrade notes](/installation/upgrading/#upgrading-to-v6-0). - -## Changelog - -Check out the [CHANGELOG.md](https://github.com/grafana/grafana/blob/master/CHANGELOG.md) file for a complete list of new features, changes, and bug fixes. diff --git a/docs/sources/whatsnew/whats-new-in-v6-1.md b/docs/sources/whatsnew/whats-new-in-v6-1.md deleted file mode 100644 index 425840eb327..00000000000 --- a/docs/sources/whatsnew/whats-new-in-v6-1.md +++ /dev/null @@ -1,64 +0,0 @@ ---- -_build: - list: false -aliases: - - /docs/grafana/latest/guides/whats-new-in-v6-1/ - - /docs/grafana/latest/whatsnew/whats-new-in-v6-1/ -description: Feature and improvement highlights for Grafana v6.1 -keywords: - - grafana - - new - - documentation - - '6.1' - - release notes -title: What's new in Grafana v6.1 -weight: -20 ---- - -# What's new in Grafana v6.1 - -## Highlights - -### Ad hoc Filtering for Prometheus - -{{< figure class="float-right" max-width="30%" src="/static/img/docs/v61/prometheus-ad-hoc.gif" caption="Ad-hoc filters variable for Prometheus" >}} - -The ad hoc filter feature allows you to create new key/value filters on the fly with autocomplete for both key and values. The filter condition is then automatically applied to all queries on the dashboard. This makes it easier to explore your data in a dashboard without changing queries and without having to add new template variables. - -Other timeseries databases with label-based query languages have had this feature for a while. 
Recently Prometheus added support for fetching label names from their API and thanks to [Mitsuhiro Tanda](https://github.com/mtanda) implementing it in Grafana, the Prometheus data source finally supports ad hoc filtering. - -Support for fetching a list of label names was released in Prometheus v2.6.0 so that is a requirement for this feature to work in Grafana. - -### Permissions: Editors can own dashboards, folders and teams they create - -When the dashboard folders feature and permissions system was released in Grafana 5.0, users with the editor role were not allowed to administrate dashboards, folders or teams. In the 6.1 release, we have added a configuration option that can change the default permissions so that editors are admins for any Dashboard, Folder or Team they create. - -This feature also adds a new Team permission that can be assigned to any user with the editor or viewer role and enables that user to add other users to the Team. - -We believe that this is more in line with the Grafana philosophy, as it will allow teams to be more self-organizing. This option will be made permanent if it gets positive feedback from the community so let us know what you think in the [issue on GitHub](https://github.com/grafana/grafana/issues/15590). - -To turn this feature on add the following [configuration option](/administration/configuration/#editors-can-admin) to your Grafana ini file in the `users` section and then restart the Grafana server: - -```ini -[users] -editors_can_admin = true -``` - -### List and revoke of user auth tokens in the API - -As the first step of a feature to be able to list a user's signed in devices/sessions and to be able log out those devices from the Grafana UI, support has been added to the [API to list and revoke user authentication tokens](/http_api/admin/#auth-tokens-for-user). 
- -### Minor Features and Fixes - -This release contains a lot of small features and fixes: - -- A new keyboard shortcut `d l` toggles all Graph legends in a dashboard. -- A small bug fix for Elasticsearch - template variables in the alias field now work properly. -- Some new capabilities have been added for data source plugins that will be of interest to plugin authors: - - a new OAuth pass-through option. - - it is now possible to add user details to requests sent to the dataproxy. -- Heatmap and Explore fixes. - -Check out the [CHANGELOG.md](https://github.com/grafana/grafana/blob/master/CHANGELOG.md) file for a complete list of new features, changes, and bug fixes. - -A huge thanks to our community for all the reported issues, bug fixes and feedback. diff --git a/docs/sources/whatsnew/whats-new-in-v6-2.md b/docs/sources/whatsnew/whats-new-in-v6-2.md deleted file mode 100644 index 64e0833155c..00000000000 --- a/docs/sources/whatsnew/whats-new-in-v6-2.md +++ /dev/null @@ -1,101 +0,0 @@ ---- -_build: - list: false -aliases: - - /docs/grafana/latest/guides/whats-new-in-v6-2/ - - /docs/grafana/latest/whatsnew/whats-new-in-v6-2/ -description: Feature and improvement highlights for Grafana v6.2 -keywords: - - grafana - - new - - documentation - - '6.2' - - release notes -title: What's new in Grafana v6.2 -weight: -21 ---- - -# What's new in Grafana v6.2 - -For all details please read the full [CHANGELOG.md](https://github.com/grafana/grafana/blob/master/CHANGELOG.md). - -If you use a password for your data sources please read the [upgrade notes](/installation/upgrading/#upgrading-to-v6-2). - -Check out the [demo dashboard](https://play.grafana.org/d/ZvPm55mWk/new-features-in-v6-2?orgId=1) of some the new features in v6.2. - -## Improved security - -Data sources now store passwords and basic auth passwords in `secureJsonData` encrypted by default. Existing data source with unencrypted passwords will keep working. 
-Read the [upgrade notes](/installation/upgrading/#upgrading-to-v6-2) on how to migrate existing data sources to use encrypted storage. - -To mitigate the risk of [Clickjacking](https://www.owasp.org/index.php/Clickjacking), embedding Grafana is no longer allowed per default. -Read the [upgrade notes](/installation/upgrading/#upgrading-to-v6-2) for further details of how this may affect you. - -To mitigate the risk of sensitive information being cached in browser after a user has logged out, browser caching is now disabled for full page requests. - -## Provisioning - -- Environment variables support, see [Using environment variables](/administration/provisioning/#using-environment-variables) for more information. -- Reload provisioning configs, see [Admin HTTP API](/http_api/admin/#reload-provisioning-configurations) for more information. -- Do not allow deletion of provisioned dashboards -- When trying to delete or save provisioned dashboard, relative file path to the file is shown in the dialog. - -## Official support for Elasticsearch 7 - -Grafana v6.2 ships with official support for Elasticsearch v7, see [Using Elasticsearch in Grafana]({{< relref "../datasources/elasticsearch/#elasticsearch-version" >}}) for more information. - -## Bar Gauge Panel - -Grafana v6.2 ships with a new exciting panel! This new panel, named Bar Gauge, is very similar to the current -Gauge panel and shares almost all it's options. The main difference is that the Bar Gauge uses both horizontal and -vertical space much better and can be more efficiently stacked both vertically and horizontally. The Bar Gauge also -comes with 3 unique display modes, Basic, Gradient, and Retro LED. Read the -[preview article](https://grafana.com/blog/2019/04/11/sneak-preview-of-new-visualizations-coming-to-grafana/) to learn -more about the design and features of this new panel. 
- -Retro LED display mode -{{< figure src="/assets/img/blog/bargauge/bar_gauge_retro_led.jpg" max-width="800px" caption="Bar Gauge LED mode" >}} - -Gradient mode -{{< figure src="/assets/img/blog/bargauge/gradient.jpg" max-width="800px" caption="Bar Gauge Gradient mode" >}} - -## Improved table data support - -We have been working on improving table support in our new react panels (Gauge and Bar Gauge) and this is ongoing work -that will eventually come to the new Graph and Singlestat and Table panels we are working on. But you can see it already in -the Gauge and Bar Gauge panels. Without any config, you can visualize any number of columns or choose to visualize each -row as its own gauge. - -## Lazy loading of panels out of view - -This has been one of the most requested features for many years and is now finally here! Lazy loading of panels means -Grafana will not issue any data queries for panels that are not visible. This will greatly reduce the load -on your data source backends when loading dashboards with many panels. - -## Panels without title - -Sometimes your panels do not need a title and having that panel header still take up space makes singlestats and -other panels look strange and have bad vertical centering. In v6.2 Grafana will allow panel content (visualizations) -to use the full panel height in case there is no panel title. - -{{< figure src="/static/img/docs/v62/panels_with_no_title.jpg" max-width="800px" caption="Bar Gauge Gradient mode" >}} - -## Minor Features and Fixes - -This release contains a lot of small features and fixes: - -- Explore - Adds user time zone support, reconnect for failing data sources and a fix that prevents killing Prometheus instances when Histogram metrics are loaded. -- Alerting - Adds support for configuring timeout durations and retries, see [configuration](/administration/configuration/#evaluation-timeout-seconds) for more information. -- Azure Monitor - Adds support for multiple subscriptions per data source. 
-- Elasticsearch - A small bug fix to properly display percentiles metrics in table panel. -- InfluxDB - Support for POST HTTP verb. -- CloudWatch - Important fix for default alias disappearing in v6.1. -- Search - Works in a scope of dashboard's folder by default when viewing dashboard. - -Check out the [CHANGELOG.md](https://github.com/grafana/grafana/blob/master/CHANGELOG.md) file for a complete list of new features, changes, and bug fixes. - -A huge thanks to our community for all the reported issues, bug fixes and feedback. - -## Upgrading - -Read important [upgrade notes](/installation/upgrading/#upgrading-to-v6-2). diff --git a/docs/sources/whatsnew/whats-new-in-v6-3.md b/docs/sources/whatsnew/whats-new-in-v6-3.md deleted file mode 100644 index 70921206347..00000000000 --- a/docs/sources/whatsnew/whats-new-in-v6-3.md +++ /dev/null @@ -1,152 +0,0 @@ ---- -_build: - list: false -aliases: - - /docs/grafana/latest/guides/whats-new-in-v6-3/ - - /docs/grafana/latest/whatsnew/whats-new-in-v6-3/ -description: Feature and improvement highlights for Grafana v6.3 -keywords: - - grafana - - new - - documentation - - '6.3' - - release notes -title: What's new in Grafana v6.3 -weight: -22 ---- - -# What's new in Grafana v6.3 - -For all details please read the full [CHANGELOG.md](https://github.com/grafana/grafana/blob/master/CHANGELOG.md). - -## Highlights - -- New Explore features - - [Loki Live Streaming]({{< relref "#loki-live-streaming" >}}) - - [Loki Context Queries]({{< relref "#loki-context-queries" >}}) - - [Elasticsearch Logs Support]({{< relref "#elasticsearch-logs-support" >}}) - - [InfluxDB Logs Support]({{< relref "#influxdb-logs-support" >}}) -- [Data links]({{< relref "#data-links" >}}) -- [New Time Picker]({{< relref "#new-time-picker" >}}) -- [Graph Area Gradients]({{< relref "#graph-gradients" >}}) - A new graph display option! 
-- Grafana Enterprise - - [LDAP Active Sync]({{< relref "#ldap-active-sync" >}}) - LDAP Active Sync - - [SAML Authentication]({{< relref "#saml-authentication" >}}) - SAML Authentication - -## Explore improvements - -This release adds a ton of enhancements to Explore. Both in terms of new general enhancements but also in -new data source specific features. - -### Loki live streaming - -For log queries using the Loki data source you can now stream logs live directly to the Explore UI. - -### Loki context queries - -After finding a log line through the heavy use of query filters it can then be useful to -see the log lines surrounding the line your searched for. The `show context` feature -allows you to view lines before and after the line of interest. - -### Elasticsearch logs support - -This release adds support for searching and visualizing logs stored in Elasticsearch in the Explore mode. With a special -simplified query interface specifically designed for logs search. - -{{< figure src="/static/img/docs/v63/elasticsearch_explore_logs.png" max-width="600px" caption="New Time Picker" >}} - -Please read [Using Elasticsearch in Grafana]({{< relref "../datasources/elasticsearch/#elasticsearch-version" >}}) for more detailed information on how to get started and use it. - -### InfluxDB logs support - -This release adds support for searching and visualizing logs stored in InfluxDB in the Explore mode. With a special -simplified query interface specifically designed for logs search. - -{{< figure src="/static/img/docs/v63/influxdb_explore_logs.png" max-width="600px" caption="New Time Picker" >}} - -Please read [Using InfluxDB in Grafana]({{< relref "../datasources/influxdb/#querying-logs-beta" >}}) for more detailed information on how to get started and use it. - -## Data Links - -We have simplified the UI for defining panel drilldown links (and renamed them to Panel links). We have also added a -new type of link named `Data link`. 
The reason to have two different types is to make it clear how they are used -and what variables you can use in the link. Panel links are only shown in the top left corner of -the panel and you cannot reference series name or any data field. - -While `Data links` are used by the actual visualization and can reference data fields. - -Example: - -```url -http://my-grafana.com/d/bPCI6VSZz/other-dashboard?var-server=${__series_name} -``` - -You have access to these variables: - -| Name | Description | -| ----------------------- | -------------------------------------------------------------------------- | -| _${\_\_series_name}_ | The name of the time series (or table) | -| _${\_\_value_time}_ | The time of the point your clicking on (in millisecond epoch) | -| _${\_\_url_time_range}_ | Interpolates as the full time range (i.e. from=21312323412&to=21312312312) | -| _${\_\_all_variables}_ | Adds all current variables (and current values) to the URL | - -You can then click on point in the Graph. - -{{< figure src="/static/img/docs/v63/graph_datalink.png" max-width="400px" caption="New Time Picker" >}} - -For now only the Graph panel supports `Data links` but we hope to add these to many visualizations. - -## New Time Picker - -The time picker has been re-designed and with a more basic design that makes accessing quick ranges more easy. - -{{< figure src="/static/img/docs/v63/time_picker.png" max-width="400px" caption="New Time Picker" >}} - -## Graph Gradients - -Want more eye candy in your graphs? Then the fill gradient option might be for you! Works really well for -graphs with only a single series. - -{{< figure src="/static/img/docs/v63/graph_gradient_area.jpeg" max-width="800px" caption="Graph Gradient Area" >}} - -Looks really nice in light theme as well. 
- -{{< figure src="/static/img/docs/v63/graph_gradients_white.png" max-width="800px" caption="Graph Gradient Area" >}} - -## Grafana Enterprise - -Substantial refactoring and improvements to the external auth systems has gone in to this release making the features -listed below possible as well as laying a foundation for future enhancements. - -### LDAP Active Sync - -This is a new Enterprise feature that enables background syncing of user information, org role and teams memberships. -This syncing is otherwise only done at login time. With this feature you can schedule how often this user synchronization should -occur. - -For example, lets say a user is removed from an LDAP group. In previous versions of Grafana an admin would have to -wait for the user to logout or the session to expire for the Grafana permissions to update, a process that can take days. - -With active sync the user would be automatically removed from the corresponding team in Grafana or even logged out and disabled if no longer -belonging to an LDAP group that gives them access to Grafana. - -[Read more](/auth/enhanced_ldap/#active-ldap-synchronization). - -### SAML Authentication - -Built-in support for SAML is now available in Grafana Enterprise. - -[See docs]({{< relref "../setup-grafana/configure-security/configure-authentication/saml/" >}}) - -### Team Sync for GitHub OAuth - -When setting up OAuth with GitHub it's now possible to sync GitHub teams with Teams in Grafana. - -[See docs]({{< relref "../setup-grafana/configure-security/configure-authentication/github/" >}}) - -### Team Sync for Auth Proxy - -We've added support for enriching the Auth Proxy headers with Teams information, which makes it possible -to use Team Sync with Auth Proxy. - -[See docs](/auth/auth-proxy/#auth-proxy-authentication). 
diff --git a/docs/sources/whatsnew/whats-new-in-v6-4.md b/docs/sources/whatsnew/whats-new-in-v6-4.md deleted file mode 100644 index acfb61b153c..00000000000 --- a/docs/sources/whatsnew/whats-new-in-v6-4.md +++ /dev/null @@ -1,152 +0,0 @@ ---- -_build: - list: false -aliases: - - /docs/grafana/latest/guides/whats-new-in-v6-4/ - - /docs/grafana/latest/whatsnew/whats-new-in-v6-4/ -description: Feature and improvement highlights for Grafana v6.4 -keywords: - - grafana - - new - - documentation - - '6.4' - - release notes -title: What's new in Grafana v6.4 -weight: -23 ---- - -# What's new in Grafana v6.4 - -For all details please read the full [CHANGELOG.md](https://github.com/grafana/grafana/blob/master/CHANGELOG.md). - -## Highlights - -Grafana 6.4 comes with a lot of new features and enhancements backed with tons of work around the data models and query execution that is going to enable powerful future capabilities. -Some of those new capabilities can already be seen in this release, like sharing query results between panels. 
- -- [**Explore:** Go back to dashboard (with query changes)]({{< relref "#go-back-to-dashboard-from-explore" >}}) -- [**Explore:** Live tailing improvements]({{< relref "#live-tailing-improvements" >}}) -- **Loki:** Show logs as annotations in dashboard graphs -- **Loki:** Use Loki in dashboard panels -- [**Panels:** New logs panel]({{< relref "#new-logs-panel" >}}) -- [**Panels:** Data links improvements]({{< relref "#data-links-improvements" >}}) -- [**Graph:** Series override to turn constant (point) into a line]({{< relref "#series-override-to turn-constant-into-a-line" >}}) -- [**Dashboard:** Share query results between panels]({{< relref "#share-query-results-between-panels" >}}) -- [**Plugins:** Alpha version of grafana-toolkit]({{< relref "#alpha-version-of-grafana-toolkit" >}}) -- [**Image Rendering:** PhantomJS deprecation]({{< relref "#phantomjs-deprecation" >}}) -- [**Docker:** Alpine based docker image]({{< relref "#alpine-based-docker-image" >}}) -- [**LDAP:** Debug UI]({{< relref "#ldap-debug-ui" >}}) -- [**Enterprise**: Reporting]({{< relref "#reporting" >}}) -- [**Enterprise**: GitLab OAuth Team Sync support]({{< relref "#gitlab-oauth-team-sync-support" >}}) -- [**Enterprise**: Teams and LDAP Improvements]({{< relref "#ldap-teams" >}}) - -### Go back to dashboard from Explore - -To help accelerate workflows that involve regularly switching from Explore to a dashboard and vice-versa, we've added the ability to return to the origin dashboard -after navigating to Explore from the panel's dropdown. - -{{< figure src="/static/img/docs/v60/explore_panel_menu.png" caption="Screenshot of the new Explore Icon" >}} - -After you've navigated to Explore, you should notice a "Back" button in the Explore toolbar. - - - -Simply clicking the button will return you to the origin dashboard, or, if you'd like to bring changes you make in Explore back to the dashboard, simply click -the arrow next to the button to reveal a "Return to panel with changes" menu item. 
- - - -### Live tailing improvements - -With 6.4 version you can now pause the live tail view to see the last 1000 lines of logs without being interrupted by new logs coming in. You can either pause manually with pause button or the live tailing will automatically pause when you scroll up to see older logs. To resume you just hit the resume button to continue live tailing. - -We also introduced some performance optimizations to allow live tailing of higher throughput log streams and various UI fixes and improvements like more consistent styling and fresh logs highlighting. - - - -### New Logs Panel - -The logs panel shows log lines from datasources that support logs, e.g., Elastic, Influx, and Loki. Typically you would use this panel next to a graph panel to display the log output of a related process. - - - -Limitations: Even though Live tailing can be enabled on logs panels in dashboards, we recommend using Live tailing in Explore. On dashboards, the refresher at the top of the page should be used instead to keep the data of all panels in sync. Note that the logs panel is still beta and we're looking to get feedback. - -## Data Links improvements - -With Grafana 6.3 we introduced a new way of creating [Data Links](https://grafana.com/blog/2019/08/27/new-in-grafana-6.3-easy-to-use-data-links/). -Grafana 6.4 improves Data Links and adds them to the Gauge and Bar Gauge and panels. - -With Data Links you can define dynamic links to other dashboards and systems. The link can now reference template variables and query results like series name and labels, field name, value and time. - -For more information about Data Links, refer to [data link](https://grafana.com/docs/features/panels/graph/#data-link) - -## Series override to turn constant into a line - -Some graph query results are made up only of one datapoint per series but can be shown in the graph panel with the help of [series overrides](/features/panels/graph/#series-overrides). 
-To show a horizontal line through the Y-value of the datapoint across the whole graph, add a series override and select `Transform > constant`. - - - -## Share query results between panels - -Grafana 6.4 continues the work started in 6.3 of creating a data model and query execution lifecycle that can support robust analytics and streaming. These changes are mostly structural and lay the foundation for powerful features in future releases. - -The first new feature all these changes have enabled is the ability to share query results between panels. So for example if you have an expensive query you can visualize the same results in a graph, table and singlestat panel. To reuse another panel’s query result select the data source named `-- Dashboard --` and then select the panel. - -To make the sharing of query results even more powerful we are introducing a transformation step as well that allows you to select specific parts of the query result and transform it. This new transformation feature is in [alpha](https://grafana.com/docs/administration/configuration/#enable-alpha) state and has to be enabled in the config file. - -DataFrame, our primary data model, has now a [columnar](https://en.wikipedia.org/wiki/Column-oriented_DBMS) layout. This -will support easier frontend processing. The DataSource query interface has been updated to better support streaming. -The result can now either return a `Promise` or `Observable`. Be on the lookout for more on live data -streaming in the future! - -## Alpha version of grafana-toolkit - -[grafana-toolkit](https://www.npmjs.com/package/@grafana/toolkit/v/6.4.0-beta.1) is our attempt to simplify the life of plugin developers. It’s a CLI that helps them focus on the core value of their plugin rather than the ceremony around setting up the environment, configs, tests and builds. It’s available as an NPM package under `next` tag. 
-
-You can read more about the grafana-toolkit [in the Readme](https://github.com/grafana/grafana/blob/master/packages/grafana-toolkit/README.md) and play with it by trying out our [react panel](https://github.com/grafana/simple-react-panel) or [angular panel](https://github.com/grafana/simple-angular-panel) templates.
-
-## PhantomJS deprecation
-
-[PhantomJS](https://phantomjs.org/), which is used for rendering images of dashboards and panels, has been deprecated and will be removed in a future Grafana release. A deprecation warning will from now on be logged when Grafana starts up if PhantomJS is in use.
-
-Please consider migrating from PhantomJS to the [Grafana Image Renderer plugin](https://grafana.com/grafana/plugins/grafana-image-renderer).
-
-## Alpine-based Docker image
-
-Grafana’s Docker image is now based on Alpine 3.10 and should from now on report zero vulnerabilities when scanning the image for security vulnerabilities.
-
-## LDAP Debug UI
-
-After listening to customer feedback, we have been working on improving the experience to set up authentication and synchronization with LDAP. We're happy to present the new LDAP Debug View.
-
-You'll be able to see how a user authenticating with LDAP would be mapped and whether your LDAP integration is working correctly. Furthermore, it provides a simpler method to test your integration with LDAP server(s) and have a clear view of how attributes are mapped between both systems.
-
-The feature is currently limited to Grafana Server Admins.
-
-For more information on how to use this new feature, follow the [guide]({{< relref "../setup-grafana/configure-security/configure-authentication/ldap/#ldap-debug-view" >}}).
-
-## Grafana Enterprise
-
-### Reporting
-
-A common request from Enterprise users has been to be able to set up reporting for Grafana, and now it’s here. 
A report is simply a PDF of a Grafana dashboard, outside of just generating a PDF you can set up a schedule so that you can get the report emailed to yourself (or whoever is interested) whenever it suits you. - -This feature is currently limited to Organization Admins. - -{{< figure src="/static/img/docs/v64/reports.jpeg" max-width="500px" caption="Reporting" >}} - -### GitLab OAuth Team Sync support - -GitLab OAuth gets support for Team Sync, making it possible to synchronize your GitLab Groups with Teams in Grafana. - -[Read more about Team Sync](https://grafana.com/docs/auth/team-sync/). - -## Upgrading - -See [upgrade notes](/installation/upgrading/#upgrading-to-v6-4). - -## Changelog - -Check out the [CHANGELOG.md](https://github.com/grafana/grafana/blob/master/CHANGELOG.md) file for a complete list of new features, changes, and bug fixes. diff --git a/docs/sources/whatsnew/whats-new-in-v6-5.md b/docs/sources/whatsnew/whats-new-in-v6-5.md deleted file mode 100644 index 9405e60f4b1..00000000000 --- a/docs/sources/whatsnew/whats-new-in-v6-5.md +++ /dev/null @@ -1,216 +0,0 @@ ---- -_build: - list: false -aliases: - - /docs/grafana/latest/guides/whats-new-in-v6-5/ - - /docs/grafana/latest/whatsnew/whats-new-in-v6-5/ -description: Feature and improvement highlights for Grafana v6.5 -keywords: - - grafana - - new - - documentation - - '6.5' - - release notes -title: What's new in Grafana v6.5 -weight: -24 ---- - -# What's new in Grafana v6.5 - -For all details, read the full [CHANGELOG.md](https://github.com/grafana/grafana/blob/master/CHANGELOG.md). 
- -## Highlights - -Grafana 6.5 comes with a lot of new features and enhancements: - -- [**Docker:** Ubuntu-based images and more]({{< relref "#ubuntu-based-docker-images" >}}) -- [**CloudWatch:** Major rewrite and lots of enhancements]({{< relref "#cloudwatch-data-source-improvements" >}}) -- [**Templating:** Dynamic typeahead queries using $__searchFilter]({{< relref "#dynamic-typeahead-support-in-query-variables" >}}) -- [**Graphite:** Support for additional Metrictank functionality]({{< relref "#graphite-support-for-additional-metrictank-functionality" >}}) -- [**Explore:** New log row details view]({{< relref "#explore-logs-log-row-details" >}}) -- [**Explore:** Turn parts of log message into a link using derived fields]({{< relref "#loki-explore-derived-fields" >}}) -- [**Explore:** Time-sync of split views]({{< relref "#time-sync-of-split-views-in-explore" >}}) -- [**Explore**: Hover/tooltip support in graphs]({{< relref "#explore-metrics-graph-hover-tooltip" >}}) -- [**Azure Monitor**: Alerting support for Azure Application Insights]({{< relref "#alerting-support-for-azure-application-insights" >}}) -- [**Provisioning**: Allow saving of provisioned dashboards from UI]({{< relref "#allow-saving-of-provisioned-dashboards-from-ui" >}}) -- [**Auth Proxy:** Mix auth proxy with Grafana login token and session cookie]({{< relref "#mix-auth-proxy-with-grafana-login-token-and-session-cookie" >}}) -- [**OAuth:** Generic OAuth now supports role mapping]({{< relref "#generic-oauth-role-mapping" >}}) -- [**Image Rendering:** Quick update since Grafana 6.4]({{< relref "#image-renderer-plugin" >}}) - -### Ubuntu-based Docker images - -In Grafana [v6.4]({{< relref "whats-new-in-v6-4/#alpine-based-docker-image" >}}), we switched the Grafana Docker image from Ubuntu to Alpine. This change provides a more secure and lightweight Docker image. - -This change has received both negative and positive feedback as well as some bug reports. 
We learned that switching to an Alpine-based Docker image was a big breaking change for a lot of users. We should have more clearly highlighted this in the blog post, release notes, changelog, and the [Docker Hub readme](https://hub.docker.com/r/grafana/grafana).
-
-We also broke the Docker images for ARM, but this is fixed in Grafana v6.5.
-
-Grafana Docker images should be as secure as possible by default and that’s why the Alpine-based Docker images will continue to be the Grafana default (`grafana/grafana:<version>`). With that said, it’s good to give users options, and that’s why starting from Grafana v6.5, Ubuntu-based Docker images are also (`grafana/grafana:<version>-ubuntu`) available.
-
-Read more about [Installing using Docker]({{< relref "../setup-grafana/installation/docker/" >}}).
-
-### CloudWatch data source improvements
-
-In this release, several feature improvements and additions were made in the CloudWatch data source. This work has been done in collaboration with the Amazon CloudWatch team.
-
-#### GetMetricData API
-
-For Grafana version 6.5 or higher, all API requests to GetMetricStatistics have been replaced with calls to GetMetricData, following Amazon’s [best practice to use the GetMetricData API](https://aws.amazon.com/premiumsupport/knowledge-center/cloudwatch-getmetricdata-api) instead of GetMetricStatistics, because data can be retrieved faster at scale with GetMetricData. This change provides better support for CloudWatch metric math and enables the use of automatic search expressions.
-
-While GetMetricStatistics qualified for the CloudWatch API free tier, this is not the case for GetMetricData calls. For more information, please refer to the [CloudWatch pricing page](https://aws.amazon.com/cloudwatch/pricing/).
-
-#### Dynamic queries using dimension wildcards
-
-In Grafana 6.5 or higher, you can monitor a dynamic list of metrics by using the asterisk (\*) wildcard for one or more dimension values. 
- -{{< figure src="/static/img/docs/v65/cloudwatch-dimension-wildcard.png" max-width="800px" class="docs-image--right" caption="CloudWatch dimension wildcard" >}} - -The example queries all metrics in the namespace `AWS/EC2` with a metric name of `CPUUtilization` and _any_ value for the `InstanceId` dimension. This can help you monitor metrics for AWS resources, like EC2 instances or containers. For example, when new instances get created as part of an auto scaling event, they automatically appear in the graph without you having to track new instance IDs. You can click `Show Query Preview` to see the search expression that is automatically built to support wildcards. To learn more about search expressions, visit the [CloudWatch documentation](https://docs.aws.amazon.com/AmazonCloudWatch/latest/monitoring/search-expression-syntax.html). - -By default, the search expression is defined in such a way that the queried metrics must match the defined dimension names exactly. This means that in the example it only returns metrics with exactly one dimension with name ‘InstanceId’. - -You can untoggle `Match Exact` to include metrics that have other dimensions defined. Turning off `Match Exact` also creates a search expression even if you don’t use wildcards. We simply search for any metric that match at least the namespace, metric name, and all defined dimensions. - -#### Deep linking from Grafana panels to the CloudWatch console - -{{< figure src="/static/img/docs/v65/cloudwatch-deep-linking.png" max-width="500px" class="docs-image--right" caption="CloudWatch deep linking" >}} - -Left-clicking a time series in the panel displays a context menu with a link to `View in CloudWatch console`. Clicking that link opens the CloudWatch console and displays all the metrics for that query. If you are not currently logged in to the CloudWatch console, then the link opens the login page. 
The link is valid for any account, but it only displays the right metrics if you are logged in to the account that corresponds to the selected data source in Grafana. - -This feature is not available for metrics based on math expressions. - -#### Improved feedback when throttling occurs - -If the [limit of the GetMetricData API](https://docs.aws.amazon.com/AmazonCloudWatch/latest/monitoring/cloudwatch_limits.html) is reached, either the transactions per second limit or the data points per second limit, then a throttling error will be returned by the CloudWatch API. Throttling limits are defined per account and region, so the alert modal indicates which data source got throttled in which region. A link to request a limit increase for the affected region is provided, but you will have to log in to the correct account. For example, for us-east-1, a limit increase can be requested on [AWS console](https://console.aws.amazon.com/servicequotas/home?region=us-east-1#!/services/monitoring/quotas/L-5E141212). - -#### Multi-value template variables now use search expressions - -When defining dimension values based on multi-valued template variables, we now use search expressions to query for the matching metrics. This enables the use of multiple template variables in one query and also allows you to use template variables for queries that have the `Match Exact` option disabled. - -Search expressions are currently limited to 1024 characters, so your query may fail if you have a long list of values. We recommend using the asterisk (\*) wildcard instead of the `All` option if you want to query all metrics that have any value for a certain dimension name. - -The use of multi-valued template variables is only supported for dimension values. Using multi-valued template variables for `Region`, `Namespace`, or `Metric Name` is not supported. 
- -#### Curated Dashboards - -The updated CloudWatch data source is shipped with pre-configured dashboards for five of the most popular AWS services: - -- Amazon Elastic Compute Cloud `Amazon EC2` -- Amazon Elastic Block Store `Amazon EBS` -- AWS Lambda `AWS Lambda` -- Amazon CloudWatch Logs `Amazon CloudWatch Logs` -- Amazon Relational Database Service `Amazon RDS` - -To import the pre-configured dashboards, go to the configuration page of your CloudWatch data source and click on the `Dashboards` tab. Click `Import` for the dashboard you would like to use. To customize the dashboard, we recommend to save the dashboard under a different name, because otherwise the dashboard will be overwritten when a new version of the dashboard is released. - -{{< figure src="/static/img/docs/v65/cloudwatch-dashboard-import.png" max-width="600px" caption="CloudWatch dashboard import" >}} - -### Dynamic typeahead support in query variables - -If you have a query variable that has many thousands of values it can be quite slow to search for a specific value in the dropdown. This is due to the fact that all that search filtering is happening in the browser. - -Using `__searchFilter` in the template variable query field you can filter the query results based on what the user types in the variable dropdown input. When nothing has been entered by the user the default value for `__searchFilter` is `*` , `.*` or `%` depending on data source and formatting option. - -The example below shows how to use `__searchFilter` as part of the query field to enable searching for `server` while the user types in the dropdown select box. 
- -Query - -```bash -apps.$app.servers.$__searchFilter -``` - -TagValues - -```bash -tag_values(server, server=~${__searchFilter:regex}) -``` - -This feature is currently only supported by [Graphite]({{< relref "../datasources/graphite/#using-searchfilter-to-filter-results-in-query-variable" >}}), [MySQL]({{< relref "../datasources/mysql/#using-searchfilter-to-filter-results-in-query-variable" >}}) and [Postgres]({{< relref "../datasources/postgres/#using-searchfilter-to-filter-results-in-query-variable" >}}) data sources. - -### Graphite: Support for additional Metrictank functionality - -The Graphite data source now has an option to enable extra functionality when using [Metrictank](https://grafana.com/oss/metrictank/) as a Graphite datastore. -In the Datasource configuration for Graphite, you can change the type to Metrictank. -Metrictank returns 2 kinds of additional metadata along its responses: - -- **Performance information:** Time spent querying index, fetching data, running processing functions, the number of series and points fetched, cache hits/misses, etc. This can be useful for optimizing queries or tuning the chunk cache. -- **Lineage information about the returned series:** Which archive was fetched from (raw or rollup), which (if any) runtime consolidation was applied (using which processing function), etc. This is very useful information for anyone trying to understand how their data was generated and why it may not look as expected. - -To see the metadata response from Metrictank you can inspect the response using the Query Inspector found in the panel queries tab. -Grafana 6.5 includes a new `Panel Inspector` in alpha/preview where you also can see the metadata response from Metrictank. 
-You can try it out by enabling a feature flag in the Grafana configuration file: - -```bash -[feature_toggles] -enable = inspect -``` - -{{< figure src="/static/img/docs/v65/panel-inspector.png" max-width="400px" caption="New Panel Inspector modal" >}} - -In Grafana 6.6, this will have a more user friendly display. In the future, additional Metrictank functionality will become available when the Graphite datasource option is set to the `Metrictank` type. - -### Explore/Metrics: Graph hover/tooltip - -We finally got around to implementing the series hover that shows values of the timeseries you hover over. This has been a requested feature ever since Explore was released. The graph component has been rewritten from scratch, making it more composable for future interactions with the graph data. - -{{< figure src="/static/img/docs/v65/explore_tooltip.png" max-width="500px" caption="Explore graph tooltip/hover" >}} - -### Explore/Logs: Log row details - -We have massively simplified the way we display both log row labels/fields as well as parsed fields by putting them into an extendable area in each row. - -So far labels had been squashed into their own column, making long label values difficult to read or interact with. Similarly, the parsed fields (available for logfmt and JSON structured logs) were too fiddly for mouse interaction. To solve this we took both and put them into a collapsed area below each row for more robust interaction. We have also added the ability to filter out labels, i.e., turn them into a negative filter on click (in addition to a positive filter). - -{{< figure src="/static/img/docs/v65/explore_log_details.gif" caption="Explore Log row details" >}} - -### Loki/Explore: Derived fields - -Derived fields allow any part of a log message to be turned into a link. Leaning on the concept of data links for graphs, we've extended the log result viewer in Explore to turn certain parsed fields into a link, based on a pattern to match. 
- -This allows you to turn an occurrence of e.g., `traceId=624f706351956b81` in your log line, into a link to your distributed tracing system to view that trace. The configuration for the patterns to match can be found in the datasource settings. - -This release starts with support for Loki, but we will bring this concept to other data sources soon. - -### Time-sync of split views in Explore - -In the Explore split view, you can now link the two timepickers so that if you change one, the other gets changed as well. This helps with keeping start and end times of the split view queries in sync and will ensure that you're looking at the same time interval in both split panes. - -{{< figure src="/static/img/docs/v65/explore_time_sync.gif" caption="Time-sync of split views in Explore" >}} - -### Alerting support for Azure Application Insights - -The [Azure Monitor]({{< relref "../datasources/azuremonitor/" >}}) data source supports multiple services in the Azure cloud. Before Grafana v6.5, only the Azure Monitor service had support for [Grafana Alerting]({{< relref "../alerting/" >}}). In Grafana 6.5, alerting support has been implemented for the [Application Insights service]({{< relref "../datasources/azuremonitor/#querying-the-application-insights-service" >}}). - -### Allow saving of provisioned dashboards from UI - -Historically it has been possible to make changes to a provisioned dashboard in the Grafana UI. However, it hasn't been possible to save the changes without manual intervention. In Grafana 6.5 we introduce a new dashboard provisioning setting named `allowUiUpdates`. If `allowUiUpdates` is set to `true` and you make changes to a provisioned dashboard, you can save the dashboard and the changes will be persisted to the Grafana database. - -Read more about this new feature in [Provisioning Grafana]({{< relref "../administration/provisioning/#making-changes-to-a-provisioned-dashboard" >}}). 
- -### Mix auth proxy with Grafana login token and session cookie - -With the new setting, `enable_login_token`, set to true Grafana will, after successful auth proxy header validation, assign the user a login token and cookie. You only have to configure your auth proxy to provide headers for the /login route. Requests via other routes will be authenticated using the cookie. - -Read more about this new feature in [Auth Proxy Authentication]({{< relref "../setup-grafana/configure-security/configure-authentication/auth-proxy/#login-token-and-session-cookie" >}}) - -### Generic OAuth role mapping - -Grafana 6.5 makes it possible to configure Generic OAuth to map a certain response from OAuth provider to a certain Grafana organization role, similar to the existing [LDAP Group Mappings]({{< relref "../setup-grafana/configure-security/configure-authentication/ldap/#group-mappings" >}}) feature. The new setting is named `role_attribute_path` and expects a [JMESPath](http://jmespath.org/) expression. - -Read more about this new feature in [Generic OAuth Authentication]({{< relref "../setup-grafana/configure-security/configure-authentication/generic-oauth/" >}}) and make sure to check out the [JMESPath examples]({{< relref "../setup-grafana/configure-security/configure-authentication/generic-oauth/#jmespath-examples" >}}). - -### Image renderer plugin - -Since we announced the deprecation of PhantomJS and the new [Image Renderer Plugin](https://grafana.com/grafana/plugins/grafana-image-renderer) in Grafana [6.4]({{< relref "whats-new-in-v6-4/#phantomjs-deprecation" >}}), we’ve received bug reports and valuable feedback. - -In Grafana 6.5 we’ve updated documentation to make it easier to understand how to install and troubleshoot possible problems. Read more about [Image Rendering]({{< relref "../setup-grafana/image-rendering/" >}}). - -Please try the [Image Renderer plugin](https://grafana.com/grafana/plugins/grafana-image-renderer) and let us know what you think. 
- -## Upgrading - -See [upgrade notes]({{< relref "../setup-grafana/upgrade-grafana/#upgrading-to-v6-5" >}}). - -## Changelog - -Check out [CHANGELOG.md](https://github.com/grafana/grafana/blob/master/CHANGELOG.md) for a complete list of new features, changes, and bug fixes. diff --git a/docs/sources/whatsnew/whats-new-in-v6-6.md b/docs/sources/whatsnew/whats-new-in-v6-6.md deleted file mode 100644 index 30a4d2fe85b..00000000000 --- a/docs/sources/whatsnew/whats-new-in-v6-6.md +++ /dev/null @@ -1,225 +0,0 @@ ---- -_build: - list: false -aliases: - - /docs/grafana/latest/guides/whats-new-in-v6-6/ - - /docs/grafana/latest/whatsnew/whats-new-in-v6-6/ -description: Feature and improvement highlights for Grafana v6.6 -keywords: - - grafana - - new - - documentation - - '6.6' - - release notes -title: What's new in Grafana v6.6 -weight: -25 ---- - -# What's new in Grafana v6.6 - -For all details, read the full [CHANGELOG.md](https://github.com/grafana/grafana/blob/master/CHANGELOG.md). - -## Highlights - -Grafana 6.6 comes with a lot of new features and enhancements: - -- [**Panels:** New stat panel]({{< relref "#new-stat-panel" >}}) -- [**Panels:** Auto min/max for Bar Gauge/Gauge/Stat]({{< relref "#auto-min-max" >}}) -- [**Panels:** News panel]({{< relref "#news-panel" >}}) -- [**Panels:** Custom data units]({{< relref "#custom-data-units" >}}) -- [**Panels:** Bar Gauge unfilled option]({{< relref "#bar-gauge-unfilled-option" >}}) -- [**TimePicker:** New design & features]({{< relref "#new-time-picker" >}}) -- [**Alerting enhancements**]({{< relref "#alerting-enhancements" >}}) -- [**Explore:** Added log message line wrapping options for logs]({{< relref "#explore-logs-panel-log-message-line-wrapping-options" >}}) -- [**Explore:** Column with unique log labels ]({{< relref "#explore-logs-panel-column-with-unique-log-labels" >}}) -- [**Explore:** Context tooltip]({{< relref "#explore-context-tooltip" >}}) -- **Explore:** Added ability to specify step with Prometheus 
queries -- **Graphite:** Added Metrictank dashboard to Graphite datasource -- **Loki:** Support for template variable queries -- **Postgres/MySQL/MSSQL:** Added support for region annotations -- [**Security:** Added disabled option for cookie sameSite attribute]({{< relref "#cookie-management-modifications" >}}) -- **TablePanel, GraphPanel:** Exclude hidden columns from CSV -- [**Enterprise:** White labeling]({{< relref "#enterprise-white-labeling" >}}) -- [**Enterprise:** APT and YUM repositories]({{< relref "#enterprise-apt-and-yum-repositories" >}}) -- [**Stackdriver:** Meta labels]({{< relref "#stackdriver-meta-labels" >}}) -- [**CloudWatch:** Calculate period based on time range]({{< relref "#cloudwatch-calculate-period-based-on-time-range" >}}) -- [**CloudWatch:** Display partial result in graph when max DP/call limit is reached]({{< relref "#cloudwatch-display-partial-result-in-graph-when-max-data-points-per-call-limit-is-reached" >}}) - -## New stat panel - -{{< figure src="/static/img/docs/v66/stat_panel_dark2.png" max-width="1024px" caption="Stat panel" >}} - -This release adds a new panel named `Stat`. This panel is designed to replace the current `Singlestat` as the primary way to show big single number panels along with a sparkline. This panel is of course building on our new panel infrastructure and option design. So, you can use the new threshold UI and data links. It also supports the same repeating feature as the Gauge and Bar Gauge panels, meaning it will repeat a separate visualization for every series or row -in the query result. 
-
-Key features:
-
-- Automatic font size handling
-- Automatic layout handling based on panel size
-- Colors based on thresholds that adapt to light or dark theme
-- Data links support
-- Repeats horizontally or vertically for every series, row, or column
-
-Here is how it looks in light theme:
-
-{{< figure src="/static/img/docs/v66/stat_panel_light.png" max-width="1024px" caption="Stat panel" >}}
-
-## Auto min-max
-
-For the panels Gauge, Bar Gauge, and Stat, you can now leave the min and max settings empty. Grafana will, in that case, calculate the min and max based on all the data.
-
-## News panel
-
-This panel shows RSS feeds as news items in the default home dashboard for v6.6. Add it to your custom home dashboards to keep up-to-date with Grafana news, or switch the default RSS feed to one of your choice.
-
-{{< figure src="/static/img/docs/v66/news_panel.png" max-width="600px" caption="News panel" >}}
-
-## Custom data units
-
-A top feature request for years is now finally here. All panels now support custom units. Type any text in the unit picker and select the `Custom: <your unit>` option. By default, the text will be used as a suffix unit. If you want a custom prefix, then type `prefix: <your unit>` to make the custom unit appear before the value. If you want a custom SI unit (with auto SI suffixes) specify `si:Ups`. A value like 1000 will be rendered as `1 kUps`.
-
-{{< figure src="/static/img/docs/v66/custom_unit_burger1.png" max-width="600px" caption="Custom unit" >}}
-
-You can also paste a native emoji in the unit picker and pick it as a custom unit:
-
-{{< figure src="/static/img/docs/v66/custom_unit_burger2.png" max-width="600px" caption="Custom unit emoji" >}}
-
-## Bar Gauge unfilled option
-
-The Bar Gauge visualization has a new display option: `Unfilled`. This new option is enabled by default, so it will change how this visualization is displayed on old dashboards. 
If you prefer the old default -- in which an unfilled area is not shown, and the value follows directly after -- you have to update the visualization settings. -{{< figure src="/static/img/docs/v66/bar_gauge_unfilled.png" max-width="900px" caption="Bar gauge unfilled" >}} - -## New time picker - -The time picker has gotten a major design update. Key changes: - -- Quickly access the absolute from and to input fields without an extra click. -- Calendar automatically shows when from or to inputs have focus. -- A single calendar view can be used to select and show the from and to date. -- You can now select recent absolute ranges. - -{{< figure src="/static/img/docs/v66/time_picker_update.png" max-width="700px" caption="New time picker" >}} - -## Alerting enhancements - -- We have introduced a new configuration for enforcing a minimal interval between evaluations to reduce load on the backend. -- The email notifier can now optionally send a single email to all recipients. -- OpsGenie, PagerDuty, Threema, and Google Chat notifiers have been updated to send additional information. - -## Cookie management modifications - -In order to align with a [change in Chrome 80](https://www.chromestatus.com/feature/5088147346030592), a breaking change has been introduced to Grafana's [`cookie_samesite` setting]({{< relref "../setup-grafana/configure-grafana/#cookie-samesite" >}}). Grafana now properly renders cookies with the `SameSite=None` attribute when this setting is `none`. The previous behavior of `none` was to omit the `SameSite` attribute from cookies. Grafana will use the previous behavior when `cookie_samesite` is set to `disabled`. - -Read more about this in the [upgrade notes]({{< relref "../setup-grafana/upgrade-grafana/#important-changes-regarding-samesite-cookie-attribute" >}}). 
-
-## Explore/Logs Panel: Log message line wrapping options
-
-We introduced the wrap-lines option for logs because some of our users feel it's more efficient to see one line per log message. The wrapped-line option is set as a default; the unwrapped setting results in horizontal scrolling.
-
-{{< figure src="/static/img/docs/v66/explore_wrap_lines.gif" max-width="600px" caption="Log message line wrapping" >}}
-
-## Explore/Logs Panel: Column with unique log labels
-
-After feedback from our community, we have decided to reintroduce a labels column. However, for better readability and usefulness, we have transformed it into a Unique labels column which includes only non-common labels. All common labels are displayed above.
-
-{{< figure src="/static/img/docs/v66/explore_labels_column.png" max-width="600px" caption="Unique log labels column" >}}
-
-## Explore: Context tooltip
-
-Isolating a series from a big set of lines in a graph is important for drill-downs. That's why we have implemented the context tooltip in Explore, which allows you to copy data and labels from it to further refine the query.
-
-{{< figure src="/static/img/docs/v66/explore_context_tooltip.png" max-width="600px" caption="Explore context tooltip" >}}
-
-## Enterprise: White labeling
-
-This release adds new white labeling options to the grafana.ini file (can also be set via ENV variables). 
- -```bash -[white_labeling] -# Set to complete URL to override login logo -login_logo = https://my.logo.url/images/logo.png - -# Set to complete css background expression to override login background -login_background = url(http://www.bhmpics.com/wallpapers/starfield-1920x1080.jpg) - -# Set to complete URL to override menu logo -menu_logo = https://my.logo.url/images/logo_icon.png - -# Set to complete URL to override fav icon (icon shown in browser tab) -fav_icon = https://my.logo.url/images/logo_icon_32px.png - -# Set to complete URL to override apple/ios icon -apple_touch_icon = https://my.logo.url/images/logo_icon_32px.png - -# Below is an example for how to replace the default footer & help links with 2 custom links -footer_links = support guides -footer_links_support_text = Support -footer_links_support_url = http://your.support.site -footer_links_guides_text = Guides -footer_links_guides_url = http://your.guides.site -``` - -Customize the login page, side menu bar, and footer links. - -{{< figure src="/static/img/docs/v66/whitelabeling_1.png" max-width="700px" caption="White labeling example" >}} - -## Enterprise APT and YUM repositories - -Now you can install the enterprise edition from the APT and YUM repository. 
The following table shows the APT repository for each Grafana version (for instructions read the [installation notes]({{< relref "../setup-grafana/installation/debian/#install-from-apt-repository" >}})) : - -| Grafana Version | Package | Repository | -| ------------------------- | ------------------ | --------------------------------------------------------- | -| Grafana OSS | grafana | `https://packages.grafana.com/oss/deb stable main` | -| Grafana OSS (Beta) | grafana | `https://packages.grafana.com/oss/deb beta main` | -| Grafana Enterprise | grafana-enterprise | `https://packages.grafana.com/enterprise/deb stable main` | -| Grafana Enterprise (Beta) | grafana-enterprise | `https://packages.grafana.com/enterprise/deb beta main` | - -The following table shows the YUM repositories for each Grafana version (for instructions read the [installation notes]({{< relref "../setup-grafana/installation/rpm/#install-from-yum-repository" >}})) : - -| Grafana Version | Package | Repository | -| ------------------------- | ------------------ | -------------------------------------------------- | -| Grafana OSS | grafana | `https://packages.grafana.com/oss/rpm` | -| Grafana OSS (Beta) | grafana | `https://packages.grafana.com/oss/rpm-beta` | -| Grafana Enterprise | grafana-enterprise | `https://packages.grafana.com/enterprise/rpm` | -| Grafana Enterprise (Beta) | grafana-enterprise | `https://packages.grafana.com/enterprise/rpm-beta` | - -We recommend all users to install the Enterprise Edition of Grafana, which can be seamlessly upgraded with a Grafana Enterprise [subscription](https://grafana.com/products/enterprise/?utm_source=grafana-install-page). - -## Stackdriver: Meta labels - -From now on it will be possible to utilize meta data label in "group bys", filters and in the alias field. Unfortunately, there's no API to retrieve all the labels, but the group by field dropdown comes with a pre-defined list of common system labels. 
User labels cannot be pre-defined, but it's possible to enter them manually in the group by field. If a meta data label, user label or system label, is included in the group by segment, it will be possible to create filters based on it and to expand its value on the alias field. - -{{< figure src="/static/img/docs/v66/metadatalabels.gif" max-width="800px" caption="Stackdriver meta labels" >}} - -## CloudWatch: Calculate period based on time range - -When the period field was left blank in Grafana 6.5, it would default to 60 seconds. In case users issued queries with a large time span, there was a high risk that they would reach the 100,800 data points per request limit in the Get Metric Data (GMD) API. When the period field is left blank in Grafana 6.6, the period will be calculated automatically based on the time range. The formula that is used is `time range in seconds / 2000`, and then we snap to next higher value in an array of pre-defined periods `[60, 300, 900, 3600, 21600, 86400]`. This will reduce the risk for receiving a `Too many datapoints requested` error in the panel. - -## CloudWatch: Display partial result in graph when max data points per call limit is reached - -In case all queries in a GMD call are metric stat (not using math expressions), Grafana will paginate the response until all data points are received. But pagination is not supported in case a math expression is being used, so in that case it's not possible to receive more than 100,800 data points. Previously when that limit was reached, we only displayed an error message. In Grafana 6.6, we also display the 100,800 data points that were received in the graph. - -## Upgrading - -See [upgrade notes]({{< relref "../setup-grafana/upgrade-grafana/#upgrading-to-v6-6" >}}). - -## Changelog - -Check out [CHANGELOG.md](https://github.com/grafana/grafana/blob/master/CHANGELOG.md) for a complete list of new features, changes, and bug fixes. 
- -## Notice about upcoming changes in backendSrv for plugin authors - -In our mission to migrate away from AngularJS to React we have removed all AngularJS dependencies in the core data retrieval service `backendSrv`. This change is already in master and will be introduced in the next `major` Grafana release. - -Removing the AngularJS dependencies in `backendSrv` has the unfortunate side effect of AngularJS digest no longer being triggered for any request made with `backendSrv`. Because of this, external plugins using `backendSrv` directly may suffer from strange behaviour in the UI. - -To remedy this issue as a plugin author you need to trigger the digest after a direct call to `backendSrv`. - -Example: - -```js -backendSrv.get(‘http://your.url/api’).then(result => { - this.result = result; - this.$scope.$digest(); -}); -``` diff --git a/docs/sources/whatsnew/whats-new-in-v6-7.md b/docs/sources/whatsnew/whats-new-in-v6-7.md deleted file mode 100644 index b77e9f418f7..00000000000 --- a/docs/sources/whatsnew/whats-new-in-v6-7.md +++ /dev/null @@ -1,105 +0,0 @@ ---- -_build: - list: false -aliases: - - /docs/grafana/latest/guides/whats-new-in-v6-7/ - - /docs/grafana/latest/whatsnew/whats-new-in-v6-7/ -description: Feature and improvement highlights for Grafana v6.7 -keywords: - - grafana - - new - - documentation - - '6.7' - - release notes -title: What's New in Grafana v6.7 -weight: -26 ---- - -# What's new in Grafana v6.7 - -This topic includes the release notes for the Grafana v6.7. For all details, read the full [CHANGELOG.md](https://github.com/grafana/grafana/blob/master/CHANGELOG.md). 
- -Grafana 6.7 comes with a lot of new features and enhancements: - -- [**Dashboard:** Enforce minimum refresh interval]({{< relref "#enforce-minimum-dashboard-refresh-interval" >}}) -- **Data source:** Google Sheets data source -- [**Explore:** Query history]({{< relref "#query-history" >}}) -- [**Authorization:** Azure OAuth]({{< relref "#azure-oauth" >}}) -- [**Stackdriver:** Project Selector]({{< relref "#stackdriver-project-selector" >}}) -- [**Enterprise:** White Labeling for application title]({{< relref "#white-labeling-for-application-title" >}}) -- [**Enterprise:** Reporting configuration for timeout and concurrency]({{< relref "#reporting-configuration-for-timeout-and-concurrency" >}}) -- [**Enterprise:** Export dashboard as pdf]({{< relref "#export-dashboard-as-pdf" >}}) -- [**Enterprise:** Report landscape mode]({{< relref "#report-landscape-mode" >}}) -- [**Enterprise:** Azure OAuth Team Sync support]({{< relref "#azure-oauth-team-sync-support" >}}) - -## General features - -General features are included in all Grafana editions. - -### Query history - -> BETA: Query history is a beta feature. It is local to your browser and is not shared with others. - -Query history is a new feature that lets you view and interact with the queries that you have previously run in Explore. You can add queries to the Explore query editor, write comments, create and share URL links, star your favorite queries, and much more. Starred queries are displayed in Starred tab, so it is easier to reuse queries that you run often without typing them from scratch. - -Learn more about query history in [Explore]({{< relref "../explore/" >}}). - -{{< figure src="/static/img/docs/v67/rich-history.gif" max-width="1024px" caption="Query history" >}} - -### Azure OAuth - -Grafana v6.7 comes with a new OAuth integration for Microsoft Azure Active Directory. You can now assign users and groups to Grafana roles from the Azure Portal. 
Learn how to enable and configure it in [Azure AD OAuth2 authentication]({{< relref "../setup-grafana/configure-security/configure-authentication/azuread/" >}}). - -### Enforce minimum dashboard refresh interval - -Allowing a low dashboard refresh interval can cause severe load on data sources and Grafana. Grafana v6.7 allows you to restrict the dashboard refresh interval so it cannot be set lower than a given interval. This provides a way for administrators to control dashboard refresh behavior on a global level. - -Refer to min_refresh_interval in [Configuration]({{< relref "../setup-grafana/configure-grafana/#min-refresh-interval" >}}) for more information and how to enable this feature. - -### Stackdriver project selector - -A Stackdriver data source in Grafana is configured for one service account only. That service account is always associated with a default project in Google Cloud Platform (GCP). Depending on your setup in GCP, the service account might be granted access to more projects than just the default project. - -In Grafana 6.7, the query editor has been enhanced with a project selector that makes it possible to query different projects without changing datasource. Many thanks [Eraac](https://github.com/Eraac), [eliaslaouiti](https://github.com/eliaslaouiti), and [NaurisSadovskis](https://github.com/NaurisSadovskis) for making this happen! - -## Grafana Enterprise features - -General features are included in the Grafana Enterprise edition software. - -### White labeling customizes application title - -This release adds a new white labeling option to customize the application title. Learn how to configure it in [White labeling]({{< relref "../setup-grafana/enable-custom-branding/" >}}). - -``` -[white_labeling] -# Set to your company name to override application title -app_title = Your Company -``` - -### Configure reporting for timeout and concurrency - -This release adds more configuration for the reporting feature rendering requests. 
You can set the panel rendering request timeout and the maximum number of concurrent calls to the rendering service in your configuration. Learn how to do it in [Reporting]({{< relref "../enterprise/reporting/" >}}). - -``` -[reporting] -# Set timeout for each panel rendering request -rendering_timeout = 10s -# Set maximum number of concurrent calls to the rendering service -concurrent_render_limit = 10 -``` - -### Export dashboard as PDF - -This feature allows you to export a dashboard as a PDF document. All dashboard panels will be rendered as images and added into the PDF document. Learn more in [Export dashboard as PDF]({{< relref "../enterprise/export-pdf/" >}}). - -### Report landscape mode - -You can now use either portrait or landscape mode in your reports. Portrait will render three panels per page and landscape two. -{{< figure src="/static/img/docs/enterprise/reports_create_new.png" max-width="1024px" caption="New report" >}} - -[Reporting]({{< relref "../enterprise/reporting/" >}}) has been updated as a result of this change. - -### Azure OAuth Team Sync support - -When setting up OAuth with Microsoft Azure AD, you can now sync Azure groups with Teams in Grafana. -Learn more in [Team sync]({{< relref "../setup-grafana/configure-security/configure-team-sync/" >}}). 
From 433bb735de34ee243509f781aebb0530ffc68fdd Mon Sep 17 00:00:00 2001 From: Christopher Moyer <35463610+chri2547@users.noreply.github.com> Date: Fri, 15 Jul 2022 17:02:16 -0500 Subject: [PATCH 019/116] Docs: refactors configure standard options (#52278) * refactors configure standard options * makes prettier --- .../sources/dashboards/add-organize-panels.md | 8 +- docs/sources/datasources/mssql.md | 4 +- docs/sources/datasources/mysql.md | 4 +- docs/sources/datasources/postgres.md | 4 +- .../index.md} | 143 ++++++++++++------ .../apply-color-to-series.md | 34 ----- .../format-standard-fields.md | 27 ---- docs/sources/setup-grafana/upgrade-grafana.md | 2 +- docs/sources/visualizations/bar-chart.md | 2 +- docs/sources/visualizations/histogram.md | 2 +- docs/sources/visualizations/state-timeline.md | 2 +- docs/sources/visualizations/status-history.md | 2 +- .../time-series/change-axis-display.md | 2 +- .../time-series/graph-color-scheme.md | 2 +- .../time-series/graph-time-series-as-bars.md | 4 +- .../time-series/graph-time-series-as-lines.md | 6 +- 16 files changed, 113 insertions(+), 135 deletions(-) rename docs/sources/panels/{standard-field-definitions.md => configure-standard-options/index.md} (53%) delete mode 100644 docs/sources/panels/working-with-panels/apply-color-to-series.md delete mode 100644 docs/sources/panels/working-with-panels/format-standard-fields.md diff --git a/docs/sources/dashboards/add-organize-panels.md b/docs/sources/dashboards/add-organize-panels.md index 52854224f1c..b48598903fd 100644 --- a/docs/sources/dashboards/add-organize-panels.md +++ b/docs/sources/dashboards/add-organize-panels.md @@ -35,11 +35,7 @@ This section describes the areas of the Grafana panel editor. - **Transform tab -** Apply data transformations. For more information, refer to [Transform data]({{< relref "../panels/transform-data/" >}}). - **Alert tab -** Write alert rules. 
For more information, refer to [Overview of Grafana 8 alerting]({{< relref "../alerting/" >}}). -1. Panel display options: The display options section contains tabs where you configure almost every aspect of your data visualization, including: - - - [Apply color to series and fields]({{< relref "../panels/working-with-panels/apply-color-to-series/" >}}) - - [Format a standard field]({{< relref "../panels/working-with-panels/format-standard-fields/" >}}) - - [Add a title and description to a panel]({{< relref "../panels/working-with-panels/add-title-and-description/" >}}) +1. Panel display options: The display options section contains tabs where you configure almost every aspect of your data visualization. > Not all options are available for each visualization. @@ -101,7 +97,7 @@ Dashboards and panels allow you to show your data in visual form. Each panel nee - [Visualization-specific options]({{< relref "../visualizations/" >}}) - [Override field values]({{< relref "../panels/override-field-values/about-field-overrides/" >}}) - [Configure thresholds]({{< relref "../panels/configure-thresholds/" >}}) - - [Apply color to series and fields]({{< relref "../panels/working-with-panels/apply-color-to-series/" >}}) + - [Configure standard options]({{< relref "../panels/configure-standard-options/" >}}) 1. Add a note to describe the visualization (or describe your changes) and then click **Save** in the upper-right corner of the page. diff --git a/docs/sources/datasources/mssql.md b/docs/sources/datasources/mssql.md index 9a41a65ac80..f6dd2725d8c 100644 --- a/docs/sources/datasources/mssql.md +++ b/docs/sources/datasources/mssql.md @@ -182,7 +182,7 @@ A time series query result is returned in a [wide data frame format]({{< relref > For backward compatibility, there's an exception to the above rule for queries that return three columns including a string column named metric. 
Instead of transforming the metric column into field labels, it becomes the field name, and then the series name is formatted as the value of the metric column. See the example with the metric column below. -To optionally customize the default series name formatting, refer to [Standard field definitions]({{< relref "../panels/standard-field-definitions/#display-name" >}}). +To optionally customize the default series name formatting, refer to [Standard options definitions]({{< relref "../panels/configure-standard-options/#display-name" >}}). **Example with `metric` column:** @@ -226,7 +226,7 @@ GROUP BY ORDER BY 1 ``` -Given the data frame result in the following example and using the graph panel, you will get two series named _value 10.0.1.1_ and _value 10.0.1.2_. To render the series with a name of _10.0.1.1_ and _10.0.1.2_ , use a [Standard field definition]({{< relref "../panels/standard-field-definitions/#display-name" >}}) display name value of `${__field.labels.hostname}`. +Given the data frame result in the following example and using the graph panel, you will get two series named _value 10.0.1.1_ and _value 10.0.1.2_. To render the series with a name of _10.0.1.1_ and _10.0.1.2_ , use a [Standard options definitions]({{< relref "../panels/configure-standard-options/#display-name" >}}) display name value of `${__field.labels.hostname}`. Data frame result: diff --git a/docs/sources/datasources/mysql.md b/docs/sources/datasources/mysql.md index 41967f9f1d3..d712f8a9ebe 100644 --- a/docs/sources/datasources/mysql.md +++ b/docs/sources/datasources/mysql.md @@ -191,7 +191,7 @@ A time series query result is returned in a [wide data frame format]({{< relref > For backward compatibility, there's an exception to the above rule for queries that return three columns including a string column named metric. Instead of transforming the metric column into field labels, it becomes the field name, and then the series name is formatted as the value of the metric column. 
See the example with the metric column below.

-To optionally customize the default series name formatting, refer to [Standard field definitions]({{< relref "../panels/standard-field-definitions/#display-name" >}}).
+To optionally customize the default series name formatting, refer to [Standard options definitions]({{< relref "../panels/configure-standard-options/#display-name" >}}).

 **Example with `metric` column:**

@@ -233,7 +233,7 @@ GROUP BY time, hostname
 ORDER BY time
 ```

-Given the data frame result in the following example and using the graph panel, you will get two series named _value 10.0.1.1_ and _value 10.0.1.2_. To render the series with a name of _10.0.1.1_ and _10.0.1.2_ , use a [Standard field definition]({{< relref "../panels/standard-field-definitions/#display-name" >}}) display value of `${__field.labels.hostname}`.
+Given the data frame result in the following example and using the graph panel, you will get two series named _value 10.0.1.1_ and _value 10.0.1.2_. To render the series with a name of _10.0.1.1_ and _10.0.1.2_ , use a [Standard options definitions]({{< relref "../panels/configure-standard-options/#display-name" >}}) display value of `${__field.labels.hostname}`.

 Data frame result:

diff --git a/docs/sources/datasources/postgres.md b/docs/sources/datasources/postgres.md
index 1f3a7881bc1..c2d5d3e6349 100644
--- a/docs/sources/datasources/postgres.md
+++ b/docs/sources/datasources/postgres.md
@@ -196,7 +196,7 @@ A time series query result is returned in a [wide data frame format]({{< relref

 > For backward compatibility, there's an exception to the above rule for queries that return three columns including a string column named metric. Instead of transforming the metric column into field labels, it becomes the field name, and then the series name is formatted as the value of the metric column. See the example with the metric column below.
-To optionally customize the default series name formatting, refer to [Standard field definitions]({{< relref "../panels/standard-field-definitions/#display-name" >}}). +To optionally customize the default series name formatting, refer to [Standard options definitions]({{< relref "../panels/configure-standard-options/#display-name" >}}). **Example with `metric` column:** @@ -238,7 +238,7 @@ GROUP BY time, hostname ORDER BY time ``` -Given the data frame result in the following example and using the graph panel, you will get two series named _value 10.0.1.1_ and _value 10.0.1.2_. To render the series with a name of _10.0.1.1_ and _10.0.1.2_ , use a [Standard field definition]({{< relref "../panels/standard-field-definitions/#display-name" >}}) display value of `${__field.labels.hostname}`. +Given the data frame result in the following example and using the graph panel, you will get two series named _value 10.0.1.1_ and _value 10.0.1.2_. To render the series with a name of _10.0.1.1_ and _10.0.1.2_ , use a [Standard options definitions]({{< relref "../panels/configure-standard-options/#display-name" >}}) display value of `${__field.labels.hostname}`. 
Data frame result: diff --git a/docs/sources/panels/standard-field-definitions.md b/docs/sources/panels/configure-standard-options/index.md similarity index 53% rename from docs/sources/panels/standard-field-definitions.md rename to docs/sources/panels/configure-standard-options/index.md index 3128a6edfe8..e523a28e6fb 100644 --- a/docs/sources/panels/standard-field-definitions.md +++ b/docs/sources/panels/configure-standard-options/index.md @@ -1,15 +1,39 @@ --- aliases: + - /docs/grafana/latest/panels/working-with-panels/format-standard-fields/ - /docs/grafana/latest/panels/reference-standard-field-definitions/ - /docs/grafana/latest/panels/standard-field-definitions/ - - /docs/sources/panels/reference-standard-field-definitions/ -title: Standard field definitions -weight: 1200 +title: Configure standard options +menuTitle: Configure standard options +weight: 40 +keywords: + - xxx + - xxx --- -# Reference: Standard field definitions +# Configure standard options -This section explains all available standard options. They are listed in alphabetical order. +The data model used in Grafana, namely the [data frame]({{< relref "../../developers/plugins/data-frames/" >}}), is a columnar-oriented table structure that unifies both time series and table query results. Each column within this structure is called a _field_. A field can represent a single time series or table column. + +Field options allow you to change how the data is displayed in your visualizations. Options and overrides that you apply do not change the data, they change how Grafana displays the data. When you change an option, it is applied to all fields, meaning all series or columns. For example, if you change the unit to percentage, then all fields with numeric values are displayed in percentages. + +For a complete list of field formatting options, refer to [Standard options definitions]({{< relref "#standard-options-definitions" >}}). + +> You can apply standard options to most built-in Grafana panels. 
Some older panels and community panels that have not updated to the new panel and data model will be missing either all or some of these field options. + +1. Open a dashboard, click the panel title, and click **Edit**. + +1. In the panel display options pane, locate the **Standard options** section. + +1. Select the standard options you want to apply. + + For more information about standard options, refer to [Standard options definitions]({{< relref "#standard-options-definitions" >}}). + +1. To preview your change, click outside of the field option box you are editing or press **Enter**. + +## Standard options definitions + +This section explains all available standard options. You can apply standard options to most built-in Grafana panels. Some older panels and community panels that have not updated to the new panel and data model will be missing either all or some of these field options. @@ -17,53 +41,11 @@ Most field options will not affect the visualization until you click outside of > **Note:** We are constantly working to add and expand options for all visualization, so all options might not be available for all visualizations. -## Decimals - -Number of decimals to render value with. Leave empty for Grafana to use the number of decimals provided by the data source. - -To change this setting, type a number in the field and then click outside the field or press Enter. - -## Data links - -Lets you control the URL to which a value or visualization link. - -For more information and instructions, refer to [Data links]({{< relref "./configure-data-links/#data-links" >}}). - -## Display name - -Lets you set the display title of all fields. You can use [variables]({{< relref "../variables/" >}}) in the field title. - -When multiple stats, fields, or series are shown, this field controls the title in each stat. You can use expressions like `${__field.name}` to use only the series name or the field name in title. 
- -Given a field with a name of Temp, and labels of {"Loc"="PBI", "Sensor"="3"} - -| Expression syntax | Example | Renders to | Explanation | -| ---------------------------- | ----------------------- | ------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | -| `${__field.displayName}` | Same as syntax | `Temp {Loc="PBI", Sensor="3"}` | Displays the field name, and labels in `{}` if they are present. If there is only one label key in the response, then for the label portion, Grafana displays the value of the label without the enclosing braces. | -| `${__field.name}` | Same as syntax | `Temp` | Displays the name of the field (without labels). | -| `${__field.labels}` | Same as syntax | `Loc="PBI", Sensor="3"` | Displays the labels without the name. | -| `${__field.labels.X}` | `${__field.labels.Loc}` | `PBI` | Displays the value of the specified label key. | -| `${__field.labels.__values}` | Same as Syntax | `PBI, 3` | Displays the values of the labels separated by a comma (without label keys). | - -If the value is an empty string after rendering the expression for a particular field, then the default display method is used. - -## Max - -Lets you set the maximum value used in percentage threshold calculations. Leave blank for auto calculation based on all series and fields - -## Min - -Lets you set the minimum value used in percentage threshold calculations. Leave blank for auto calculation based on all series and fields - -## No value - -Enter what Grafana should display if the field value is empty or null. The default value is a hyphen (-). - -## Unit +### Unit Lets you choose what unit a field should use. Click in the **Unit** field, then drill down until you find the unit you want. The unit you select is applied to all fields except time. 
-### Custom units +#### Custom units You can use the unit dropdown to also specify custom units, custom prefix or suffix and date time formats. @@ -81,6 +63,67 @@ You can also paste a native emoji in the unit picker and pick it as a custom uni {{< figure src="/static/img/docs/v66/custom_unit_burger2.png" max-width="600px" caption="Custom unit emoji" >}} -### String units +#### String units Grafana can sometime be too aggressive in parsing strings and displaying them as numbers. To make Grafana show the original string create a field override and add a unit property with the `string` unit. + +### Min + +Lets you set the minimum value used in percentage threshold calculations. Leave blank for auto calculation based on all series and fields. + +### Max + +Lets you set the maximum value used in percentage threshold calculations. Leave blank for auto calculation based on all series and fields. + +### Decimals + +Number of decimals to render value with. Leave empty for Grafana to use the number of decimals provided by the data source. + +To change this setting, type a number in the field and then click outside the field or press Enter. + +### Display name + +Lets you set the display title of all fields. You can use [variables]({{< relref "../../variables/" >}}) in the field title. + +When multiple stats, fields, or series are shown, this field controls the title in each stat. You can use expressions like `${__field.name}` to use only the series name or the field name in title. 
+ +Given a field with a name of Temp, and labels of {"Loc"="PBI", "Sensor"="3"} + +| Expression syntax | Example | Renders to | Explanation | +| ---------------------------- | ----------------------- | ------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | +| `${__field.displayName}` | Same as syntax | `Temp {Loc="PBI", Sensor="3"}` | Displays the field name, and labels in `{}` if they are present. If there is only one label key in the response, then for the label portion, Grafana displays the value of the label without the enclosing braces. | +| `${__field.name}` | Same as syntax | `Temp` | Displays the name of the field (without labels). | +| `${__field.labels}` | Same as syntax | `Loc="PBI", Sensor="3"` | Displays the labels without the name. | +| `${__field.labels.X}` | `${__field.labels.Loc}` | `PBI` | Displays the value of the specified label key. | +| `${__field.labels.__values}` | Same as Syntax | `PBI, 3` | Displays the values of the labels separated by a comma (without label keys). | + +If the value is an empty string after rendering the expression for a particular field, then the default display method is used. + +### Color scheme + +The color options and their effect on the visualization depends on the visualization you are working with. Some visualizations have different color options. + +You can specify a single color, or select a continuous (gradient) color schemes, based on a value. +Continuous color interpolates a color using the percentage of a value relative to min and max. + +Select one of the following palettes: + +
+ +| Color mode | Description | +| ------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------- | +| **Single color** | Specify a single color, useful in an override rule | +| **From thresholds** | Informs Grafana to take the color from the matching threshold | +| **Classic palette** | Grafana will assign color by looking up a color in a palette by series index. Useful for Graphs and pie charts and other categorical data visualizations | +| **Green-Yellow-Red (by value)** | Continuous color scheme | +| **Blue-Yellow-Red (by value)** | Continuous color scheme | +| **Blues (by value)** | Continuous color scheme (panel background to blue) | +| **Reds (by value)** | Continuous color scheme (panel background color to blue) | +| **Greens (by value)** | Continuous color scheme (panel background color to blue) | +| **Purple (by value)** | Continuous color scheme (panel background color to blue) | + +{{< figure src="/static/img/docs/v73/color_scheme_dropdown.png" max-width="350px" caption="Color scheme" >}} + +### No value + +Enter what Grafana should display if the field value is empty or null. The default value is a hyphen (-). diff --git a/docs/sources/panels/working-with-panels/apply-color-to-series.md b/docs/sources/panels/working-with-panels/apply-color-to-series.md deleted file mode 100644 index 26e30345905..00000000000 --- a/docs/sources/panels/working-with-panels/apply-color-to-series.md +++ /dev/null @@ -1,34 +0,0 @@ ---- -aliases: - - /docs/grafana/latest/panels/working-with-panels/apply-color-to-series/ - - /docs/sources/panels/working-with-panels/apply-color-to-series/ -title: Apply color to series and fields -weight: 50 ---- - -# Apply color to series and fields - -In addition to specifying color based on thresholds, you can configure the color of series and field data. 
The color options and their effect on the visualization depends on the visualization you are working with. Some visualizations have different color options. - -You can specify a single color, or select a continuous (gradient) color schemes, based on a value. -Continuous color interpolates a color using the percentage of a value relative to min and max. - -1. In panel display options, scroll to the **Standard options** or **override** section. - -1. Click the **Standard options Color scheme** drop-down, and select one of the following palettes: - -
- -| Color mode | Description | -| ------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------- | --- | -| **Single color** | Specify a single color, useful in an override rule | -| **From thresholds** | Informs Grafana to take the color from the matching threshold | -| **Classic palette** | Grafana will assign color by looking up a color in a palette by series index. Useful for Graphs and pie charts and other categorical data visualizations | -| **Green-Yellow-Red (by value)** | Continuous color scheme | -| **Blue-Yellow-Red (by value)** | Continuous color scheme | -| **Blues (by value)** | Continuous color scheme (panel background to blue) | -| **Reds (by value)** | Continuous color scheme (panel background color to blue) | -| **Greens (by value)** | Continuous color scheme (panel background color to blue) | -| **Purple (by value)** | Continuous color scheme (panel background color to blue) | . | - -{{< figure src="/static/img/docs/v73/color_scheme_dropdown.png" max-width="350px" caption="Color scheme" class="pull-right" >}} diff --git a/docs/sources/panels/working-with-panels/format-standard-fields.md b/docs/sources/panels/working-with-panels/format-standard-fields.md deleted file mode 100644 index ee0404ac817..00000000000 --- a/docs/sources/panels/working-with-panels/format-standard-fields.md +++ /dev/null @@ -1,27 +0,0 @@ ---- -aliases: - - /docs/grafana/latest/panels/working-with-panels/format-standard-fields/ - - /docs/sources/panels/working-with-panels/format-standard-fields/ -title: Format a standard field -weight: 40 ---- - -# Format a standard field - -The data model used in Grafana, namely the [data frame]({{< relref "../../developers/plugins/data-frames/" >}}), is a columnar-oriented table structure that unifies both time series and table query results. Each column within this structure is called a _field_. 
A field can represent a single time series or table column. - -Field options allow you to change how the data is displayed in your visualizations. Options and overrides that you apply do not change the data, they change how Grafana displays the data. When you change an option, it is applied to all fields, meaning all series or columns. For example, if you change the unit to percentage, then all fields with numeric values are displayed in percentages. - -For a complete list of field formatting options, refer to [Standard field definitions]({{< relref "../standard-field-definitions/" >}}). - -> You can apply standard options to most built-in Grafana panels. Some older panels and community panels that have not updated to the new panel and data model will be missing either all or some of these field options. - -1. Open a dashboard, click the panel title, and click **Edit**. - -1. In the panel display options pane, locate the **Standard options** section. - -1. Select the standard options you want to apply. - - For more information about standard options, refer to [Standard field definitions]({{< relref "../standard-field-definitions/" >}}). - -1. To preview your change, click outside of the field option box you are editing or press **Enter**. diff --git a/docs/sources/setup-grafana/upgrade-grafana.md b/docs/sources/setup-grafana/upgrade-grafana.md index f6c48ddeaf1..4ad564bfaf9 100644 --- a/docs/sources/setup-grafana/upgrade-grafana.md +++ b/docs/sources/setup-grafana/upgrade-grafana.md @@ -374,7 +374,7 @@ ORDER BY time There are two possible workarounds to resolve this problem: 1. In Grafana v8.0.3, use an alias of the string column selected as `metric`. for example, `hostname as metric`. -2. Use the [Standard field definitions' display name]({{< relref "../panels/standard-field-definitions/#display-name" >}}) to format the alias. For the preceding example query, you would use `${__field.labels.hostname}` option. +2. 
Use the [Standard field definitions' display name]({{< relref "../panels/configure-standard-options/#display-name" >}}) to format the alias. For the preceding example query, you would use `${__field.labels.hostname}` option. For more information, refer to the our relational databases documentation of [Postgres]({{< relref "../datasources/postgres/#time-series-queries" >}}), [MySQL]({{< relref "../datasources/mysql/#time-series-queries" >}}), [Microsoft SQL Server]({{< relref "../datasources/mssql/#time-series-queries" >}}). diff --git a/docs/sources/visualizations/bar-chart.md b/docs/sources/visualizations/bar-chart.md index 0dd277840b3..b03d53d07b8 100644 --- a/docs/sources/visualizations/bar-chart.md +++ b/docs/sources/visualizations/bar-chart.md @@ -157,4 +157,4 @@ Set a **Soft min** or **soft max** option for better control of Y-axis limits. B **Soft min** and **soft max** settings can prevent blips from turning into mountains when the data is mostly flat, and hard min or max derived from standard min and max field options can prevent intermittent spikes from flattening useful detail by clipping the spikes past a defined point. -You can set standard min/max options to define hard limits of the Y-axis. For more information, refer to [Standard field definitions]({{< relref "../panels/standard-field-definitions/#max" >}}). +You can set standard min/max options to define hard limits of the Y-axis. For more information, refer to [Standard options definitions]({{< relref "../panels/configure-standard-options/#max" >}}). diff --git a/docs/sources/visualizations/histogram.md b/docs/sources/visualizations/histogram.md index ae2c36d49d2..071b23fccca 100644 --- a/docs/sources/visualizations/histogram.md +++ b/docs/sources/visualizations/histogram.md @@ -50,7 +50,7 @@ Controls the fill opacity bars. ### Gradient mode -Set the mode of the gradient fill. Fill gradient is based on the line color. 
To change the color, use the standard [color scheme]({{< relref "../panels/working-with-panels/apply-color-to-series/" >}}) field option. +Set the mode of the gradient fill. Fill gradient is based on the line color. To change the color, use the standard [color scheme]({{< relref "../panels/configure-standard-options/#color-scheme" >}}) field option. Gradient display is influenced by the **Fill opacity** setting. diff --git a/docs/sources/visualizations/state-timeline.md b/docs/sources/visualizations/state-timeline.md index 31c76b22099..b6295cf0cf4 100644 --- a/docs/sources/visualizations/state-timeline.md +++ b/docs/sources/visualizations/state-timeline.md @@ -60,6 +60,6 @@ The panel can be used with time series data as well. In this case, the threshold ## Legend options -When the legend option is enabled it can show either the value mappings or the threshold brackets. To show the value mappings in the legend, it's important that the `Color scheme` as referenced in [Apply color to a series and fields]({{< relref "../panels/working-with-panels/apply-color-to-series/" >}}) is set to `Single color` or `Classic palette`. To see the threshold brackets in the legend set the `Color scheme` to `From thresholds`. +When the legend option is enabled it can show either the value mappings or the threshold brackets. To show the value mappings in the legend, it's important that the `Color scheme` as referenced in [Color scheme]({{< relref "../panels/configure-standard-options/#color-scheme" >}}) is set to `Single color` or `Classic palette`. To see the threshold brackets in the legend set the `Color scheme` to `From thresholds`. 
{{< docs/shared "visualizations/legend-mode.md" >}} diff --git a/docs/sources/visualizations/status-history.md b/docs/sources/visualizations/status-history.md index da78aa1a420..0a9429de24d 100644 --- a/docs/sources/visualizations/status-history.md +++ b/docs/sources/visualizations/status-history.md @@ -57,6 +57,6 @@ use gradient color schemes to color values. ## Legend options -When the legend option is enabled it can show either the value mappings or the threshold brackets. To show the value mappings in the legend, it's important that the `Color scheme` as referenced in [Apply color to a series and fields]({{< relref "../panels/working-with-panels/apply-color-to-series/" >}}) is set to `Single color` or `Classic palette`. To see the threshold brackets in the legend set the `Color scheme` to `From thresholds`. +When the legend option is enabled it can show either the value mappings or the threshold brackets. To show the value mappings in the legend, it's important that the `Color scheme` as referenced in [Color scheme]({{< relref "../panels/configure-standard-options/#color-scheme" >}}) is set to `Single color` or `Classic palette`. To see the threshold brackets in the legend set the `Color scheme` to `From thresholds`. {{< docs/shared "visualizations/legend-mode.md" >}} diff --git a/docs/sources/visualizations/time-series/change-axis-display.md b/docs/sources/visualizations/time-series/change-axis-display.md index cb87d3c2154..8f31bc2c191 100644 --- a/docs/sources/visualizations/time-series/change-axis-display.md +++ b/docs/sources/visualizations/time-series/change-axis-display.md @@ -70,7 +70,7 @@ Set a **Soft min** or **soft max** option for better control of Y-axis limits. B **Soft min** and **soft max** settings can prevent blips from turning into mountains when the data is mostly flat, and hard min or max derived from standard min and max field options can prevent intermittent spikes from flattening useful detail by clipping the spikes past a defined point. 
-You can set standard min/max options to define hard limits of the Y-axis. For more information, refer to [Standard field definitions]({{< relref "../../panels/standard-field-definitions/#max" >}}). +You can set standard min/max options to define hard limits of the Y-axis. For more information, refer to [Configure standard options]({{< relref "../../panels/configure-standard-options/#max" >}}). ![Label example](/static/img/docs/time-series-panel/axis-soft-min-max-7-4.png) diff --git a/docs/sources/visualizations/time-series/graph-color-scheme.md b/docs/sources/visualizations/time-series/graph-color-scheme.md index 78692b8005f..1dc9cb688fa 100644 --- a/docs/sources/visualizations/time-series/graph-color-scheme.md +++ b/docs/sources/visualizations/time-series/graph-color-scheme.md @@ -16,7 +16,7 @@ weight: 400 # Graph and color schemes -To set the graph and color schemes, refer to [Apply color to series and fields]({{< relref "../../panels/working-with-panels/apply-color-to-series/" >}}). +To set the graph and color schemes, refer to [Color scheme]({{< relref "../../panels/configure-standard-options/#color-scheme" >}}). ## Classic palette diff --git a/docs/sources/visualizations/time-series/graph-time-series-as-bars.md b/docs/sources/visualizations/time-series/graph-time-series-as-bars.md index aeb0ea82af0..c7afbba7cdc 100644 --- a/docs/sources/visualizations/time-series/graph-time-series-as-bars.md +++ b/docs/sources/visualizations/time-series/graph-time-series-as-bars.md @@ -83,7 +83,7 @@ Fill opacity set to 95: ### Gradient mode -Set the mode of the gradient fill. Fill gradient is based on the line color. To change the color, use the standard color scheme field option. For more information, refer to [Apply color to series and fields]({{< relref "../../panels/working-with-panels/apply-color-to-series/" >}}). +Set the mode of the gradient fill. Fill gradient is based on the line color. To change the color, use the standard color scheme field option. 
For more information, refer to [Color scheme]({{< relref "../../panels/configure-standard-options/#color-scheme" >}}). Gradient appearance is influenced by the **Fill opacity** setting. In the screenshots below, **Fill opacity** is set to 50. @@ -107,7 +107,7 @@ Gradient color is generated based on the hue of the line color. #### Scheme -In this mode the whole bar will use a color gradient defined by your Color scheme. For more information, refer to [Apply color to series and fields]({{< relref "../../panels/working-with-panels/apply-color-to-series/" >}}). There is more information on this option in [Graph and color scheme]({{< relref "graph-color-scheme/" >}}). +In this mode the whole bar will use a color gradient defined by your color scheme. For more information, refer to [Color scheme]({{< relref "../../panels/configure-standard-options/#color-scheme" >}}). There is more information on this option in [Graph and color scheme]({{< relref "graph-color-scheme/" >}}). {{< figure src="/static/img/docs/time-series-panel/gradient_mode_scheme_bars.png" max-width="1200px" caption="Gradient color scheme mode" >}} diff --git a/docs/sources/visualizations/time-series/graph-time-series-as-lines.md b/docs/sources/visualizations/time-series/graph-time-series-as-lines.md index d35f40b3bb2..c7e65b37093 100644 --- a/docs/sources/visualizations/time-series/graph-time-series-as-lines.md +++ b/docs/sources/visualizations/time-series/graph-time-series-as-lines.md @@ -89,7 +89,7 @@ Fill opacity set to 95: ### Gradient mode -Set the mode of the gradient fill. Fill gradient is based on the line color. To change the color, use the standard color scheme field option. For more information, refer to [Apply color to series and fields]({{< relref "../../panels/working-with-panels/apply-color-to-series/" >}}) . +Set the mode of the gradient fill. Fill gradient is based on the line color. To change the color, use the standard color scheme field option. 
For more information, refer to [Color scheme]({{< relref "../../panels/configure-standard-options/#color-scheme" >}}). Gradient appearance is influenced by the **Fill opacity** setting. In the screenshots below, **Fill opacity** is set to 50. @@ -113,13 +113,13 @@ Gradient color is generated based on the hue of the line color. #### Scheme -In this mode the whole line will use a color gradient defined by your Color scheme. For more information, refer to [Apply color to series and fields]({{< relref "../../panels/working-with-panels/apply-color-to-series/" >}}). There is more information on this option in [Graph and color scheme]({{< relref "graph-color-scheme/" >}}). +In this mode the whole line will use a color gradient defined by your Color scheme. For more information, refer to [Color scheme]({{< relref "../../panels/configure-standard-options/#color-scheme" >}}). There is more information on this option in [Graph and color scheme]({{< relref "graph-color-scheme/" >}}). {{< figure src="/static/img/docs/time-series-panel/gradient_mode_scheme_line.png" max-width="1200px" caption="Gradient mode scheme" >}} ### Line style -Set the style of the line. To change the color, use the standard color scheme field option. For more information, refer to [Apply color to series and fields]({{< relref "../../panels/working-with-panels/apply-color-to-series/" >}}) +Set the style of the line. To change the color, use the standard color scheme field option. For more information, refer to [Color scheme]({{< relref "../../panels/configure-standard-options/#color-scheme" >}}). Line style appearance is influenced by the **Line width** and **Fill opacity** settings. In the screenshots below, **Line width** is set to 3 and **Fill opacity** is set to 20. 
From 57249aa4083d4081e9fa76ac8f6ebd19f94ab6c7 Mon Sep 17 00:00:00 2001 From: Adela Almasan <88068998+adela-almasan@users.noreply.github.com> Date: Fri, 15 Jul 2022 17:03:03 -0500 Subject: [PATCH 020/116] Canvas: Overlapping elements bugfix (#52343) --- public/app/features/canvas/runtime/scene.tsx | 17 +++++++++++------ 1 file changed, 11 insertions(+), 6 deletions(-) diff --git a/public/app/features/canvas/runtime/scene.tsx b/public/app/features/canvas/runtime/scene.tsx index 3c4daee26d6..cb47f6b4af4 100644 --- a/public/app/features/canvas/runtime/scene.tsx +++ b/public/app/features/canvas/runtime/scene.tsx @@ -311,6 +311,8 @@ export class Scene { container: this.div, selectableTargets: targetElements, toggleContinueSelect: 'shift', + selectFromInside: false, + hitRate: 0, }); this.moveable = new Moveable(this.div!, { @@ -420,12 +422,15 @@ export class Scene { targets = event.selected; this.updateSelection({ targets }); - if (event.isDragStart) { - event.inputEvent.preventDefault(); - setTimeout(() => { - this.moveable!.dragStart(event.inputEvent); - }); - } + // @TODO Figure out click-drag functionality without phantom mouseup issue + // https://github.com/daybrush/moveable/issues/481 + + // if (event.isDragStart) { + // event.inputEvent.preventDefault(); + // setTimeout(() => { + // this.moveable!.dragStart(event.inputEvent); + // }); + // } }); }; From e6a5b9ee7f517c3e59c182a310e88270bc0e70dc Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Torkel=20=C3=96degaard?= Date: Sat, 16 Jul 2022 17:44:16 +0200 Subject: [PATCH 021/116] TopNav: Store collapse state for chrome top search bar in local storage (#52300) --- .../core/components/AppChrome/AppChrome.tsx | 19 +++++++++---------- .../components/AppChrome/AppChromeService.tsx | 16 ++++++++++++++++ 2 files changed, 25 insertions(+), 10 deletions(-) diff --git a/public/app/core/components/AppChrome/AppChrome.tsx b/public/app/core/components/AppChrome/AppChrome.tsx index cfa37b4e79d..a9c94b907d1 100644 --- 
a/public/app/core/components/AppChrome/AppChrome.tsx +++ b/public/app/core/components/AppChrome/AppChrome.tsx @@ -1,6 +1,5 @@ import { css, cx } from '@emotion/css'; -import React, { PropsWithChildren, useState } from 'react'; -import { useToggle } from 'react-use'; +import React, { PropsWithChildren } from 'react'; import { GrafanaTheme2 } from '@grafana/data'; import { config } from '@grafana/runtime'; @@ -17,8 +16,6 @@ export interface Props extends PropsWithChildren<{}> {} export function AppChrome({ children }: Props) { const styles = useStyles2(getStyles); - const [searchBarHidden, toggleSearchBar] = useToggle(false); // repace with local storage - const [megaMenuOpen, setMegaMenuOpen] = useState(false); const state = appChromeService.useState(); if (state.chromeless || !config.featureToggles.topnav) { @@ -28,18 +25,20 @@ export function AppChrome({ children }: Props) { return (
- {!searchBarHidden && } + {!state.searchBarHidden && } setMegaMenuOpen(!megaMenuOpen)} + onToggleSearchBar={appChromeService.toggleSearchBar} + onToggleMegaMenu={appChromeService.toggleMegaMenu} />
-
{children}
- {megaMenuOpen && setMegaMenuOpen(false)} />} +
{children}
+ {state.megaMenuOpen && ( + + )}
); } diff --git a/public/app/core/components/AppChrome/AppChromeService.tsx b/public/app/core/components/AppChrome/AppChromeService.tsx index 0c113f7701d..bf6c0c031b6 100644 --- a/public/app/core/components/AppChrome/AppChromeService.tsx +++ b/public/app/core/components/AppChrome/AppChromeService.tsx @@ -2,6 +2,7 @@ import { useObservable } from 'react-use'; import { BehaviorSubject } from 'rxjs'; import { NavModelItem } from '@grafana/data'; +import store from 'app/core/store'; import { isShallowEqual } from 'app/core/utils/isShallowEqual'; import { RouteDescriptor } from '../../navigation/types'; @@ -11,14 +12,19 @@ export interface AppChromeState { sectionNav: NavModelItem; pageNav?: NavModelItem; actions?: React.ReactNode; + searchBarHidden?: boolean; + megaMenuOpen?: boolean; } const defaultSection: NavModelItem = { text: 'Grafana' }; export class AppChromeService { + searchBarStorageKey = 'SearchBar_Hidden'; + readonly state = new BehaviorSubject({ chromeless: true, // start out hidden to not flash it on pages without chrome sectionNav: defaultSection, + searchBarHidden: store.getBool(this.searchBarStorageKey, false), }); routeMounted(route: RouteDescriptor) { @@ -42,6 +48,16 @@ export class AppChromeService { } } + toggleMegaMenu = () => { + this.update({ megaMenuOpen: !this.state.getValue().megaMenuOpen }); + }; + + toggleSearchBar = () => { + const searchBarHidden = !this.state.getValue().searchBarHidden; + store.set(this.searchBarStorageKey, searchBarHidden); + this.update({ searchBarHidden }); + }; + useState() { // eslint-disable-next-line react-hooks/rules-of-hooks return useObservable(this.state, this.state.getValue()); From 6188526e1d2a841c633fa0f18f24a2fc00bb4607 Mon Sep 17 00:00:00 2001 From: Artur Wierzbicki Date: Sun, 17 Jul 2022 22:41:54 +0400 Subject: [PATCH 022/116] Storage: use static access rules (#52334) * Storage: use static access rules * Storage: use static access rules * Storage: add tests --- pkg/services/store/file_guardian.go | 121 
++++++++++++++++++++++++++++ pkg/services/store/http.go | 5 +- pkg/services/store/service.go | 104 ++++++++++++++++++------ pkg/services/store/service_test.go | 90 +++++++++++++++------ pkg/services/store/static_auth.go | 41 ++++++++++ pkg/services/store/tree.go | 3 +- pkg/services/store/types.go | 2 +- pkg/services/store/utils.go | 5 ++ 8 files changed, 319 insertions(+), 52 deletions(-) create mode 100644 pkg/services/store/file_guardian.go create mode 100644 pkg/services/store/static_auth.go diff --git a/pkg/services/store/file_guardian.go b/pkg/services/store/file_guardian.go new file mode 100644 index 00000000000..56a1d58c42e --- /dev/null +++ b/pkg/services/store/file_guardian.go @@ -0,0 +1,121 @@ +package store + +import ( + "context" + "strings" + + "github.com/grafana/grafana/pkg/infra/filestorage" + "github.com/grafana/grafana/pkg/infra/log" + "github.com/grafana/grafana/pkg/models" +) + +const ( + ActionFilesRead = "files:read" + ActionFilesWrite = "files:write" + ActionFilesDelete = "files:delete" +) + +var ( + denyAllPathFilter = filestorage.NewDenyAllPathFilter() + allowAllPathFilter = filestorage.NewAllowAllPathFilter() +) + +func isValidAction(action string) bool { + return action == ActionFilesRead || action == ActionFilesWrite || action == ActionFilesDelete +} + +type storageAuthService interface { + newGuardian(ctx context.Context, user *models.SignedInUser, prefix string) fileGuardian +} + +type fileGuardian interface { + canView(path string) bool + canWrite(path string) bool + canDelete(path string) bool + can(action string, path string) bool + + getPathFilter(action string) filestorage.PathFilter +} + +type pathFilterFileGuardian struct { + ctx context.Context + user *models.SignedInUser + prefix string + pathFilterByAction map[string]filestorage.PathFilter + log log.Logger +} + +func (a *pathFilterFileGuardian) getPathFilter(action string) filestorage.PathFilter { + if !isValidAction(action) { + a.log.Warn("Unsupported action", "action", 
action) + return denyAllPathFilter + } + + if filter, ok := a.pathFilterByAction[action]; ok { + return filter + } + + return denyAllPathFilter +} + +func (a *pathFilterFileGuardian) canWrite(path string) bool { + return a.can(ActionFilesWrite, path) +} + +func (a *pathFilterFileGuardian) canView(path string) bool { + return a.can(ActionFilesRead, path) +} + +func (a *pathFilterFileGuardian) canDelete(path string) bool { + return a.can(ActionFilesDelete, path) +} + +func (a *pathFilterFileGuardian) can(action string, path string) bool { + if path == a.prefix { + path = filestorage.Delimiter + } else { + path = strings.TrimPrefix(path, a.prefix) + } + allow := false + + if !isValidAction(action) { + a.log.Warn("Unsupported action", "action", action, "path", path) + return false + } + + pathFilter, ok := a.pathFilterByAction[action] + + if !ok { + a.log.Warn("Missing path filter", "action", action, "path", path) + return false + } + + allow = pathFilter.IsAllowed(path) + if !allow { + a.log.Warn("denying", "action", action, "path", path) + } + return allow +} + +type denyAllFileGuardian struct { +} + +func (d denyAllFileGuardian) canView(path string) bool { + return d.can(ActionFilesRead, path) +} + +func (d denyAllFileGuardian) canWrite(path string) bool { + return d.can(ActionFilesWrite, path) +} + +func (d denyAllFileGuardian) canDelete(path string) bool { + return d.can(ActionFilesDelete, path) +} + +func (d denyAllFileGuardian) can(action string, path string) bool { + return false +} + +func (d denyAllFileGuardian) getPathFilter(action string) filestorage.PathFilter { + return denyAllPathFilter +} diff --git a/pkg/services/store/http.go b/pkg/services/store/http.go index 43f2f69c8c3..9f207d9a7a1 100644 --- a/pkg/services/store/http.go +++ b/pkg/services/store/http.go @@ -37,7 +37,7 @@ func ProvideHTTPService(store StorageService) HTTPStorageService { func UploadErrorToStatusCode(err error) int { switch { - case errors.Is(err, ErrUploadFeatureDisabled): + case 
errors.Is(err, ErrStorageNotFound): return 404 case errors.Is(err, ErrUnsupportedStorage): @@ -49,6 +49,9 @@ func UploadErrorToStatusCode(err error) int { case errors.Is(err, ErrFileAlreadyExists): return 400 + case errors.Is(err, ErrAccessDenied): + return 403 + default: return 500 } diff --git a/pkg/services/store/service.go b/pkg/services/store/service.go index 3713843f11f..50ab7da657d 100644 --- a/pkg/services/store/service.go +++ b/pkg/services/store/service.go @@ -3,7 +3,6 @@ package store import ( "context" "errors" - "fmt" "os" "path/filepath" @@ -19,13 +18,16 @@ import ( var grafanaStorageLogger = log.New("grafanaStorageLogger") -var ErrUploadFeatureDisabled = errors.New("upload feature is disabled") var ErrUnsupportedStorage = errors.New("storage does not support this operation") var ErrUploadInternalError = errors.New("upload internal error") var ErrValidationFailed = errors.New("request validation failed") var ErrFileAlreadyExists = errors.New("file exists") +var ErrStorageNotFound = errors.New("storage not found") +var ErrAccessDenied = errors.New("access denied") const RootPublicStatic = "public-static" +const RootResources = "resources" +const RootDevenv = "devenv" const MAX_UPLOAD_SIZE = 1 * 1024 * 1024 // 3MB @@ -66,9 +68,10 @@ type storageServiceConfig struct { } type standardStorageService struct { - sql *sqlstore.SQLStore - tree *nestedTree - cfg storageServiceConfig + sql *sqlstore.SQLStore + tree *nestedTree + cfg storageServiceConfig + authService storageAuthService } func ProvideService(sql *sqlstore.SQLStore, features featuremgmt.FeatureToggles, cfg *setting.Cfg) StorageService { @@ -90,7 +93,7 @@ func ProvideService(sql *sqlstore.SQLStore, features featuremgmt.FeatureToggles, devenv := filepath.Join(cfg.StaticRootPath, "..", "devenv") if _, err := os.Stat(devenv); !os.IsNotExist(err) { // path/to/whatever exists - s := newDiskStorage("devenv", "Development Environment", &StorageLocalDiskConfig{ + s := newDiskStorage(RootDevenv, 
"Development Environment", &StorageLocalDiskConfig{ Path: devenv, Roots: []string{ "/dev-dashboards/", @@ -104,7 +107,7 @@ func ProvideService(sql *sqlstore.SQLStore, features featuremgmt.FeatureToggles, storages := make([]storageRuntime, 0) if features.IsEnabled(featuremgmt.FlagStorageLocalUpload) { storages = append(storages, - newSQLStorage("resources", + newSQLStorage(RootResources, "Resources", &StorageSQLConfig{orgId: orgId}, sql). setBuiltin(true). @@ -114,10 +117,39 @@ func ProvideService(sql *sqlstore.SQLStore, features featuremgmt.FeatureToggles, return storages } - return newStandardStorageService(sql, globalRoots, initializeOrgStorages) + authService := newStaticStorageAuthService(func(ctx context.Context, user *models.SignedInUser, storageName string) map[string]filestorage.PathFilter { + if user == nil || !user.IsGrafanaAdmin { + return nil + } + + switch storageName { + case RootPublicStatic: + return map[string]filestorage.PathFilter{ + ActionFilesRead: allowAllPathFilter, + ActionFilesWrite: denyAllPathFilter, + ActionFilesDelete: denyAllPathFilter, + } + case RootDevenv: + return map[string]filestorage.PathFilter{ + ActionFilesRead: allowAllPathFilter, + ActionFilesWrite: denyAllPathFilter, + ActionFilesDelete: denyAllPathFilter, + } + case RootResources: + return map[string]filestorage.PathFilter{ + ActionFilesRead: allowAllPathFilter, + ActionFilesWrite: allowAllPathFilter, + ActionFilesDelete: allowAllPathFilter, + } + default: + return nil + } + }) + + return newStandardStorageService(sql, globalRoots, initializeOrgStorages, authService) } -func newStandardStorageService(sql *sqlstore.SQLStore, globalRoots []storageRuntime, initializeOrgStorages func(orgId int64) []storageRuntime) *standardStorageService { +func newStandardStorageService(sql *sqlstore.SQLStore, globalRoots []storageRuntime, initializeOrgStorages func(orgId int64) []storageRuntime, authService storageAuthService) *standardStorageService { rootsByOrgId := 
make(map[int64][]storageRuntime) rootsByOrgId[ac.GlobalOrgID] = globalRoots @@ -127,8 +159,9 @@ func newStandardStorageService(sql *sqlstore.SQLStore, globalRoots []storageRunt } res.init() return &standardStorageService{ - sql: sql, - tree: res, + sql: sql, + tree: res, + authService: authService, cfg: storageServiceConfig{ allowUnsanitizedSvgUpload: false, }, @@ -149,12 +182,15 @@ func getOrgId(user *models.SignedInUser) int64 { } func (s *standardStorageService) List(ctx context.Context, user *models.SignedInUser, path string) (*StorageListFrame, error) { - // apply access control here - return s.tree.ListFolder(ctx, getOrgId(user), path) + guardian := s.authService.newGuardian(ctx, user, getFirstSegment(path)) + return s.tree.ListFolder(ctx, getOrgId(user), path, guardian.getPathFilter(ActionFilesRead)) } func (s *standardStorageService) Read(ctx context.Context, user *models.SignedInUser, path string) (*filestorage.File, error) { - // TODO: permission check! + guardian := s.authService.newGuardian(ctx, user, getFirstSegment(path)) + if !guardian.canView(path) { + return nil, ErrAccessDenied + } return s.tree.GetFile(ctx, getOrgId(user), path) } @@ -171,12 +207,17 @@ type UploadRequest struct { } func (s *standardStorageService) Upload(ctx context.Context, user *models.SignedInUser, req *UploadRequest) error { - upload, storagePath := s.tree.getRoot(getOrgId(user), req.Path) - if upload == nil { - return ErrUploadFeatureDisabled + guardian := s.authService.newGuardian(ctx, user, getFirstSegment(req.Path)) + if !guardian.canWrite(req.Path) { + return ErrAccessDenied } - if upload.Meta().ReadOnly { + root, storagePath := s.tree.getRoot(getOrgId(user), req.Path) + if root == nil { + return ErrStorageNotFound + } + + if root.Meta().ReadOnly { return ErrUnsupportedStorage } @@ -195,7 +236,7 @@ func (s *standardStorageService) Upload(ctx context.Context, user *models.Signed grafanaStorageLogger.Info("uploading a file", "filetype", req.MimeType, "path", req.Path) if 
!req.OverwriteExistingFile { - file, err := upload.Store().Get(ctx, storagePath) + file, err := root.Store().Get(ctx, storagePath) if err != nil { grafanaStorageLogger.Error("failed while checking file existence", "err", err, "path", req.Path) return ErrUploadInternalError @@ -206,7 +247,7 @@ func (s *standardStorageService) Upload(ctx context.Context, user *models.Signed } } - if err := upload.Store().Upsert(ctx, upsertCommand); err != nil { + if err := root.Store().Upsert(ctx, upsertCommand); err != nil { grafanaStorageLogger.Error("failed while uploading the file", "err", err, "path", req.Path) return ErrUploadInternalError } @@ -215,9 +256,14 @@ func (s *standardStorageService) Upload(ctx context.Context, user *models.Signed } func (s *standardStorageService) DeleteFolder(ctx context.Context, user *models.SignedInUser, cmd *DeleteFolderCmd) error { + guardian := s.authService.newGuardian(ctx, user, getFirstSegment(cmd.Path)) + if !guardian.canDelete(cmd.Path) { + return ErrAccessDenied + } + root, storagePath := s.tree.getRoot(getOrgId(user), cmd.Path) if root == nil { - return fmt.Errorf("resources storage is not enabled") + return ErrStorageNotFound } if root.Meta().ReadOnly { @@ -227,13 +273,18 @@ func (s *standardStorageService) DeleteFolder(ctx context.Context, user *models. 
if storagePath == "" { storagePath = filestorage.Delimiter } - return root.Store().DeleteFolder(ctx, storagePath, &filestorage.DeleteFolderOptions{Force: true}) + return root.Store().DeleteFolder(ctx, storagePath, &filestorage.DeleteFolderOptions{Force: true, AccessFilter: guardian.getPathFilter(ActionFilesDelete)}) } func (s *standardStorageService) CreateFolder(ctx context.Context, user *models.SignedInUser, cmd *CreateFolderCmd) error { + guardian := s.authService.newGuardian(ctx, user, getFirstSegment(cmd.Path)) + if !guardian.canWrite(cmd.Path) { + return ErrAccessDenied + } + root, storagePath := s.tree.getRoot(getOrgId(user), cmd.Path) if root == nil { - return fmt.Errorf("resources storage is not enabled") + return ErrStorageNotFound } if root.Meta().ReadOnly { @@ -248,9 +299,14 @@ func (s *standardStorageService) CreateFolder(ctx context.Context, user *models. } func (s *standardStorageService) Delete(ctx context.Context, user *models.SignedInUser, path string) error { + guardian := s.authService.newGuardian(ctx, user, getFirstSegment(path)) + if !guardian.canDelete(path) { + return ErrAccessDenied + } + root, storagePath := s.tree.getRoot(getOrgId(user), path) if root == nil { - return fmt.Errorf("resources storage is not enabled") + return ErrStorageNotFound } if root.Meta().ReadOnly { diff --git a/pkg/services/store/service_test.go b/pkg/services/store/service_test.go index 9c705621512..597d6a6bf93 100644 --- a/pkg/services/store/service_test.go +++ b/pkg/services/store/service_test.go @@ -16,29 +16,41 @@ import ( ) var ( - dummyUser = &models.SignedInUser{OrgId: 1} + dummyUser = &models.SignedInUser{OrgId: 1} + allowAllAuthService = newStaticStorageAuthService(func(ctx context.Context, user *models.SignedInUser, storageName string) map[string]filestorage.PathFilter { + return map[string]filestorage.PathFilter{ + ActionFilesDelete: allowAllPathFilter, + ActionFilesWrite: allowAllPathFilter, + ActionFilesRead: allowAllPathFilter, + } + }) + 
denyAllAuthService = newStaticStorageAuthService(func(ctx context.Context, user *models.SignedInUser, storageName string) map[string]filestorage.PathFilter { + return map[string]filestorage.PathFilter{ + ActionFilesDelete: denyAllPathFilter, + ActionFilesWrite: denyAllPathFilter, + ActionFilesRead: denyAllPathFilter, + } + }) + publicRoot, _ = filepath.Abs("../../../public") + publicStaticFilesStorage = newDiskStorage("public", "Public static files", &StorageLocalDiskConfig{ + Path: publicRoot, + Roots: []string{ + "/testdata/", + "/img/icons/", + "/img/bg/", + "/gazetteer/", + "/maps/", + "/upload/", + }, + }).setReadOnly(true).setBuiltin(true) ) func TestListFiles(t *testing.T) { - publicRoot, err := filepath.Abs("../../../public") - require.NoError(t, err) - roots := []storageRuntime{ - newDiskStorage("public", "Public static files", &StorageLocalDiskConfig{ - Path: publicRoot, - Roots: []string{ - "/testdata/", - "/img/icons/", - "/img/bg/", - "/gazetteer/", - "/maps/", - "/upload/", - }, - }).setReadOnly(true).setBuiltin(true), - } + roots := []storageRuntime{publicStaticFilesStorage} store := newStandardStorageService(sqlstore.InitTestDB(t), roots, func(orgId int64) []storageRuntime { return make([]storageRuntime, 0) - }) + }, allowAllAuthService) frame, err := store.List(context.Background(), dummyUser, "public/testdata") require.NoError(t, err) @@ -53,22 +65,38 @@ func TestListFiles(t *testing.T) { experimental.CheckGoldenJSONFrame(t, "testdata", "public_testdata_js_libraries.golden", testDsFrame, true) } -func setupUploadStore(t *testing.T) (StorageService, *filestorage.MockFileStorage, string) { +func TestListFilesWithoutPermissions(t *testing.T) { + roots := []storageRuntime{publicStaticFilesStorage} + + store := newStandardStorageService(sqlstore.InitTestDB(t), roots, func(orgId int64) []storageRuntime { + return make([]storageRuntime, 0) + }, denyAllAuthService) + frame, err := store.List(context.Background(), dummyUser, "public/testdata") + 
require.NoError(t, err) + rowLen, err := frame.RowLen() + require.NoError(t, err) + require.Equal(t, 0, rowLen) +} + +func setupUploadStore(t *testing.T, authService storageAuthService) (StorageService, *filestorage.MockFileStorage, string) { t.Helper() storageName := "resources" mockStorage := &filestorage.MockFileStorage{} sqlStorage := newSQLStorage(storageName, "Testing upload", &StorageSQLConfig{orgId: 1}, sqlstore.InitTestDB(t)) sqlStorage.store = mockStorage + if authService == nil { + authService = allowAllAuthService + } store := newStandardStorageService(sqlstore.InitTestDB(t), []storageRuntime{sqlStorage}, func(orgId int64) []storageRuntime { return make([]storageRuntime, 0) - }) + }, authService) return store, mockStorage, storageName } func TestShouldUploadWhenNoFileAlreadyExists(t *testing.T) { - service, mockStorage, storageName := setupUploadStore(t) + service, mockStorage, storageName := setupUploadStore(t, nil) mockStorage.On("Get", mock.Anything, "/myFile.jpg").Return(nil, nil) mockStorage.On("Upsert", mock.Anything, mock.Anything).Return(nil) @@ -82,8 +110,20 @@ func TestShouldUploadWhenNoFileAlreadyExists(t *testing.T) { require.NoError(t, err) } +func TestShouldFailUploadWithoutAccess(t *testing.T) { + service, _, storageName := setupUploadStore(t, denyAllAuthService) + + err := service.Upload(context.Background(), dummyUser, &UploadRequest{ + EntityType: EntityTypeImage, + Contents: make([]byte, 0), + Path: storageName + "/myFile.jpg", + MimeType: "image/jpg", + }) + require.ErrorIs(t, err, ErrAccessDenied) +} + func TestShouldFailUploadWhenFileAlreadyExists(t *testing.T) { - service, mockStorage, storageName := setupUploadStore(t) + service, mockStorage, storageName := setupUploadStore(t, nil) mockStorage.On("Get", mock.Anything, "/myFile.jpg").Return(&filestorage.File{Contents: make([]byte, 0)}, nil) @@ -97,7 +137,7 @@ func TestShouldFailUploadWhenFileAlreadyExists(t *testing.T) { } func TestShouldDelegateFileDeletion(t *testing.T) { - 
service, mockStorage, storageName := setupUploadStore(t) + service, mockStorage, storageName := setupUploadStore(t, nil) mockStorage.On("Delete", mock.Anything, "/myFile.jpg").Return(nil) @@ -106,7 +146,7 @@ func TestShouldDelegateFileDeletion(t *testing.T) { } func TestShouldDelegateFolderCreation(t *testing.T) { - service, mockStorage, storageName := setupUploadStore(t) + service, mockStorage, storageName := setupUploadStore(t, nil) mockStorage.On("CreateFolder", mock.Anything, "/nestedFolder/mostNestedFolder").Return(nil) @@ -115,9 +155,9 @@ func TestShouldDelegateFolderCreation(t *testing.T) { } func TestShouldDelegateFolderDeletion(t *testing.T) { - service, mockStorage, storageName := setupUploadStore(t) + service, mockStorage, storageName := setupUploadStore(t, nil) - mockStorage.On("DeleteFolder", mock.Anything, "/", &filestorage.DeleteFolderOptions{Force: true}).Return(nil) + mockStorage.On("DeleteFolder", mock.Anything, "/", mock.Anything).Return(nil) err := service.DeleteFolder(context.Background(), dummyUser, &DeleteFolderCmd{ Path: storageName, diff --git a/pkg/services/store/static_auth.go b/pkg/services/store/static_auth.go new file mode 100644 index 00000000000..e97db52c487 --- /dev/null +++ b/pkg/services/store/static_auth.go @@ -0,0 +1,41 @@ +package store + +import ( + "context" + + "github.com/grafana/grafana/pkg/infra/filestorage" + "github.com/grafana/grafana/pkg/infra/log" + "github.com/grafana/grafana/pkg/models" +) + +type createPathFilterByAction func(ctx context.Context, user *models.SignedInUser, storageName string) map[string]filestorage.PathFilter + +func newStaticStorageAuthService(createPathFilterByAction createPathFilterByAction) storageAuthService { + return &staticStorageAuth{ + denyAllFileGuardian: &denyAllFileGuardian{}, + createPathFilterByAction: createPathFilterByAction, + log: log.New("staticStorageAuthService"), + } +} + +type staticStorageAuth struct { + log log.Logger + denyAllFileGuardian fileGuardian + 
createPathFilterByAction createPathFilterByAction +} + +func (a *staticStorageAuth) newGuardian(ctx context.Context, user *models.SignedInUser, storageName string) fileGuardian { + pathFilter := a.createPathFilterByAction(ctx, user, storageName) + + if pathFilter == nil { + return a.denyAllFileGuardian + } + + return &pathFilterFileGuardian{ + ctx: ctx, + user: user, + log: a.log, + prefix: storageName, + pathFilterByAction: pathFilter, + } +} diff --git a/pkg/services/store/tree.go b/pkg/services/store/tree.go index 5748fe5adc3..a28a7689410 100644 --- a/pkg/services/store/tree.go +++ b/pkg/services/store/tree.go @@ -85,7 +85,7 @@ func (t *nestedTree) GetFile(ctx context.Context, orgId int64, path string) (*fi return root.Store().Get(ctx, path) } -func (t *nestedTree) ListFolder(ctx context.Context, orgId int64, path string) (*StorageListFrame, error) { +func (t *nestedTree) ListFolder(ctx context.Context, orgId int64, path string, accessFilter filestorage.PathFilter) (*StorageListFrame, error) { if path == "" || path == "/" { t.assureOrgIsInitialized(orgId) @@ -150,6 +150,7 @@ func (t *nestedTree) ListFolder(ctx context.Context, orgId int64, path string) ( Recursive: false, WithFolders: true, WithFiles: true, + Filter: accessFilter, }) if err != nil { diff --git a/pkg/services/store/types.go b/pkg/services/store/types.go index db9cdfa8202..c076a60e7d0 100644 --- a/pkg/services/store/types.go +++ b/pkg/services/store/types.go @@ -30,7 +30,7 @@ type WriteValueResponse struct { type storageTree interface { GetFile(ctx context.Context, orgId int64, path string) (*filestorage.File, error) - ListFolder(ctx context.Context, orgId int64, path string) (*StorageListFrame, error) + ListFolder(ctx context.Context, orgId int64, path string, accessFilter filestorage.PathFilter) (*StorageListFrame, error) } //------------------------------------------- diff --git a/pkg/services/store/utils.go b/pkg/services/store/utils.go index 70f47349f39..f7fd341ac47 100644 --- 
a/pkg/services/store/utils.go +++ b/pkg/services/store/utils.go @@ -28,3 +28,8 @@ func getPathAndScope(c *models.ReqContext) (string, string) { } return splitFirstSegment(path) } + +func getFirstSegment(path string) string { + firstSegment, _ := splitFirstSegment(path) + return firstSegment +} From a71b4f13e477fdb15674d93eaddfde060e3193a8 Mon Sep 17 00:00:00 2001 From: Alexander Gee Date: Mon, 18 Jul 2022 01:32:52 -0500 Subject: [PATCH 023/116] Dashboard: Add guidance about reload required after updating shared cursor/tooltip setting. (#52280) * Dashboard: Add guidance about reloaded needed for shared cursor/tooltip * Dashboard: Added todo note for author of (#46581) impl * Dashboard: prettier errors fixed for new text --- .../dashboard/components/DashboardSettings/GeneralSettings.tsx | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/public/app/features/dashboard/components/DashboardSettings/GeneralSettings.tsx b/public/app/features/dashboard/components/DashboardSettings/GeneralSettings.tsx index 1ddfc1a4cef..d76cedd757d 100644 --- a/public/app/features/dashboard/components/DashboardSettings/GeneralSettings.tsx +++ b/public/app/features/dashboard/components/DashboardSettings/GeneralSettings.tsx @@ -143,10 +143,11 @@ export function GeneralSettingsUnconnected({ dashboard, updateTimeZone, updateWe liveNow={dashboard.liveNow} /> + {/* @todo: Update "Graph tooltip" description to remove prompt about reloading when resolving #46581 */} From 9abe9fa7029312d342a878de8ff13d87dd42134e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Joan=20L=C3=B3pez=20de=20la=20Franca=20Beltran?= <5459617+joanlopez@users.noreply.github.com> Date: Mon, 18 Jul 2022 08:57:58 +0200 Subject: [PATCH 024/116] Encryption: Expose secrets migrations through HTTP API (#51707) * Encryption: Move secrets migrations into secrets.Migrator * Encryption: Refactor secrets.Service initialization * Encryption: Add support to run secrets migrations even when EE is disabled * Encryption: Expose 
secrets migrations through HTTP API * Update docs * Fix docs links * Some adjustments to makes errors explicit through HTTP response --- docs/sources/developers/http_api/admin.md | 69 +++++++++++++++++-- .../configure-database-encryption/_index.md | 15 ++-- pkg/api/admin_encryption.go | 36 +++++++++- pkg/api/api.go | 3 + .../secretsmigrations/secretsmigrations.go | 6 +- pkg/services/secrets/migrator/migrator.go | 48 +++++++------ pkg/services/secrets/migrator/reencrypt.go | 24 ++++--- pkg/services/secrets/secrets.go | 12 +++- 8 files changed, 168 insertions(+), 45 deletions(-) diff --git a/docs/sources/developers/http_api/admin.md b/docs/sources/developers/http_api/admin.md index e83c7c941eb..d113a1280ad 100644 --- a/docs/sources/developers/http_api/admin.md +++ b/docs/sources/developers/http_api/admin.md @@ -718,11 +718,7 @@ Content-Type: application/json `POST /api/admin/encryption/rotate-data-keys` -Rotates data encryption keys, so all the active keys are disabled -and no longer used for encryption but kept for decryption operations. - -Secrets encrypted with one of the deactivated keys need to be re-encrypted -to actually stop using those keys for both encryption and decryption. +[Rotates]({{< relref "../../setup-grafana/configure-security/configure-database-encryption/#rotate-data-keys" >}}) data encryption keys. **Example Request**: @@ -738,3 +734,66 @@ Content-Type: application/json HTTP/1.1 204 Content-Type: application/json ``` + +## Re-encrypt data encryption keys + +`POST /api/admin/encryption/reencrypt-data-keys` + +[Re-encrypts]({{< relref "../../setup-grafana/configure-security/configure-database-encryption/#re-encrypt-data-keys" >}}) data encryption keys. 
+ +**Example Request**: + +```http +POST /api/admin/encryption/reencrypt-data-keys HTTP/1.1 +Accept: application/json +Content-Type: application/json +``` + +**Example Response**: + +```http +HTTP/1.1 204 +Content-Type: application/json +``` + +## Re-encrypt secrets + +`POST /api/admin/encryption/reencrypt-secrets` + +[Re-encrypts]({{< relref "../../setup-grafana/configure-security/configure-database-encryption/#re-encrypt-secrets" >}}) secrets. + +**Example Request**: + +```http +POST /api/admin/encryption/reencrypt-secrets HTTP/1.1 +Accept: application/json +Content-Type: application/json +``` + +**Example Response**: + +```http +HTTP/1.1 204 +Content-Type: application/json +``` + +## Roll back secrets + +`POST /api/admin/encryption/rollback-secrets` + +[Rolls back]({{< relref "../../setup-grafana/configure-security/configure-database-encryption/#roll-back-secrets" >}}) secrets. + +**Example Request**: + +```http +POST /api/admin/encryption/rollback-secrets HTTP/1.1 +Accept: application/json +Content-Type: application/json +``` + +**Example Response**: + +```http +HTTP/1.1 204 +Content-Type: application/json +``` diff --git a/docs/sources/setup-grafana/configure-security/configure-database-encryption/_index.md b/docs/sources/setup-grafana/configure-security/configure-database-encryption/_index.md index 21251774e37..48864deb65f 100644 --- a/docs/sources/setup-grafana/configure-security/configure-database-encryption/_index.md +++ b/docs/sources/setup-grafana/configure-security/configure-database-encryption/_index.md @@ -18,7 +18,7 @@ Grafana encrypts these secrets before they are written to the database, by using Since Grafana v9.0, it uses [envelope encryption](#envelope-encryption) by default, which adds a layer of indirection to the encryption process that represents an [**implicit breaking change**](#implicit-breaking-change) for older versions of Grafana. 
-For further details about how to operate a Grafana instance with envelope encryption, see the [Operational work]({{< relref "/#operational-work" >}}) section below. +For further details about how to operate a Grafana instance with envelope encryption, see the [Operational work](#operational-work) section below. > **Note:** In Grafana Enterprise, you can also choose to [encrypt secrets in AES-GCM mode]({{< relref "#changing-your-encryption-mode-to-aes-gcm" >}}) instead of AES-CFB. @@ -31,7 +31,7 @@ Instead of encrypting all secrets with a single key, Grafana uses a set of keys encrypt them. These data encryption keys are themselves encrypted with a single key encryption key (KEK), configured through the `secret_key` attribute in your [Grafana configuration]({{< relref "../../configure-grafana/#secret_key" >}}) or with a -[KMS integration](#kms-integration). +[KMS integration](#encrypting-your-database-with-a-key-from-a-key-management-system-kms). ## Implicit breaking change @@ -67,7 +67,8 @@ Secrets re-encryption can be performed when a Grafana administrator wants to eit - Re-encrypt secrets after a [data keys rotation](#rotate-data-keys). > **Note:** This operation is available through Grafana CLI by running `grafana-cli admin secrets-migration re-encrypt` -> command. It's safe to run more than once. Recommended to run under maintenance mode. +> command and through Grafana [Admin API]({{< relref "../../../developers/http_api/admin/#re-encrypt-secrets" >}}). +> It's safe to run more than once. Recommended to run under maintenance mode. ## Roll back secrets @@ -75,16 +76,18 @@ Used to roll back secrets encrypted with envelope encryption to legacy encryptio a Grafana version earlier than Grafana v9.0 after an unsuccessful upgrade. > **Note:** This operation is available through Grafana CLI by running `grafana-cli admin secrets-migration rollback` -> command. It's safe to run more than once. Recommended to run under maintenance mode. 
+> command and through Grafana [Admin API]({{< relref "../../../developers/http_api/admin/#roll-back-secrets" >}}). +> It's safe to run more than once. Recommended to run under maintenance mode. ## Re-encrypt data keys Used to re-encrypt data keys encrypted with a specific key encryption key (KEK). It can be used to either re-encrypt -existing data keys with a new key encryption key version (see [KMS integration](#kms-integration) rotation) or to +existing data keys with a new key encryption key version (see [KMS integration](#encrypting-your-database-with-a-key-from-a-key-management-system-kms) rotation) or to re-encrypt them with a completely different key encryption key. > **Note:** This operation is available through Grafana CLI by running `grafana-cli admin secrets-migration re-encrypt-data-keys` -> command. It's safe to run more than once. Recommended to run under maintenance mode. +> command and through Grafana [Admin API]({{< relref "../../../developers/http_api/admin/#re-encrypt-data-encryption-keys" >}}). +> It's safe to run more than once. Recommended to run under maintenance mode. 
## Rotate data keys diff --git a/pkg/api/admin_encryption.go b/pkg/api/admin_encryption.go index 6eb94022dd5..1e71b67aeae 100644 --- a/pkg/api/admin_encryption.go +++ b/pkg/api/admin_encryption.go @@ -9,8 +9,42 @@ import ( func (hs *HTTPServer) AdminRotateDataEncryptionKeys(c *models.ReqContext) response.Response { if err := hs.SecretsService.RotateDataKeys(c.Req.Context()); err != nil { - return response.Error(http.StatusInternalServerError, "Failed to rotate data key", err) + return response.Error(http.StatusInternalServerError, "Failed to rotate data keys", err) } return response.Respond(http.StatusNoContent, "") } + +func (hs *HTTPServer) AdminReEncryptEncryptionKeys(c *models.ReqContext) response.Response { + if err := hs.SecretsService.ReEncryptDataKeys(c.Req.Context()); err != nil { + return response.Error(http.StatusInternalServerError, "Failed to re-encrypt data keys", err) + } + + return response.Respond(http.StatusOK, "Data encryption keys re-encrypted successfully") +} + +func (hs *HTTPServer) AdminReEncryptSecrets(c *models.ReqContext) response.Response { + success, err := hs.secretsMigrator.ReEncryptSecrets(c.Req.Context()) + if err != nil { + return response.Error(http.StatusInternalServerError, "Failed to re-encrypt secrets", err) + } + + if !success { + return response.Error(http.StatusPartialContent, "Something unexpected happened, refer to the server logs for more details", err) + } + + return response.Respond(http.StatusOK, "Secrets re-encrypted successfully") +} + +func (hs *HTTPServer) AdminRollbackSecrets(c *models.ReqContext) response.Response { + success, err := hs.secretsMigrator.RollBackSecrets(c.Req.Context()) + if err != nil { + return response.Error(http.StatusInternalServerError, "Failed to rollback secrets", err) + } + + if !success { + return response.Error(http.StatusPartialContent, "Something unexpected happened, refer to the server logs for more details", err) + } + + return response.Respond(http.StatusOK, "Secrets rolled back 
successfully") +} diff --git a/pkg/api/api.go b/pkg/api/api.go index b5cacdb4915..619afbcfb73 100644 --- a/pkg/api/api.go +++ b/pkg/api/api.go @@ -575,6 +575,9 @@ func (hs *HTTPServer) registerRoutes() { } adminRoute.Post("/encryption/rotate-data-keys", reqGrafanaAdmin, routing.Wrap(hs.AdminRotateDataEncryptionKeys)) + adminRoute.Post("/encryption/reencrypt-data-keys", reqGrafanaAdmin, routing.Wrap(hs.AdminReEncryptEncryptionKeys)) + adminRoute.Post("/encryption/reencrypt-secrets", reqGrafanaAdmin, routing.Wrap(hs.AdminReEncryptSecrets)) + adminRoute.Post("/encryption/rollback-secrets", reqGrafanaAdmin, routing.Wrap(hs.AdminRollbackSecrets)) adminRoute.Post("/provisioning/dashboards/reload", authorize(reqGrafanaAdmin, ac.EvalPermission(ActionProvisioningReload, ScopeProvisionersDashboards)), routing.Wrap(hs.AdminProvisioningReloadDashboards)) adminRoute.Post("/provisioning/plugins/reload", authorize(reqGrafanaAdmin, ac.EvalPermission(ActionProvisioningReload, ScopeProvisionersPlugins)), routing.Wrap(hs.AdminProvisioningReloadPlugins)) diff --git a/pkg/cmd/grafana-cli/commands/secretsmigrations/secretsmigrations.go b/pkg/cmd/grafana-cli/commands/secretsmigrations/secretsmigrations.go index 3d6b84b22a1..540e7c92a59 100644 --- a/pkg/cmd/grafana-cli/commands/secretsmigrations/secretsmigrations.go +++ b/pkg/cmd/grafana-cli/commands/secretsmigrations/secretsmigrations.go @@ -12,9 +12,11 @@ func ReEncryptDEKS(_ utils.CommandLine, runner runner.Runner) error { } func ReEncryptSecrets(_ utils.CommandLine, runner runner.Runner) error { - return runner.SecretsMigrator.ReEncryptSecrets(context.Background()) + _, err := runner.SecretsMigrator.ReEncryptSecrets(context.Background()) + return err } func RollBackSecrets(_ utils.CommandLine, runner runner.Runner) error { - return runner.SecretsMigrator.RollBackSecrets(context.Background()) + _, err := runner.SecretsMigrator.RollBackSecrets(context.Background()) + return err } diff --git a/pkg/services/secrets/migrator/migrator.go 
b/pkg/services/secrets/migrator/migrator.go index 2fcba755d2d..07f29e84d8c 100644 --- a/pkg/services/secrets/migrator/migrator.go +++ b/pkg/services/secrets/migrator/migrator.go @@ -37,14 +37,14 @@ func ProvideSecretsMigrator( } } -func (m *SecretsMigrator) ReEncryptSecrets(ctx context.Context) error { +func (m *SecretsMigrator) ReEncryptSecrets(ctx context.Context) (bool, error) { err := m.initProvidersIfNeeded() if err != nil { - return err + return false, err } toReencrypt := []interface { - reencrypt(context.Context, *manager.SecretsService, *sqlstore.SQLStore) + reencrypt(context.Context, *manager.SecretsService, *sqlstore.SQLStore) bool }{ simpleSecret{tableName: "dashboard_snapshot", columnName: "dashboard_encrypted"}, b64Secret{simpleSecret: simpleSecret{tableName: "user_auth", columnName: "o_auth_access_token"}, encoding: base64.StdEncoding}, @@ -56,30 +56,21 @@ func (m *SecretsMigrator) ReEncryptSecrets(ctx context.Context) error { alertingSecret{}, } + var anyFailure bool + for _, r := range toReencrypt { - r.reencrypt(ctx, m.secretsSrv, m.sqlStore) - } - - return nil -} - -func (m *SecretsMigrator) initProvidersIfNeeded() error { - if m.features.IsEnabled(featuremgmt.FlagDisableEnvelopeEncryption) { - logger.Info("Envelope encryption is not enabled but trying to init providers anyway...") - - if err := m.secretsSrv.InitProviders(); err != nil { - logger.Error("Envelope encryption providers initialization failed", "error", err) - return err + if success := r.reencrypt(ctx, m.secretsSrv, m.sqlStore); !success { + anyFailure = true } } - return nil + return !anyFailure, nil } -func (m *SecretsMigrator) RollBackSecrets(ctx context.Context) error { +func (m *SecretsMigrator) RollBackSecrets(ctx context.Context) (bool, error) { err := m.initProvidersIfNeeded() if err != nil { - return err + return false, err } toRollback := []interface { @@ -110,11 +101,26 @@ func (m *SecretsMigrator) RollBackSecrets(ctx context.Context) error { if anyFailure { 
logger.Warn("Some errors happened, not cleaning up data keys table...") - return nil + return false, nil } - if _, sqlErr := m.sqlStore.NewSession(ctx).Exec("DELETE FROM data_keys"); sqlErr != nil { + _, sqlErr := m.sqlStore.NewSession(ctx).Exec("DELETE FROM data_keys") + if sqlErr != nil { logger.Warn("Error while cleaning up data keys table...", "error", sqlErr) + return false, nil + } + + return true, nil +} + +func (m *SecretsMigrator) initProvidersIfNeeded() error { + if m.features.IsEnabled(featuremgmt.FlagDisableEnvelopeEncryption) { + logger.Info("Envelope encryption is not enabled but trying to init providers anyway...") + + if err := m.secretsSrv.InitProviders(); err != nil { + logger.Error("Envelope encryption providers initialization failed", "error", err) + return err + } } return nil diff --git a/pkg/services/secrets/migrator/reencrypt.go b/pkg/services/secrets/migrator/reencrypt.go index acbacc90524..80a8adb6cde 100644 --- a/pkg/services/secrets/migrator/reencrypt.go +++ b/pkg/services/secrets/migrator/reencrypt.go @@ -12,7 +12,7 @@ import ( "github.com/grafana/grafana/pkg/services/sqlstore" ) -func (s simpleSecret) reencrypt(ctx context.Context, secretsSrv *manager.SecretsService, sqlStore *sqlstore.SQLStore) { +func (s simpleSecret) reencrypt(ctx context.Context, secretsSrv *manager.SecretsService, sqlStore *sqlstore.SQLStore) bool { var rows []struct { Id int Secret []byte @@ -20,7 +20,7 @@ func (s simpleSecret) reencrypt(ctx context.Context, secretsSrv *manager.Secrets if err := sqlStore.NewSession(ctx).Table(s.tableName).Select(fmt.Sprintf("id, %s as secret", s.columnName)).Find(&rows); err != nil { logger.Warn("Could not find any secret to re-encrypt", "table", s.tableName) - return + return false } var anyFailure bool @@ -62,9 +62,11 @@ func (s simpleSecret) reencrypt(ctx context.Context, secretsSrv *manager.Secrets } else { logger.Info(fmt.Sprintf("Column %s from %s has been re-encrypted successfully", s.columnName, s.tableName)) } + + return 
!anyFailure } -func (s b64Secret) reencrypt(ctx context.Context, secretsSrv *manager.SecretsService, sqlStore *sqlstore.SQLStore) { +func (s b64Secret) reencrypt(ctx context.Context, secretsSrv *manager.SecretsService, sqlStore *sqlstore.SQLStore) bool { var rows []struct { Id int Secret string @@ -72,7 +74,7 @@ func (s b64Secret) reencrypt(ctx context.Context, secretsSrv *manager.SecretsSer if err := sqlStore.NewSession(ctx).Table(s.tableName).Select(fmt.Sprintf("id, %s as secret", s.columnName)).Find(&rows); err != nil { logger.Warn("Could not find any secret to re-encrypt", "table", s.tableName) - return + return false } var anyFailure bool @@ -128,9 +130,11 @@ func (s b64Secret) reencrypt(ctx context.Context, secretsSrv *manager.SecretsSer } else { logger.Info(fmt.Sprintf("Column %s from %s has been re-encrypted successfully", s.columnName, s.tableName)) } + + return !anyFailure } -func (s jsonSecret) reencrypt(ctx context.Context, secretsSrv *manager.SecretsService, sqlStore *sqlstore.SQLStore) { +func (s jsonSecret) reencrypt(ctx context.Context, secretsSrv *manager.SecretsService, sqlStore *sqlstore.SQLStore) bool { var rows []struct { Id int SecureJsonData map[string][]byte @@ -138,7 +142,7 @@ func (s jsonSecret) reencrypt(ctx context.Context, secretsSrv *manager.SecretsSe if err := sqlStore.NewSession(ctx).Table(s.tableName).Cols("id", "secure_json_data").Find(&rows); err != nil { logger.Warn("Could not find any secret to re-encrypt", "table", s.tableName) - return + return false } var anyFailure bool @@ -184,9 +188,11 @@ func (s jsonSecret) reencrypt(ctx context.Context, secretsSrv *manager.SecretsSe } else { logger.Info(fmt.Sprintf("Secure json data secrets from %s have been re-encrypted successfully", s.tableName)) } + + return !anyFailure } -func (s alertingSecret) reencrypt(ctx context.Context, secretsSrv *manager.SecretsService, sqlStore *sqlstore.SQLStore) { +func (s alertingSecret) reencrypt(ctx context.Context, secretsSrv *manager.SecretsService, 
sqlStore *sqlstore.SQLStore) bool { var results []struct { Id int AlertmanagerConfiguration string @@ -195,7 +201,7 @@ func (s alertingSecret) reencrypt(ctx context.Context, secretsSrv *manager.Secre selectSQL := "SELECT id, alertmanager_configuration FROM alert_configuration" if err := sqlStore.NewSession(ctx).SQL(selectSQL).Find(&results); err != nil { logger.Warn("Could not find any alert_configuration secret to re-encrypt") - return + return false } var anyFailure bool @@ -261,4 +267,6 @@ func (s alertingSecret) reencrypt(ctx context.Context, secretsSrv *manager.Secre } else { logger.Info("Alerting configuration secrets have been re-encrypted successfully") } + + return !anyFailure } diff --git a/pkg/services/secrets/secrets.go b/pkg/services/secrets/secrets.go index aa4fdb7b13a..4b4a554406f 100644 --- a/pkg/services/secrets/secrets.go +++ b/pkg/services/secrets/secrets.go @@ -72,6 +72,14 @@ type BackgroundProvider interface { // Migrator is responsible for secrets migrations like re-encrypting or rolling back secrets. type Migrator interface { - ReEncryptSecrets(ctx context.Context) error - RollBackSecrets(ctx context.Context) error + // ReEncryptSecrets decrypts and re-encrypts the secrets with most recent + // available data key. If a secret-specific decryption / re-encryption fails, + // it does not stop, but returns false as the first return (success or not) + // at the end of the process. + ReEncryptSecrets(ctx context.Context) (bool, error) + // RollBackSecrets decrypts and re-encrypts the secrets using the legacy + // encryption. If a secret-specific decryption / re-encryption fails, it + // does not stop, but returns false as the first return (success or not) + // at the end of the process. 
+ RollBackSecrets(ctx context.Context) (bool, error) } From 689ae96a0e06c6e500591e02f5696a60c76ef089 Mon Sep 17 00:00:00 2001 From: Joe Blubaugh Date: Mon, 18 Jul 2022 15:08:08 +0800 Subject: [PATCH 025/116] Alerting: Refactor API types generation with different names. (#51785) This changes the API codegen template (controller-api.mustache) to simplify some names. When this package was created, most APIs "forked" to either a Grafana backend implementation or a "Lotex" remote implementation. As we have added APIs it's no longer the case. Provisioning, configuration, and testing APIs do not fork, and we are likely to add additional APIs that don't fork. This change replaces {{classname}}ForkingService with {{classname}} for interface names, and names the concrete implementation {{classname}}Handler. It changes the implied implementation of a route handler from fork{{nickname}} to handle{{nickname}}. So PrometheusApiForkingService becomes PrometheusApi, ForkedPrometheusApi becomes PrometheusApiHandler and forkRouteGetGrafanaAlertStatuses becomes handleRouteGetGrafanaAlertStatuses It also renames some files - APIs that do no forking go from forked_{{name}}.go to {{name}}.go and APIs that still fork go from forked_{{name}}.go to forking_{{name}}.go to capture the idea that those files a "doing forking" rather than "are a fork of something." 
Signed-off-by: Joe Blubaugh --- pkg/services/ngalert/api/api.go | 14 +- .../{api_admin.go => api_configuration.go} | 10 +- pkg/services/ngalert/api/configuration.go | 34 +++++ pkg/services/ngalert/api/forked_admin.go | 35 ----- .../ngalert/api/forked_provisioning.go | 108 ---------------- pkg/services/ngalert/api/forked_testing.go | 31 ----- .../{forked_am.go => forking_alertmanager.go} | 58 ++++----- .../{forked_prom.go => forking_prometheus.go} | 16 +-- .../api/{fork_ruler.go => forking_ruler.go} | 33 +++-- .../api/generated_base_api_alertmanager.go | 122 +++++++++++------- .../api/generated_base_api_configuration.go | 21 +-- .../api/generated_base_api_prometheus.go | 22 ++-- .../api/generated_base_api_provisioning.go | 114 +++++++++------- .../ngalert/api/generated_base_api_ruler.go | 65 ++++++---- .../ngalert/api/generated_base_api_testing.go | 20 +-- pkg/services/ngalert/api/provisioning.go | 105 +++++++++++++++ pkg/services/ngalert/api/testing_api.go | 30 +++++ pkg/services/ngalert/api/tooling/Makefile | 4 +- pkg/services/ngalert/api/tooling/api.json | 4 +- pkg/services/ngalert/api/tooling/spec.json | 4 +- .../templates/controller-api.mustache | 17 ++- 21 files changed, 464 insertions(+), 403 deletions(-) rename pkg/services/ngalert/api/{api_admin.go => api_configuration.go} (88%) create mode 100644 pkg/services/ngalert/api/configuration.go delete mode 100644 pkg/services/ngalert/api/forked_admin.go delete mode 100644 pkg/services/ngalert/api/forked_provisioning.go delete mode 100644 pkg/services/ngalert/api/forked_testing.go rename pkg/services/ngalert/api/{forked_am.go => forking_alertmanager.go} (50%) rename pkg/services/ngalert/api/{forked_prom.go => forking_prometheus.go} (60%) rename pkg/services/ngalert/api/{fork_ruler.go => forking_ruler.go} (64%) create mode 100644 pkg/services/ngalert/api/provisioning.go create mode 100644 pkg/services/ngalert/api/testing_api.go diff --git a/pkg/services/ngalert/api/api.go b/pkg/services/ngalert/api/api.go index 
64ac0ed1501..8056a453342 100644 --- a/pkg/services/ngalert/api/api.go +++ b/pkg/services/ngalert/api/api.go @@ -93,19 +93,19 @@ func (api *API) RegisterAPIEndpoints(m *metrics.API) { } // Register endpoints for proxying to Alertmanager-compatible backends. - api.RegisterAlertmanagerApiEndpoints(NewForkedAM( + api.RegisterAlertmanagerApiEndpoints(NewForkingAM( api.DatasourceCache, NewLotexAM(proxy, logger), &AlertmanagerSrv{crypto: api.MultiOrgAlertmanager.Crypto, log: logger, ac: api.AccessControl, mam: api.MultiOrgAlertmanager}, ), m) // Register endpoints for proxying to Prometheus-compatible backends. - api.RegisterPrometheusApiEndpoints(NewForkedProm( + api.RegisterPrometheusApiEndpoints(NewForkingProm( api.DatasourceCache, NewLotexProm(proxy, logger), &PrometheusSrv{log: logger, manager: api.StateManager, store: api.RuleStore, ac: api.AccessControl}, ), m) // Register endpoints for proxying to Cortex Ruler-compatible backends. - api.RegisterRulerApiEndpoints(NewForkedRuler( + api.RegisterRulerApiEndpoints(NewForkingRuler( api.DatasourceCache, NewLotexRuler(proxy, logger), &RulerSrv{ @@ -120,7 +120,7 @@ func (api *API) RegisterAPIEndpoints(m *metrics.API) { ac: api.AccessControl, }, ), m) - api.RegisterTestingApiEndpoints(NewForkedTestingApi( + api.RegisterTestingApiEndpoints(NewTestingApi( &TestingApiSrv{ AlertingProxy: proxy, DatasourceCache: api.DatasourceCache, @@ -128,15 +128,15 @@ func (api *API) RegisterAPIEndpoints(m *metrics.API) { accessControl: api.AccessControl, evaluator: eval.NewEvaluator(api.Cfg, log.New("ngalert.eval"), api.DatasourceCache, api.SecretsService, api.ExpressionService), }), m) - api.RegisterConfigurationApiEndpoints(NewForkedConfiguration( - &AdminSrv{ + api.RegisterConfigurationApiEndpoints(NewConfiguration( + &ConfigSrv{ store: api.AdminConfigStore, log: logger, alertmanagerProvider: api.AlertsRouter, }, ), m) - api.RegisterProvisioningApiEndpoints(NewForkedProvisioningApi(&ProvisioningSrv{ + 
api.RegisterProvisioningApiEndpoints(NewProvisioningApi(&ProvisioningSrv{ log: logger, policies: api.Policies, contactPointService: api.ContactPointService, diff --git a/pkg/services/ngalert/api/api_admin.go b/pkg/services/ngalert/api/api_configuration.go similarity index 88% rename from pkg/services/ngalert/api/api_admin.go rename to pkg/services/ngalert/api/api_configuration.go index e3f33f221dd..3e1ab1dd84d 100644 --- a/pkg/services/ngalert/api/api_admin.go +++ b/pkg/services/ngalert/api/api_configuration.go @@ -15,13 +15,13 @@ import ( v1 "github.com/prometheus/client_golang/api/prometheus/v1" ) -type AdminSrv struct { +type ConfigSrv struct { alertmanagerProvider ExternalAlertmanagerProvider store store.AdminConfigurationStore log log.Logger } -func (srv AdminSrv) RouteGetAlertmanagers(c *models.ReqContext) response.Response { +func (srv ConfigSrv) RouteGetAlertmanagers(c *models.ReqContext) response.Response { urls := srv.alertmanagerProvider.AlertmanagersFor(c.OrgId) droppedURLs := srv.alertmanagerProvider.DroppedAlertmanagersFor(c.OrgId) ams := v1.AlertManagersResult{Active: make([]v1.AlertManager, len(urls)), Dropped: make([]v1.AlertManager, len(droppedURLs))} @@ -38,7 +38,7 @@ func (srv AdminSrv) RouteGetAlertmanagers(c *models.ReqContext) response.Respons }) } -func (srv AdminSrv) RouteGetNGalertConfig(c *models.ReqContext) response.Response { +func (srv ConfigSrv) RouteGetNGalertConfig(c *models.ReqContext) response.Response { if c.OrgRole != models.ROLE_ADMIN { return accessForbiddenResp() } @@ -61,7 +61,7 @@ func (srv AdminSrv) RouteGetNGalertConfig(c *models.ReqContext) response.Respons return response.JSON(http.StatusOK, resp) } -func (srv AdminSrv) RoutePostNGalertConfig(c *models.ReqContext, body apimodels.PostableNGalertConfig) response.Response { +func (srv ConfigSrv) RoutePostNGalertConfig(c *models.ReqContext, body apimodels.PostableNGalertConfig) response.Response { if c.OrgRole != models.ROLE_ADMIN { return accessForbiddenResp() } @@ -97,7 
+97,7 @@ func (srv AdminSrv) RoutePostNGalertConfig(c *models.ReqContext, body apimodels. return response.JSON(http.StatusCreated, util.DynMap{"message": "admin configuration updated"}) } -func (srv AdminSrv) RouteDeleteNGalertConfig(c *models.ReqContext) response.Response { +func (srv ConfigSrv) RouteDeleteNGalertConfig(c *models.ReqContext) response.Response { if c.OrgRole != models.ROLE_ADMIN { return accessForbiddenResp() } diff --git a/pkg/services/ngalert/api/configuration.go b/pkg/services/ngalert/api/configuration.go new file mode 100644 index 00000000000..070025e51ea --- /dev/null +++ b/pkg/services/ngalert/api/configuration.go @@ -0,0 +1,34 @@ +package api + +import ( + "github.com/grafana/grafana/pkg/api/response" + "github.com/grafana/grafana/pkg/models" + apimodels "github.com/grafana/grafana/pkg/services/ngalert/api/tooling/definitions" +) + +// ConfigurationApiHandler always forwards requests to grafana backend +type ConfigurationApiHandler struct { + grafana *ConfigSrv +} + +func NewConfiguration(grafana *ConfigSrv) *ConfigurationApiHandler { + return &ConfigurationApiHandler{ + grafana: grafana, + } +} + +func (f *ConfigurationApiHandler) handleRouteGetAlertmanagers(c *models.ReqContext) response.Response { + return f.grafana.RouteGetAlertmanagers(c) +} + +func (f *ConfigurationApiHandler) handleRouteGetNGalertConfig(c *models.ReqContext) response.Response { + return f.grafana.RouteGetNGalertConfig(c) +} + +func (f *ConfigurationApiHandler) handleRoutePostNGalertConfig(c *models.ReqContext, body apimodels.PostableNGalertConfig) response.Response { + return f.grafana.RoutePostNGalertConfig(c, body) +} + +func (f *ConfigurationApiHandler) handleRouteDeleteNGalertConfig(c *models.ReqContext) response.Response { + return f.grafana.RouteDeleteNGalertConfig(c) +} diff --git a/pkg/services/ngalert/api/forked_admin.go b/pkg/services/ngalert/api/forked_admin.go deleted file mode 100644 index 00c7b421546..00000000000 --- 
a/pkg/services/ngalert/api/forked_admin.go +++ /dev/null @@ -1,35 +0,0 @@ -package api - -import ( - "github.com/grafana/grafana/pkg/api/response" - "github.com/grafana/grafana/pkg/models" - apimodels "github.com/grafana/grafana/pkg/services/ngalert/api/tooling/definitions" -) - -// ForkedConfigurationApi always forwards requests to grafana backend -type ForkedConfigurationApi struct { - grafana *AdminSrv -} - -// NewForkedConfiguration creates a new ForkedConfigurationApi instance -func NewForkedConfiguration(grafana *AdminSrv) *ForkedConfigurationApi { - return &ForkedConfigurationApi{ - grafana: grafana, - } -} - -func (f *ForkedConfigurationApi) forkRouteGetAlertmanagers(c *models.ReqContext) response.Response { - return f.grafana.RouteGetAlertmanagers(c) -} - -func (f *ForkedConfigurationApi) forkRouteGetNGalertConfig(c *models.ReqContext) response.Response { - return f.grafana.RouteGetNGalertConfig(c) -} - -func (f *ForkedConfigurationApi) forkRoutePostNGalertConfig(c *models.ReqContext, body apimodels.PostableNGalertConfig) response.Response { - return f.grafana.RoutePostNGalertConfig(c, body) -} - -func (f *ForkedConfigurationApi) forkRouteDeleteNGalertConfig(c *models.ReqContext) response.Response { - return f.grafana.RouteDeleteNGalertConfig(c) -} diff --git a/pkg/services/ngalert/api/forked_provisioning.go b/pkg/services/ngalert/api/forked_provisioning.go deleted file mode 100644 index 28457980df8..00000000000 --- a/pkg/services/ngalert/api/forked_provisioning.go +++ /dev/null @@ -1,108 +0,0 @@ -package api - -import ( - "github.com/grafana/grafana/pkg/api/response" - "github.com/grafana/grafana/pkg/models" - apimodels "github.com/grafana/grafana/pkg/services/ngalert/api/tooling/definitions" -) - -// ForkedProvisioningApi always forwards requests to a Grafana backend. -// We do not currently support provisioning of external systems through Grafana's API. 
-type ForkedProvisioningApi struct { - svc *ProvisioningSrv -} - -// NewForkedProvisioningApi creates a new ForkedProvisioningApi instance. -func NewForkedProvisioningApi(svc *ProvisioningSrv) *ForkedProvisioningApi { - return &ForkedProvisioningApi{ - svc: svc, - } -} - -func (f *ForkedProvisioningApi) forkRouteGetPolicyTree(ctx *models.ReqContext) response.Response { - return f.svc.RouteGetPolicyTree(ctx) -} - -func (f *ForkedProvisioningApi) forkRoutePutPolicyTree(ctx *models.ReqContext, route apimodels.Route) response.Response { - return f.svc.RoutePutPolicyTree(ctx, route) -} - -func (f *ForkedProvisioningApi) forkRouteResetPolicyTree(ctx *models.ReqContext) response.Response { - return f.svc.RouteResetPolicyTree(ctx) -} - -func (f *ForkedProvisioningApi) forkRouteGetContactpoints(ctx *models.ReqContext) response.Response { - return f.svc.RouteGetContactPoints(ctx) -} - -func (f *ForkedProvisioningApi) forkRoutePostContactpoints(ctx *models.ReqContext, cp apimodels.EmbeddedContactPoint) response.Response { - return f.svc.RoutePostContactPoint(ctx, cp) -} - -func (f *ForkedProvisioningApi) forkRoutePutContactpoint(ctx *models.ReqContext, cp apimodels.EmbeddedContactPoint, UID string) response.Response { - return f.svc.RoutePutContactPoint(ctx, cp, UID) -} - -func (f *ForkedProvisioningApi) forkRouteDeleteContactpoints(ctx *models.ReqContext, UID string) response.Response { - return f.svc.RouteDeleteContactPoint(ctx, UID) -} - -func (f *ForkedProvisioningApi) forkRouteGetTemplates(ctx *models.ReqContext) response.Response { - return f.svc.RouteGetTemplates(ctx) -} - -func (f *ForkedProvisioningApi) forkRouteGetTemplate(ctx *models.ReqContext, name string) response.Response { - return f.svc.RouteGetTemplate(ctx, name) -} - -func (f *ForkedProvisioningApi) forkRoutePutTemplate(ctx *models.ReqContext, body apimodels.MessageTemplateContent, name string) response.Response { - return f.svc.RoutePutTemplate(ctx, body, name) -} - -func (f *ForkedProvisioningApi) 
forkRouteDeleteTemplate(ctx *models.ReqContext, name string) response.Response { - return f.svc.RouteDeleteTemplate(ctx, name) -} - -func (f *ForkedProvisioningApi) forkRouteGetMuteTiming(ctx *models.ReqContext, name string) response.Response { - return f.svc.RouteGetMuteTiming(ctx, name) -} - -func (f *ForkedProvisioningApi) forkRouteGetMuteTimings(ctx *models.ReqContext) response.Response { - return f.svc.RouteGetMuteTimings(ctx) -} - -func (f *ForkedProvisioningApi) forkRoutePostMuteTiming(ctx *models.ReqContext, mt apimodels.MuteTimeInterval) response.Response { - return f.svc.RoutePostMuteTiming(ctx, mt) -} - -func (f *ForkedProvisioningApi) forkRoutePutMuteTiming(ctx *models.ReqContext, mt apimodels.MuteTimeInterval, name string) response.Response { - return f.svc.RoutePutMuteTiming(ctx, mt, name) -} - -func (f *ForkedProvisioningApi) forkRouteDeleteMuteTiming(ctx *models.ReqContext, name string) response.Response { - return f.svc.RouteDeleteMuteTiming(ctx, name) -} - -func (f *ForkedProvisioningApi) forkRouteGetAlertRule(ctx *models.ReqContext, UID string) response.Response { - return f.svc.RouteRouteGetAlertRule(ctx, UID) -} - -func (f *ForkedProvisioningApi) forkRoutePostAlertRule(ctx *models.ReqContext, ar apimodels.ProvisionedAlertRule) response.Response { - return f.svc.RoutePostAlertRule(ctx, ar) -} - -func (f *ForkedProvisioningApi) forkRoutePutAlertRule(ctx *models.ReqContext, ar apimodels.ProvisionedAlertRule, UID string) response.Response { - return f.svc.RoutePutAlertRule(ctx, ar, UID) -} - -func (f *ForkedProvisioningApi) forkRouteDeleteAlertRule(ctx *models.ReqContext, UID string) response.Response { - return f.svc.RouteDeleteAlertRule(ctx, UID) -} - -func (f *ForkedProvisioningApi) forkRouteGetAlertRuleGroup(ctx *models.ReqContext, folder, group string) response.Response { - return f.svc.RouteGetAlertRuleGroup(ctx, folder, group) -} - -func (f *ForkedProvisioningApi) forkRoutePutAlertRuleGroup(ctx *models.ReqContext, ag 
apimodels.AlertRuleGroupMetadata, folder, group string) response.Response { - return f.svc.RoutePutAlertRuleGroup(ctx, ag, folder, group) -} diff --git a/pkg/services/ngalert/api/forked_testing.go b/pkg/services/ngalert/api/forked_testing.go deleted file mode 100644 index 6b41961e770..00000000000 --- a/pkg/services/ngalert/api/forked_testing.go +++ /dev/null @@ -1,31 +0,0 @@ -package api - -import ( - "github.com/grafana/grafana/pkg/api/response" - "github.com/grafana/grafana/pkg/models" - apimodels "github.com/grafana/grafana/pkg/services/ngalert/api/tooling/definitions" -) - -// ForkedTestingApi always forwards requests to grafana backend -type ForkedTestingApi struct { - svc *TestingApiSrv -} - -// NewForkedTestingApi creates a new ForkedTestingApi instance -func NewForkedTestingApi(svc *TestingApiSrv) *ForkedTestingApi { - return &ForkedTestingApi{ - svc: svc, - } -} - -func (f *ForkedTestingApi) forkRouteTestRuleConfig(c *models.ReqContext, body apimodels.TestRulePayload, dsUID string) response.Response { - return f.svc.RouteTestRuleConfig(c, body, dsUID) -} - -func (f *ForkedTestingApi) forkRouteTestRuleGrafanaConfig(c *models.ReqContext, body apimodels.TestRulePayload) response.Response { - return f.svc.RouteTestGrafanaRuleConfig(c, body) -} - -func (f *ForkedTestingApi) forkRouteEvalQueries(c *models.ReqContext, body apimodels.EvalQueriesPayload) response.Response { - return f.svc.RouteEvalQueries(c, body) -} diff --git a/pkg/services/ngalert/api/forked_am.go b/pkg/services/ngalert/api/forking_alertmanager.go similarity index 50% rename from pkg/services/ngalert/api/forked_am.go rename to pkg/services/ngalert/api/forking_alertmanager.go index 28ad7442f7d..32c835217af 100644 --- a/pkg/services/ngalert/api/forked_am.go +++ b/pkg/services/ngalert/api/forking_alertmanager.go @@ -9,22 +9,22 @@ import ( apimodels "github.com/grafana/grafana/pkg/services/ngalert/api/tooling/definitions" ) -type ForkedAlertmanagerApi struct { +type AlertmanagerApiHandler struct { 
AMSvc *LotexAM GrafanaSvc *AlertmanagerSrv DatasourceCache datasources.CacheService } -// NewForkedAM implements a set of routes that proxy to various Alertmanager-compatible backends. -func NewForkedAM(datasourceCache datasources.CacheService, proxy *LotexAM, grafana *AlertmanagerSrv) *ForkedAlertmanagerApi { - return &ForkedAlertmanagerApi{ +// NewForkingAM implements a set of routes that proxy to various Alertmanager-compatible backends. +func NewForkingAM(datasourceCache datasources.CacheService, proxy *LotexAM, grafana *AlertmanagerSrv) *AlertmanagerApiHandler { + return &AlertmanagerApiHandler{ AMSvc: proxy, GrafanaSvc: grafana, DatasourceCache: datasourceCache, } } -func (f *ForkedAlertmanagerApi) getService(ctx *models.ReqContext) (*LotexAM, error) { +func (f *AlertmanagerApiHandler) getService(ctx *models.ReqContext) (*LotexAM, error) { t, err := backendTypeByUID(ctx, f.DatasourceCache) if err != nil { return nil, err @@ -38,7 +38,7 @@ func (f *ForkedAlertmanagerApi) getService(ctx *models.ReqContext) (*LotexAM, er } } -func (f *ForkedAlertmanagerApi) forkRouteGetAMStatus(ctx *models.ReqContext, dsUID string) response.Response { +func (f *AlertmanagerApiHandler) handleRouteGetAMStatus(ctx *models.ReqContext, dsUID string) response.Response { s, err := f.getService(ctx) if err != nil { return response.Error(400, err.Error(), nil) @@ -47,7 +47,7 @@ func (f *ForkedAlertmanagerApi) forkRouteGetAMStatus(ctx *models.ReqContext, dsU return s.RouteGetAMStatus(ctx) } -func (f *ForkedAlertmanagerApi) forkRouteCreateSilence(ctx *models.ReqContext, body apimodels.PostableSilence, dsUID string) response.Response { +func (f *AlertmanagerApiHandler) handleRouteCreateSilence(ctx *models.ReqContext, body apimodels.PostableSilence, dsUID string) response.Response { s, err := f.getService(ctx) if err != nil { return ErrResp(400, err, "") @@ -56,7 +56,7 @@ func (f *ForkedAlertmanagerApi) forkRouteCreateSilence(ctx *models.ReqContext, b return s.RouteCreateSilence(ctx, body) } 
-func (f *ForkedAlertmanagerApi) forkRouteDeleteAlertingConfig(ctx *models.ReqContext, dsUID string) response.Response { +func (f *AlertmanagerApiHandler) handleRouteDeleteAlertingConfig(ctx *models.ReqContext, dsUID string) response.Response { s, err := f.getService(ctx) if err != nil { return ErrResp(400, err, "") @@ -65,7 +65,7 @@ func (f *ForkedAlertmanagerApi) forkRouteDeleteAlertingConfig(ctx *models.ReqCon return s.RouteDeleteAlertingConfig(ctx) } -func (f *ForkedAlertmanagerApi) forkRouteDeleteSilence(ctx *models.ReqContext, silenceID string, dsUID string) response.Response { +func (f *AlertmanagerApiHandler) handleRouteDeleteSilence(ctx *models.ReqContext, silenceID string, dsUID string) response.Response { s, err := f.getService(ctx) if err != nil { return ErrResp(400, err, "") @@ -74,7 +74,7 @@ func (f *ForkedAlertmanagerApi) forkRouteDeleteSilence(ctx *models.ReqContext, s return s.RouteDeleteSilence(ctx, silenceID) } -func (f *ForkedAlertmanagerApi) forkRouteGetAlertingConfig(ctx *models.ReqContext, dsUID string) response.Response { +func (f *AlertmanagerApiHandler) handleRouteGetAlertingConfig(ctx *models.ReqContext, dsUID string) response.Response { s, err := f.getService(ctx) if err != nil { return ErrResp(400, err, "") @@ -83,7 +83,7 @@ func (f *ForkedAlertmanagerApi) forkRouteGetAlertingConfig(ctx *models.ReqContex return s.RouteGetAlertingConfig(ctx) } -func (f *ForkedAlertmanagerApi) forkRouteGetAMAlertGroups(ctx *models.ReqContext, dsUID string) response.Response { +func (f *AlertmanagerApiHandler) handleRouteGetAMAlertGroups(ctx *models.ReqContext, dsUID string) response.Response { s, err := f.getService(ctx) if err != nil { return ErrResp(400, err, "") @@ -92,7 +92,7 @@ func (f *ForkedAlertmanagerApi) forkRouteGetAMAlertGroups(ctx *models.ReqContext return s.RouteGetAMAlertGroups(ctx) } -func (f *ForkedAlertmanagerApi) forkRouteGetAMAlerts(ctx *models.ReqContext, dsUID string) response.Response { +func (f *AlertmanagerApiHandler) 
handleRouteGetAMAlerts(ctx *models.ReqContext, dsUID string) response.Response { s, err := f.getService(ctx) if err != nil { return ErrResp(400, err, "") @@ -101,7 +101,7 @@ func (f *ForkedAlertmanagerApi) forkRouteGetAMAlerts(ctx *models.ReqContext, dsU return s.RouteGetAMAlerts(ctx) } -func (f *ForkedAlertmanagerApi) forkRouteGetSilence(ctx *models.ReqContext, silenceID string, dsUID string) response.Response { +func (f *AlertmanagerApiHandler) handleRouteGetSilence(ctx *models.ReqContext, silenceID string, dsUID string) response.Response { s, err := f.getService(ctx) if err != nil { return ErrResp(400, err, "") @@ -110,7 +110,7 @@ func (f *ForkedAlertmanagerApi) forkRouteGetSilence(ctx *models.ReqContext, sile return s.RouteGetSilence(ctx, silenceID) } -func (f *ForkedAlertmanagerApi) forkRouteGetSilences(ctx *models.ReqContext, dsUID string) response.Response { +func (f *AlertmanagerApiHandler) handleRouteGetSilences(ctx *models.ReqContext, dsUID string) response.Response { s, err := f.getService(ctx) if err != nil { return ErrResp(400, err, "") @@ -119,7 +119,7 @@ func (f *ForkedAlertmanagerApi) forkRouteGetSilences(ctx *models.ReqContext, dsU return s.RouteGetSilences(ctx) } -func (f *ForkedAlertmanagerApi) forkRoutePostAlertingConfig(ctx *models.ReqContext, body apimodels.PostableUserConfig, dsUID string) response.Response { +func (f *AlertmanagerApiHandler) handleRoutePostAlertingConfig(ctx *models.ReqContext, body apimodels.PostableUserConfig, dsUID string) response.Response { s, err := f.getService(ctx) if err != nil { return ErrResp(400, err, "") @@ -137,7 +137,7 @@ func (f *ForkedAlertmanagerApi) forkRoutePostAlertingConfig(ctx *models.ReqConte return s.RoutePostAlertingConfig(ctx, body) } -func (f *ForkedAlertmanagerApi) forkRoutePostAMAlerts(ctx *models.ReqContext, body apimodels.PostableAlerts, dsUID string) response.Response { +func (f *AlertmanagerApiHandler) handleRoutePostAMAlerts(ctx *models.ReqContext, body apimodels.PostableAlerts, dsUID 
string) response.Response { s, err := f.getService(ctx) if err != nil { return ErrResp(400, err, "") @@ -146,7 +146,7 @@ func (f *ForkedAlertmanagerApi) forkRoutePostAMAlerts(ctx *models.ReqContext, bo return s.RoutePostAMAlerts(ctx, body) } -func (f *ForkedAlertmanagerApi) forkRoutePostTestReceivers(ctx *models.ReqContext, body apimodels.TestReceiversConfigBodyParams, dsUID string) response.Response { +func (f *AlertmanagerApiHandler) handleRoutePostTestReceivers(ctx *models.ReqContext, body apimodels.TestReceiversConfigBodyParams, dsUID string) response.Response { s, err := f.getService(ctx) if err != nil { return ErrResp(400, err, "") @@ -155,50 +155,50 @@ func (f *ForkedAlertmanagerApi) forkRoutePostTestReceivers(ctx *models.ReqContex return s.RoutePostTestReceivers(ctx, body) } -func (f *ForkedAlertmanagerApi) forkRouteDeleteGrafanaSilence(ctx *models.ReqContext, id string) response.Response { +func (f *AlertmanagerApiHandler) handleRouteDeleteGrafanaSilence(ctx *models.ReqContext, id string) response.Response { return f.GrafanaSvc.RouteDeleteSilence(ctx, id) } -func (f *ForkedAlertmanagerApi) forkRouteDeleteGrafanaAlertingConfig(ctx *models.ReqContext) response.Response { +func (f *AlertmanagerApiHandler) handleRouteDeleteGrafanaAlertingConfig(ctx *models.ReqContext) response.Response { return f.GrafanaSvc.RouteDeleteAlertingConfig(ctx) } -func (f *ForkedAlertmanagerApi) forkRouteCreateGrafanaSilence(ctx *models.ReqContext, body apimodels.PostableSilence) response.Response { +func (f *AlertmanagerApiHandler) handleRouteCreateGrafanaSilence(ctx *models.ReqContext, body apimodels.PostableSilence) response.Response { return f.GrafanaSvc.RouteCreateSilence(ctx, body) } -func (f *ForkedAlertmanagerApi) forkRouteGetGrafanaAMStatus(ctx *models.ReqContext) response.Response { +func (f *AlertmanagerApiHandler) handleRouteGetGrafanaAMStatus(ctx *models.ReqContext) response.Response { return f.GrafanaSvc.RouteGetAMStatus(ctx) } -func (f *ForkedAlertmanagerApi) 
forkRouteGetGrafanaAMAlerts(ctx *models.ReqContext) response.Response { +func (f *AlertmanagerApiHandler) handleRouteGetGrafanaAMAlerts(ctx *models.ReqContext) response.Response { return f.GrafanaSvc.RouteGetAMAlerts(ctx) } -func (f *ForkedAlertmanagerApi) forkRouteGetGrafanaAMAlertGroups(ctx *models.ReqContext) response.Response { +func (f *AlertmanagerApiHandler) handleRouteGetGrafanaAMAlertGroups(ctx *models.ReqContext) response.Response { return f.GrafanaSvc.RouteGetAMAlertGroups(ctx) } -func (f *ForkedAlertmanagerApi) forkRouteGetGrafanaAlertingConfig(ctx *models.ReqContext) response.Response { +func (f *AlertmanagerApiHandler) handleRouteGetGrafanaAlertingConfig(ctx *models.ReqContext) response.Response { return f.GrafanaSvc.RouteGetAlertingConfig(ctx) } -func (f *ForkedAlertmanagerApi) forkRouteGetGrafanaSilence(ctx *models.ReqContext, id string) response.Response { +func (f *AlertmanagerApiHandler) handleRouteGetGrafanaSilence(ctx *models.ReqContext, id string) response.Response { return f.GrafanaSvc.RouteGetSilence(ctx, id) } -func (f *ForkedAlertmanagerApi) forkRouteGetGrafanaSilences(ctx *models.ReqContext) response.Response { +func (f *AlertmanagerApiHandler) handleRouteGetGrafanaSilences(ctx *models.ReqContext) response.Response { return f.GrafanaSvc.RouteGetSilences(ctx) } -func (f *ForkedAlertmanagerApi) forkRoutePostGrafanaAMAlerts(ctx *models.ReqContext, conf apimodels.PostableAlerts) response.Response { +func (f *AlertmanagerApiHandler) handleRoutePostGrafanaAMAlerts(ctx *models.ReqContext, conf apimodels.PostableAlerts) response.Response { return f.GrafanaSvc.RoutePostAMAlerts(ctx, conf) } -func (f *ForkedAlertmanagerApi) forkRoutePostGrafanaAlertingConfig(ctx *models.ReqContext, conf apimodels.PostableUserConfig) response.Response { +func (f *AlertmanagerApiHandler) handleRoutePostGrafanaAlertingConfig(ctx *models.ReqContext, conf apimodels.PostableUserConfig) response.Response { return f.GrafanaSvc.RoutePostAlertingConfig(ctx, conf) } -func (f 
*ForkedAlertmanagerApi) forkRoutePostTestGrafanaReceivers(ctx *models.ReqContext, conf apimodels.TestReceiversConfigBodyParams) response.Response { +func (f *AlertmanagerApiHandler) handleRoutePostTestGrafanaReceivers(ctx *models.ReqContext, conf apimodels.TestReceiversConfigBodyParams) response.Response { return f.GrafanaSvc.RoutePostTestReceivers(ctx, conf) } diff --git a/pkg/services/ngalert/api/forked_prom.go b/pkg/services/ngalert/api/forking_prometheus.go similarity index 60% rename from pkg/services/ngalert/api/forked_prom.go rename to pkg/services/ngalert/api/forking_prometheus.go index 7009969c43a..c192e696de6 100644 --- a/pkg/services/ngalert/api/forked_prom.go +++ b/pkg/services/ngalert/api/forking_prometheus.go @@ -9,22 +9,22 @@ import ( apimodels "github.com/grafana/grafana/pkg/services/ngalert/api/tooling/definitions" ) -type ForkedPrometheusApi struct { +type PrometheusApiHandler struct { ProxySvc *LotexProm GrafanaSvc *PrometheusSrv DatasourceCache datasources.CacheService } -// NewForkedProm implements a set of routes that proxy to various Prometheus-compatible backends. -func NewForkedProm(datasourceCache datasources.CacheService, proxy *LotexProm, grafana *PrometheusSrv) *ForkedPrometheusApi { - return &ForkedPrometheusApi{ +// NewForkingProm implements a set of routes that proxy to various Prometheus-compatible backends. 
+func NewForkingProm(datasourceCache datasources.CacheService, proxy *LotexProm, grafana *PrometheusSrv) *PrometheusApiHandler { + return &PrometheusApiHandler{ ProxySvc: proxy, GrafanaSvc: grafana, DatasourceCache: datasourceCache, } } -func (f *ForkedPrometheusApi) forkRouteGetAlertStatuses(ctx *models.ReqContext, dsUID string) response.Response { +func (f *PrometheusApiHandler) handleRouteGetAlertStatuses(ctx *models.ReqContext, dsUID string) response.Response { t, err := backendTypeByUID(ctx, f.DatasourceCache) if err != nil { return ErrResp(400, err, "") @@ -38,7 +38,7 @@ func (f *ForkedPrometheusApi) forkRouteGetAlertStatuses(ctx *models.ReqContext, } } -func (f *ForkedPrometheusApi) forkRouteGetRuleStatuses(ctx *models.ReqContext, dsUID string) response.Response { +func (f *PrometheusApiHandler) handleRouteGetRuleStatuses(ctx *models.ReqContext, dsUID string) response.Response { t, err := backendTypeByUID(ctx, f.DatasourceCache) if err != nil { return ErrResp(400, err, "") @@ -52,10 +52,10 @@ func (f *ForkedPrometheusApi) forkRouteGetRuleStatuses(ctx *models.ReqContext, d } } -func (f *ForkedPrometheusApi) forkRouteGetGrafanaAlertStatuses(ctx *models.ReqContext) response.Response { +func (f *PrometheusApiHandler) handleRouteGetGrafanaAlertStatuses(ctx *models.ReqContext) response.Response { return f.GrafanaSvc.RouteGetAlertStatuses(ctx) } -func (f *ForkedPrometheusApi) forkRouteGetGrafanaRuleStatuses(ctx *models.ReqContext) response.Response { +func (f *PrometheusApiHandler) handleRouteGetGrafanaRuleStatuses(ctx *models.ReqContext) response.Response { return f.GrafanaSvc.RouteGetRuleStatuses(ctx) } diff --git a/pkg/services/ngalert/api/fork_ruler.go b/pkg/services/ngalert/api/forking_ruler.go similarity index 64% rename from pkg/services/ngalert/api/fork_ruler.go rename to pkg/services/ngalert/api/forking_ruler.go index 739370d3a04..44b06c85a4f 100644 --- a/pkg/services/ngalert/api/fork_ruler.go +++ b/pkg/services/ngalert/api/forking_ruler.go @@ -9,23 +9,22 
@@ import ( apimodels "github.com/grafana/grafana/pkg/services/ngalert/api/tooling/definitions" ) -// ForkedRulerApi will validate and proxy requests to the correct backend type depending on the datasource. -type ForkedRulerApi struct { +// RulerApiHandler will validate and proxy requests to the correct backend type depending on the datasource. +type RulerApiHandler struct { LotexRuler *LotexRuler GrafanaRuler *RulerSrv DatasourceCache datasources.CacheService } -// NewForkedRuler implements a set of routes that proxy to various Cortex Ruler-compatible backends. -func NewForkedRuler(datasourceCache datasources.CacheService, lotex *LotexRuler, grafana *RulerSrv) *ForkedRulerApi { - return &ForkedRulerApi{ +func NewForkingRuler(datasourceCache datasources.CacheService, lotex *LotexRuler, grafana *RulerSrv) *RulerApiHandler { + return &RulerApiHandler{ LotexRuler: lotex, GrafanaRuler: grafana, DatasourceCache: datasourceCache, } } -func (f *ForkedRulerApi) forkRouteDeleteNamespaceRulesConfig(ctx *models.ReqContext, dsUID, namespace string) response.Response { +func (f *RulerApiHandler) handleRouteDeleteNamespaceRulesConfig(ctx *models.ReqContext, dsUID, namespace string) response.Response { t, err := backendTypeByUID(ctx, f.DatasourceCache) if err != nil { return ErrResp(400, err, "") @@ -38,7 +37,7 @@ func (f *ForkedRulerApi) forkRouteDeleteNamespaceRulesConfig(ctx *models.ReqCont } } -func (f *ForkedRulerApi) forkRouteDeleteRuleGroupConfig(ctx *models.ReqContext, dsUID, namespace, group string) response.Response { +func (f *RulerApiHandler) handleRouteDeleteRuleGroupConfig(ctx *models.ReqContext, dsUID, namespace, group string) response.Response { t, err := backendTypeByUID(ctx, f.DatasourceCache) if err != nil { return ErrResp(400, err, "") @@ -51,7 +50,7 @@ func (f *ForkedRulerApi) forkRouteDeleteRuleGroupConfig(ctx *models.ReqContext, } } -func (f *ForkedRulerApi) forkRouteGetNamespaceRulesConfig(ctx *models.ReqContext, dsUID, namespace string) response.Response 
{ +func (f *RulerApiHandler) handleRouteGetNamespaceRulesConfig(ctx *models.ReqContext, dsUID, namespace string) response.Response { t, err := backendTypeByUID(ctx, f.DatasourceCache) if err != nil { return ErrResp(400, err, "") @@ -64,7 +63,7 @@ func (f *ForkedRulerApi) forkRouteGetNamespaceRulesConfig(ctx *models.ReqContext } } -func (f *ForkedRulerApi) forkRouteGetRulegGroupConfig(ctx *models.ReqContext, dsUID, namespace, group string) response.Response { +func (f *RulerApiHandler) handleRouteGetRulegGroupConfig(ctx *models.ReqContext, dsUID, namespace, group string) response.Response { t, err := backendTypeByUID(ctx, f.DatasourceCache) if err != nil { return ErrResp(400, err, "") @@ -77,7 +76,7 @@ func (f *ForkedRulerApi) forkRouteGetRulegGroupConfig(ctx *models.ReqContext, ds } } -func (f *ForkedRulerApi) forkRouteGetRulesConfig(ctx *models.ReqContext, dsUID string) response.Response { +func (f *RulerApiHandler) handleRouteGetRulesConfig(ctx *models.ReqContext, dsUID string) response.Response { t, err := backendTypeByUID(ctx, f.DatasourceCache) if err != nil { return ErrResp(400, err, "") @@ -90,7 +89,7 @@ func (f *ForkedRulerApi) forkRouteGetRulesConfig(ctx *models.ReqContext, dsUID s } } -func (f *ForkedRulerApi) forkRoutePostNameRulesConfig(ctx *models.ReqContext, conf apimodels.PostableRuleGroupConfig, dsUID, namespace string) response.Response { +func (f *RulerApiHandler) handleRoutePostNameRulesConfig(ctx *models.ReqContext, conf apimodels.PostableRuleGroupConfig, dsUID, namespace string) response.Response { backendType, err := backendTypeByUID(ctx, f.DatasourceCache) if err != nil { return ErrResp(400, err, "") @@ -109,27 +108,27 @@ func (f *ForkedRulerApi) forkRoutePostNameRulesConfig(ctx *models.ReqContext, co } } -func (f *ForkedRulerApi) forkRouteDeleteNamespaceGrafanaRulesConfig(ctx *models.ReqContext, namespace string) response.Response { +func (f *RulerApiHandler) handleRouteDeleteNamespaceGrafanaRulesConfig(ctx *models.ReqContext, namespace 
string) response.Response { return f.GrafanaRuler.RouteDeleteAlertRules(ctx, namespace, "") } -func (f *ForkedRulerApi) forkRouteDeleteGrafanaRuleGroupConfig(ctx *models.ReqContext, namespace, groupName string) response.Response { +func (f *RulerApiHandler) handleRouteDeleteGrafanaRuleGroupConfig(ctx *models.ReqContext, namespace, groupName string) response.Response { return f.GrafanaRuler.RouteDeleteAlertRules(ctx, namespace, groupName) } -func (f *ForkedRulerApi) forkRouteGetNamespaceGrafanaRulesConfig(ctx *models.ReqContext, namespace string) response.Response { +func (f *RulerApiHandler) handleRouteGetNamespaceGrafanaRulesConfig(ctx *models.ReqContext, namespace string) response.Response { return f.GrafanaRuler.RouteGetNamespaceRulesConfig(ctx, namespace) } -func (f *ForkedRulerApi) forkRouteGetGrafanaRuleGroupConfig(ctx *models.ReqContext, namespace, group string) response.Response { +func (f *RulerApiHandler) handleRouteGetGrafanaRuleGroupConfig(ctx *models.ReqContext, namespace, group string) response.Response { return f.GrafanaRuler.RouteGetRulesGroupConfig(ctx, namespace, group) } -func (f *ForkedRulerApi) forkRouteGetGrafanaRulesConfig(ctx *models.ReqContext) response.Response { +func (f *RulerApiHandler) handleRouteGetGrafanaRulesConfig(ctx *models.ReqContext) response.Response { return f.GrafanaRuler.RouteGetRulesConfig(ctx) } -func (f *ForkedRulerApi) forkRoutePostNameGrafanaRulesConfig(ctx *models.ReqContext, conf apimodels.PostableRuleGroupConfig, namespace string) response.Response { +func (f *RulerApiHandler) handleRoutePostNameGrafanaRulesConfig(ctx *models.ReqContext, conf apimodels.PostableRuleGroupConfig, namespace string) response.Response { payloadType := conf.Type() if payloadType != apimodels.GrafanaBackend { return ErrResp(400, fmt.Errorf("unexpected backend type (%v) vs payload type (%v)", apimodels.GrafanaBackend, payloadType), "") diff --git a/pkg/services/ngalert/api/generated_base_api_alertmanager.go 
b/pkg/services/ngalert/api/generated_base_api_alertmanager.go index c983f3f85fd..21f1c3cdc64 100644 --- a/pkg/services/ngalert/api/generated_base_api_alertmanager.go +++ b/pkg/services/ngalert/api/generated_base_api_alertmanager.go @@ -18,7 +18,7 @@ import ( "github.com/grafana/grafana/pkg/web" ) -type AlertmanagerApiForkingService interface { +type AlertmanagerApi interface { RouteCreateGrafanaSilence(*models.ReqContext) response.Response RouteCreateSilence(*models.ReqContext) response.Response RouteDeleteAlertingConfig(*models.ReqContext) response.Response @@ -45,128 +45,150 @@ type AlertmanagerApiForkingService interface { RoutePostTestReceivers(*models.ReqContext) response.Response } -func (f *ForkedAlertmanagerApi) RouteCreateGrafanaSilence(ctx *models.ReqContext) response.Response { +func (f *AlertmanagerApiHandler) RouteCreateGrafanaSilence(ctx *models.ReqContext) response.Response { + // Parse Request Body conf := apimodels.PostableSilence{} if err := web.Bind(ctx.Req, &conf); err != nil { return response.Error(http.StatusBadRequest, "bad request data", err) } - return f.forkRouteCreateGrafanaSilence(ctx, conf) + return f.handleRouteCreateGrafanaSilence(ctx, conf) } -func (f *ForkedAlertmanagerApi) RouteCreateSilence(ctx *models.ReqContext) response.Response { +func (f *AlertmanagerApiHandler) RouteCreateSilence(ctx *models.ReqContext) response.Response { + // Parse Path Parameters datasourceUIDParam := web.Params(ctx.Req)[":DatasourceUID"] + // Parse Request Body conf := apimodels.PostableSilence{} if err := web.Bind(ctx.Req, &conf); err != nil { return response.Error(http.StatusBadRequest, "bad request data", err) } - return f.forkRouteCreateSilence(ctx, conf, datasourceUIDParam) + return f.handleRouteCreateSilence(ctx, conf, datasourceUIDParam) } -func (f *ForkedAlertmanagerApi) RouteDeleteAlertingConfig(ctx *models.ReqContext) response.Response { +func (f *AlertmanagerApiHandler) RouteDeleteAlertingConfig(ctx *models.ReqContext) response.Response { + // 
Parse Path Parameters datasourceUIDParam := web.Params(ctx.Req)[":DatasourceUID"] - return f.forkRouteDeleteAlertingConfig(ctx, datasourceUIDParam) + return f.handleRouteDeleteAlertingConfig(ctx, datasourceUIDParam) } -func (f *ForkedAlertmanagerApi) RouteDeleteGrafanaAlertingConfig(ctx *models.ReqContext) response.Response { - return f.forkRouteDeleteGrafanaAlertingConfig(ctx) +func (f *AlertmanagerApiHandler) RouteDeleteGrafanaAlertingConfig(ctx *models.ReqContext) response.Response { + return f.handleRouteDeleteGrafanaAlertingConfig(ctx) } -func (f *ForkedAlertmanagerApi) RouteDeleteGrafanaSilence(ctx *models.ReqContext) response.Response { +func (f *AlertmanagerApiHandler) RouteDeleteGrafanaSilence(ctx *models.ReqContext) response.Response { + // Parse Path Parameters silenceIdParam := web.Params(ctx.Req)[":SilenceId"] - return f.forkRouteDeleteGrafanaSilence(ctx, silenceIdParam) + return f.handleRouteDeleteGrafanaSilence(ctx, silenceIdParam) } -func (f *ForkedAlertmanagerApi) RouteDeleteSilence(ctx *models.ReqContext) response.Response { +func (f *AlertmanagerApiHandler) RouteDeleteSilence(ctx *models.ReqContext) response.Response { + // Parse Path Parameters silenceIdParam := web.Params(ctx.Req)[":SilenceId"] datasourceUIDParam := web.Params(ctx.Req)[":DatasourceUID"] - return f.forkRouteDeleteSilence(ctx, silenceIdParam, datasourceUIDParam) + return f.handleRouteDeleteSilence(ctx, silenceIdParam, datasourceUIDParam) } -func (f *ForkedAlertmanagerApi) RouteGetAMAlertGroups(ctx *models.ReqContext) response.Response { +func (f *AlertmanagerApiHandler) RouteGetAMAlertGroups(ctx *models.ReqContext) response.Response { + // Parse Path Parameters datasourceUIDParam := web.Params(ctx.Req)[":DatasourceUID"] - return f.forkRouteGetAMAlertGroups(ctx, datasourceUIDParam) + return f.handleRouteGetAMAlertGroups(ctx, datasourceUIDParam) } -func (f *ForkedAlertmanagerApi) RouteGetAMAlerts(ctx *models.ReqContext) response.Response { +func (f *AlertmanagerApiHandler) 
RouteGetAMAlerts(ctx *models.ReqContext) response.Response { + // Parse Path Parameters datasourceUIDParam := web.Params(ctx.Req)[":DatasourceUID"] - return f.forkRouteGetAMAlerts(ctx, datasourceUIDParam) + return f.handleRouteGetAMAlerts(ctx, datasourceUIDParam) } -func (f *ForkedAlertmanagerApi) RouteGetAMStatus(ctx *models.ReqContext) response.Response { +func (f *AlertmanagerApiHandler) RouteGetAMStatus(ctx *models.ReqContext) response.Response { + // Parse Path Parameters datasourceUIDParam := web.Params(ctx.Req)[":DatasourceUID"] - return f.forkRouteGetAMStatus(ctx, datasourceUIDParam) + return f.handleRouteGetAMStatus(ctx, datasourceUIDParam) } -func (f *ForkedAlertmanagerApi) RouteGetAlertingConfig(ctx *models.ReqContext) response.Response { +func (f *AlertmanagerApiHandler) RouteGetAlertingConfig(ctx *models.ReqContext) response.Response { + // Parse Path Parameters datasourceUIDParam := web.Params(ctx.Req)[":DatasourceUID"] - return f.forkRouteGetAlertingConfig(ctx, datasourceUIDParam) + return f.handleRouteGetAlertingConfig(ctx, datasourceUIDParam) } -func (f *ForkedAlertmanagerApi) RouteGetGrafanaAMAlertGroups(ctx *models.ReqContext) response.Response { - return f.forkRouteGetGrafanaAMAlertGroups(ctx) +func (f *AlertmanagerApiHandler) RouteGetGrafanaAMAlertGroups(ctx *models.ReqContext) response.Response { + return f.handleRouteGetGrafanaAMAlertGroups(ctx) } -func (f *ForkedAlertmanagerApi) RouteGetGrafanaAMAlerts(ctx *models.ReqContext) response.Response { - return f.forkRouteGetGrafanaAMAlerts(ctx) +func (f *AlertmanagerApiHandler) RouteGetGrafanaAMAlerts(ctx *models.ReqContext) response.Response { + return f.handleRouteGetGrafanaAMAlerts(ctx) } -func (f *ForkedAlertmanagerApi) RouteGetGrafanaAMStatus(ctx *models.ReqContext) response.Response { - return f.forkRouteGetGrafanaAMStatus(ctx) +func (f *AlertmanagerApiHandler) RouteGetGrafanaAMStatus(ctx *models.ReqContext) response.Response { + return f.handleRouteGetGrafanaAMStatus(ctx) } -func (f 
*ForkedAlertmanagerApi) RouteGetGrafanaAlertingConfig(ctx *models.ReqContext) response.Response { - return f.forkRouteGetGrafanaAlertingConfig(ctx) +func (f *AlertmanagerApiHandler) RouteGetGrafanaAlertingConfig(ctx *models.ReqContext) response.Response { + return f.handleRouteGetGrafanaAlertingConfig(ctx) } -func (f *ForkedAlertmanagerApi) RouteGetGrafanaSilence(ctx *models.ReqContext) response.Response { +func (f *AlertmanagerApiHandler) RouteGetGrafanaSilence(ctx *models.ReqContext) response.Response { + // Parse Path Parameters silenceIdParam := web.Params(ctx.Req)[":SilenceId"] - return f.forkRouteGetGrafanaSilence(ctx, silenceIdParam) + return f.handleRouteGetGrafanaSilence(ctx, silenceIdParam) } -func (f *ForkedAlertmanagerApi) RouteGetGrafanaSilences(ctx *models.ReqContext) response.Response { - return f.forkRouteGetGrafanaSilences(ctx) +func (f *AlertmanagerApiHandler) RouteGetGrafanaSilences(ctx *models.ReqContext) response.Response { + return f.handleRouteGetGrafanaSilences(ctx) } -func (f *ForkedAlertmanagerApi) RouteGetSilence(ctx *models.ReqContext) response.Response { +func (f *AlertmanagerApiHandler) RouteGetSilence(ctx *models.ReqContext) response.Response { + // Parse Path Parameters silenceIdParam := web.Params(ctx.Req)[":SilenceId"] datasourceUIDParam := web.Params(ctx.Req)[":DatasourceUID"] - return f.forkRouteGetSilence(ctx, silenceIdParam, datasourceUIDParam) + return f.handleRouteGetSilence(ctx, silenceIdParam, datasourceUIDParam) } -func (f *ForkedAlertmanagerApi) RouteGetSilences(ctx *models.ReqContext) response.Response { +func (f *AlertmanagerApiHandler) RouteGetSilences(ctx *models.ReqContext) response.Response { + // Parse Path Parameters datasourceUIDParam := web.Params(ctx.Req)[":DatasourceUID"] - return f.forkRouteGetSilences(ctx, datasourceUIDParam) + return f.handleRouteGetSilences(ctx, datasourceUIDParam) } -func (f *ForkedAlertmanagerApi) RoutePostAMAlerts(ctx *models.ReqContext) response.Response { +func (f 
*AlertmanagerApiHandler) RoutePostAMAlerts(ctx *models.ReqContext) response.Response { + // Parse Path Parameters datasourceUIDParam := web.Params(ctx.Req)[":DatasourceUID"] + // Parse Request Body conf := apimodels.PostableAlerts{} if err := web.Bind(ctx.Req, &conf); err != nil { return response.Error(http.StatusBadRequest, "bad request data", err) } - return f.forkRoutePostAMAlerts(ctx, conf, datasourceUIDParam) + return f.handleRoutePostAMAlerts(ctx, conf, datasourceUIDParam) } -func (f *ForkedAlertmanagerApi) RoutePostAlertingConfig(ctx *models.ReqContext) response.Response { +func (f *AlertmanagerApiHandler) RoutePostAlertingConfig(ctx *models.ReqContext) response.Response { + // Parse Path Parameters datasourceUIDParam := web.Params(ctx.Req)[":DatasourceUID"] + // Parse Request Body conf := apimodels.PostableUserConfig{} if err := web.Bind(ctx.Req, &conf); err != nil { return response.Error(http.StatusBadRequest, "bad request data", err) } - return f.forkRoutePostAlertingConfig(ctx, conf, datasourceUIDParam) + return f.handleRoutePostAlertingConfig(ctx, conf, datasourceUIDParam) } -func (f *ForkedAlertmanagerApi) RoutePostGrafanaAMAlerts(ctx *models.ReqContext) response.Response { +func (f *AlertmanagerApiHandler) RoutePostGrafanaAMAlerts(ctx *models.ReqContext) response.Response { + // Parse Request Body conf := apimodels.PostableAlerts{} if err := web.Bind(ctx.Req, &conf); err != nil { return response.Error(http.StatusBadRequest, "bad request data", err) } - return f.forkRoutePostGrafanaAMAlerts(ctx, conf) + return f.handleRoutePostGrafanaAMAlerts(ctx, conf) } -func (f *ForkedAlertmanagerApi) RoutePostGrafanaAlertingConfig(ctx *models.ReqContext) response.Response { +func (f *AlertmanagerApiHandler) RoutePostGrafanaAlertingConfig(ctx *models.ReqContext) response.Response { + // Parse Request Body conf := apimodels.PostableUserConfig{} if err := web.Bind(ctx.Req, &conf); err != nil { return response.Error(http.StatusBadRequest, "bad request data", err) } - 
return f.forkRoutePostGrafanaAlertingConfig(ctx, conf) + return f.handleRoutePostGrafanaAlertingConfig(ctx, conf) } -func (f *ForkedAlertmanagerApi) RoutePostTestGrafanaReceivers(ctx *models.ReqContext) response.Response { +func (f *AlertmanagerApiHandler) RoutePostTestGrafanaReceivers(ctx *models.ReqContext) response.Response { + // Parse Request Body conf := apimodels.TestReceiversConfigBodyParams{} if err := web.Bind(ctx.Req, &conf); err != nil { return response.Error(http.StatusBadRequest, "bad request data", err) } - return f.forkRoutePostTestGrafanaReceivers(ctx, conf) + return f.handleRoutePostTestGrafanaReceivers(ctx, conf) } -func (f *ForkedAlertmanagerApi) RoutePostTestReceivers(ctx *models.ReqContext) response.Response { +func (f *AlertmanagerApiHandler) RoutePostTestReceivers(ctx *models.ReqContext) response.Response { + // Parse Path Parameters datasourceUIDParam := web.Params(ctx.Req)[":DatasourceUID"] + // Parse Request Body conf := apimodels.TestReceiversConfigBodyParams{} if err := web.Bind(ctx.Req, &conf); err != nil { return response.Error(http.StatusBadRequest, "bad request data", err) } - return f.forkRoutePostTestReceivers(ctx, conf, datasourceUIDParam) + return f.handleRoutePostTestReceivers(ctx, conf, datasourceUIDParam) } -func (api *API) RegisterAlertmanagerApiEndpoints(srv AlertmanagerApiForkingService, m *metrics.API) { +func (api *API) RegisterAlertmanagerApiEndpoints(srv AlertmanagerApi, m *metrics.API) { api.RouteRegister.Group("", func(group routing.RouteRegister) { group.Post( toMacaronPath("/api/alertmanager/grafana/api/v2/silences"), diff --git a/pkg/services/ngalert/api/generated_base_api_configuration.go b/pkg/services/ngalert/api/generated_base_api_configuration.go index 084c27dec82..d4b1788082e 100644 --- a/pkg/services/ngalert/api/generated_base_api_configuration.go +++ b/pkg/services/ngalert/api/generated_base_api_configuration.go @@ -18,31 +18,32 @@ import ( "github.com/grafana/grafana/pkg/web" ) -type 
ConfigurationApiForkingService interface { +type ConfigurationApi interface { RouteDeleteNGalertConfig(*models.ReqContext) response.Response RouteGetAlertmanagers(*models.ReqContext) response.Response RouteGetNGalertConfig(*models.ReqContext) response.Response RoutePostNGalertConfig(*models.ReqContext) response.Response } -func (f *ForkedConfigurationApi) RouteDeleteNGalertConfig(ctx *models.ReqContext) response.Response { - return f.forkRouteDeleteNGalertConfig(ctx) +func (f *ConfigurationApiHandler) RouteDeleteNGalertConfig(ctx *models.ReqContext) response.Response { + return f.handleRouteDeleteNGalertConfig(ctx) } -func (f *ForkedConfigurationApi) RouteGetAlertmanagers(ctx *models.ReqContext) response.Response { - return f.forkRouteGetAlertmanagers(ctx) +func (f *ConfigurationApiHandler) RouteGetAlertmanagers(ctx *models.ReqContext) response.Response { + return f.handleRouteGetAlertmanagers(ctx) } -func (f *ForkedConfigurationApi) RouteGetNGalertConfig(ctx *models.ReqContext) response.Response { - return f.forkRouteGetNGalertConfig(ctx) +func (f *ConfigurationApiHandler) RouteGetNGalertConfig(ctx *models.ReqContext) response.Response { + return f.handleRouteGetNGalertConfig(ctx) } -func (f *ForkedConfigurationApi) RoutePostNGalertConfig(ctx *models.ReqContext) response.Response { +func (f *ConfigurationApiHandler) RoutePostNGalertConfig(ctx *models.ReqContext) response.Response { + // Parse Request Body conf := apimodels.PostableNGalertConfig{} if err := web.Bind(ctx.Req, &conf); err != nil { return response.Error(http.StatusBadRequest, "bad request data", err) } - return f.forkRoutePostNGalertConfig(ctx, conf) + return f.handleRoutePostNGalertConfig(ctx, conf) } -func (api *API) RegisterConfigurationApiEndpoints(srv ConfigurationApiForkingService, m *metrics.API) { +func (api *API) RegisterConfigurationApiEndpoints(srv ConfigurationApi, m *metrics.API) { api.RouteRegister.Group("", func(group routing.RouteRegister) { group.Delete( 
toMacaronPath("/api/v1/ngalert/admin_config"), diff --git a/pkg/services/ngalert/api/generated_base_api_prometheus.go b/pkg/services/ngalert/api/generated_base_api_prometheus.go index f9018b7b6ff..05297027bc3 100644 --- a/pkg/services/ngalert/api/generated_base_api_prometheus.go +++ b/pkg/services/ngalert/api/generated_base_api_prometheus.go @@ -17,29 +17,31 @@ import ( "github.com/grafana/grafana/pkg/web" ) -type PrometheusApiForkingService interface { +type PrometheusApi interface { RouteGetAlertStatuses(*models.ReqContext) response.Response RouteGetGrafanaAlertStatuses(*models.ReqContext) response.Response RouteGetGrafanaRuleStatuses(*models.ReqContext) response.Response RouteGetRuleStatuses(*models.ReqContext) response.Response } -func (f *ForkedPrometheusApi) RouteGetAlertStatuses(ctx *models.ReqContext) response.Response { +func (f *PrometheusApiHandler) RouteGetAlertStatuses(ctx *models.ReqContext) response.Response { + // Parse Path Parameters datasourceUIDParam := web.Params(ctx.Req)[":DatasourceUID"] - return f.forkRouteGetAlertStatuses(ctx, datasourceUIDParam) + return f.handleRouteGetAlertStatuses(ctx, datasourceUIDParam) } -func (f *ForkedPrometheusApi) RouteGetGrafanaAlertStatuses(ctx *models.ReqContext) response.Response { - return f.forkRouteGetGrafanaAlertStatuses(ctx) +func (f *PrometheusApiHandler) RouteGetGrafanaAlertStatuses(ctx *models.ReqContext) response.Response { + return f.handleRouteGetGrafanaAlertStatuses(ctx) } -func (f *ForkedPrometheusApi) RouteGetGrafanaRuleStatuses(ctx *models.ReqContext) response.Response { - return f.forkRouteGetGrafanaRuleStatuses(ctx) +func (f *PrometheusApiHandler) RouteGetGrafanaRuleStatuses(ctx *models.ReqContext) response.Response { + return f.handleRouteGetGrafanaRuleStatuses(ctx) } -func (f *ForkedPrometheusApi) RouteGetRuleStatuses(ctx *models.ReqContext) response.Response { +func (f *PrometheusApiHandler) RouteGetRuleStatuses(ctx *models.ReqContext) response.Response { + // Parse Path Parameters 
datasourceUIDParam := web.Params(ctx.Req)[":DatasourceUID"] - return f.forkRouteGetRuleStatuses(ctx, datasourceUIDParam) + return f.handleRouteGetRuleStatuses(ctx, datasourceUIDParam) } -func (api *API) RegisterPrometheusApiEndpoints(srv PrometheusApiForkingService, m *metrics.API) { +func (api *API) RegisterPrometheusApiEndpoints(srv PrometheusApi, m *metrics.API) { api.RouteRegister.Group("", func(group routing.RouteRegister) { group.Get( toMacaronPath("/api/prometheus/{DatasourceUID}/api/v1/alerts"), diff --git a/pkg/services/ngalert/api/generated_base_api_provisioning.go b/pkg/services/ngalert/api/generated_base_api_provisioning.go index e3efd6a1713..484524672b1 100644 --- a/pkg/services/ngalert/api/generated_base_api_provisioning.go +++ b/pkg/services/ngalert/api/generated_base_api_provisioning.go @@ -18,7 +18,7 @@ import ( "github.com/grafana/grafana/pkg/web" ) -type ProvisioningApiForkingService interface { +type ProvisioningApi interface { RouteDeleteAlertRule(*models.ReqContext) response.Response RouteDeleteContactpoints(*models.ReqContext) response.Response RouteDeleteMuteTiming(*models.ReqContext) response.Response @@ -43,125 +43,147 @@ type ProvisioningApiForkingService interface { RouteResetPolicyTree(*models.ReqContext) response.Response } -func (f *ForkedProvisioningApi) RouteDeleteAlertRule(ctx *models.ReqContext) response.Response { +func (f *ProvisioningApiHandler) RouteDeleteAlertRule(ctx *models.ReqContext) response.Response { + // Parse Path Parameters uIDParam := web.Params(ctx.Req)[":UID"] - return f.forkRouteDeleteAlertRule(ctx, uIDParam) + return f.handleRouteDeleteAlertRule(ctx, uIDParam) } -func (f *ForkedProvisioningApi) RouteDeleteContactpoints(ctx *models.ReqContext) response.Response { +func (f *ProvisioningApiHandler) RouteDeleteContactpoints(ctx *models.ReqContext) response.Response { + // Parse Path Parameters uIDParam := web.Params(ctx.Req)[":UID"] - return f.forkRouteDeleteContactpoints(ctx, uIDParam) + return 
f.handleRouteDeleteContactpoints(ctx, uIDParam) } -func (f *ForkedProvisioningApi) RouteDeleteMuteTiming(ctx *models.ReqContext) response.Response { +func (f *ProvisioningApiHandler) RouteDeleteMuteTiming(ctx *models.ReqContext) response.Response { + // Parse Path Parameters nameParam := web.Params(ctx.Req)[":name"] - return f.forkRouteDeleteMuteTiming(ctx, nameParam) + return f.handleRouteDeleteMuteTiming(ctx, nameParam) } -func (f *ForkedProvisioningApi) RouteDeleteTemplate(ctx *models.ReqContext) response.Response { +func (f *ProvisioningApiHandler) RouteDeleteTemplate(ctx *models.ReqContext) response.Response { + // Parse Path Parameters nameParam := web.Params(ctx.Req)[":name"] - return f.forkRouteDeleteTemplate(ctx, nameParam) + return f.handleRouteDeleteTemplate(ctx, nameParam) } -func (f *ForkedProvisioningApi) RouteGetAlertRule(ctx *models.ReqContext) response.Response { +func (f *ProvisioningApiHandler) RouteGetAlertRule(ctx *models.ReqContext) response.Response { + // Parse Path Parameters uIDParam := web.Params(ctx.Req)[":UID"] - return f.forkRouteGetAlertRule(ctx, uIDParam) + return f.handleRouteGetAlertRule(ctx, uIDParam) } -func (f *ForkedProvisioningApi) RouteGetAlertRuleGroup(ctx *models.ReqContext) response.Response { +func (f *ProvisioningApiHandler) RouteGetAlertRuleGroup(ctx *models.ReqContext) response.Response { + // Parse Path Parameters folderUIDParam := web.Params(ctx.Req)[":FolderUID"] groupParam := web.Params(ctx.Req)[":Group"] - return f.forkRouteGetAlertRuleGroup(ctx, folderUIDParam, groupParam) + return f.handleRouteGetAlertRuleGroup(ctx, folderUIDParam, groupParam) } -func (f *ForkedProvisioningApi) RouteGetContactpoints(ctx *models.ReqContext) response.Response { - return f.forkRouteGetContactpoints(ctx) +func (f *ProvisioningApiHandler) RouteGetContactpoints(ctx *models.ReqContext) response.Response { + return f.handleRouteGetContactpoints(ctx) } -func (f *ForkedProvisioningApi) RouteGetMuteTiming(ctx *models.ReqContext) 
response.Response { +func (f *ProvisioningApiHandler) RouteGetMuteTiming(ctx *models.ReqContext) response.Response { + // Parse Path Parameters nameParam := web.Params(ctx.Req)[":name"] - return f.forkRouteGetMuteTiming(ctx, nameParam) + return f.handleRouteGetMuteTiming(ctx, nameParam) } -func (f *ForkedProvisioningApi) RouteGetMuteTimings(ctx *models.ReqContext) response.Response { - return f.forkRouteGetMuteTimings(ctx) +func (f *ProvisioningApiHandler) RouteGetMuteTimings(ctx *models.ReqContext) response.Response { + return f.handleRouteGetMuteTimings(ctx) } -func (f *ForkedProvisioningApi) RouteGetPolicyTree(ctx *models.ReqContext) response.Response { - return f.forkRouteGetPolicyTree(ctx) +func (f *ProvisioningApiHandler) RouteGetPolicyTree(ctx *models.ReqContext) response.Response { + return f.handleRouteGetPolicyTree(ctx) } -func (f *ForkedProvisioningApi) RouteGetTemplate(ctx *models.ReqContext) response.Response { +func (f *ProvisioningApiHandler) RouteGetTemplate(ctx *models.ReqContext) response.Response { + // Parse Path Parameters nameParam := web.Params(ctx.Req)[":name"] - return f.forkRouteGetTemplate(ctx, nameParam) + return f.handleRouteGetTemplate(ctx, nameParam) } -func (f *ForkedProvisioningApi) RouteGetTemplates(ctx *models.ReqContext) response.Response { - return f.forkRouteGetTemplates(ctx) +func (f *ProvisioningApiHandler) RouteGetTemplates(ctx *models.ReqContext) response.Response { + return f.handleRouteGetTemplates(ctx) } -func (f *ForkedProvisioningApi) RoutePostAlertRule(ctx *models.ReqContext) response.Response { +func (f *ProvisioningApiHandler) RoutePostAlertRule(ctx *models.ReqContext) response.Response { + // Parse Request Body conf := apimodels.ProvisionedAlertRule{} if err := web.Bind(ctx.Req, &conf); err != nil { return response.Error(http.StatusBadRequest, "bad request data", err) } - return f.forkRoutePostAlertRule(ctx, conf) + return f.handleRoutePostAlertRule(ctx, conf) } -func (f *ForkedProvisioningApi) 
RoutePostContactpoints(ctx *models.ReqContext) response.Response { +func (f *ProvisioningApiHandler) RoutePostContactpoints(ctx *models.ReqContext) response.Response { + // Parse Request Body conf := apimodels.EmbeddedContactPoint{} if err := web.Bind(ctx.Req, &conf); err != nil { return response.Error(http.StatusBadRequest, "bad request data", err) } - return f.forkRoutePostContactpoints(ctx, conf) + return f.handleRoutePostContactpoints(ctx, conf) } -func (f *ForkedProvisioningApi) RoutePostMuteTiming(ctx *models.ReqContext) response.Response { +func (f *ProvisioningApiHandler) RoutePostMuteTiming(ctx *models.ReqContext) response.Response { + // Parse Request Body conf := apimodels.MuteTimeInterval{} if err := web.Bind(ctx.Req, &conf); err != nil { return response.Error(http.StatusBadRequest, "bad request data", err) } - return f.forkRoutePostMuteTiming(ctx, conf) + return f.handleRoutePostMuteTiming(ctx, conf) } -func (f *ForkedProvisioningApi) RoutePutAlertRule(ctx *models.ReqContext) response.Response { +func (f *ProvisioningApiHandler) RoutePutAlertRule(ctx *models.ReqContext) response.Response { + // Parse Path Parameters uIDParam := web.Params(ctx.Req)[":UID"] + // Parse Request Body conf := apimodels.ProvisionedAlertRule{} if err := web.Bind(ctx.Req, &conf); err != nil { return response.Error(http.StatusBadRequest, "bad request data", err) } - return f.forkRoutePutAlertRule(ctx, conf, uIDParam) + return f.handleRoutePutAlertRule(ctx, conf, uIDParam) } -func (f *ForkedProvisioningApi) RoutePutAlertRuleGroup(ctx *models.ReqContext) response.Response { +func (f *ProvisioningApiHandler) RoutePutAlertRuleGroup(ctx *models.ReqContext) response.Response { + // Parse Path Parameters folderUIDParam := web.Params(ctx.Req)[":FolderUID"] groupParam := web.Params(ctx.Req)[":Group"] + // Parse Request Body conf := apimodels.AlertRuleGroupMetadata{} if err := web.Bind(ctx.Req, &conf); err != nil { return response.Error(http.StatusBadRequest, "bad request data", err) } - 
return f.forkRoutePutAlertRuleGroup(ctx, conf, folderUIDParam, groupParam) + return f.handleRoutePutAlertRuleGroup(ctx, conf, folderUIDParam, groupParam) } -func (f *ForkedProvisioningApi) RoutePutContactpoint(ctx *models.ReqContext) response.Response { +func (f *ProvisioningApiHandler) RoutePutContactpoint(ctx *models.ReqContext) response.Response { + // Parse Path Parameters uIDParam := web.Params(ctx.Req)[":UID"] + // Parse Request Body conf := apimodels.EmbeddedContactPoint{} if err := web.Bind(ctx.Req, &conf); err != nil { return response.Error(http.StatusBadRequest, "bad request data", err) } - return f.forkRoutePutContactpoint(ctx, conf, uIDParam) + return f.handleRoutePutContactpoint(ctx, conf, uIDParam) } -func (f *ForkedProvisioningApi) RoutePutMuteTiming(ctx *models.ReqContext) response.Response { +func (f *ProvisioningApiHandler) RoutePutMuteTiming(ctx *models.ReqContext) response.Response { + // Parse Path Parameters nameParam := web.Params(ctx.Req)[":name"] + // Parse Request Body conf := apimodels.MuteTimeInterval{} if err := web.Bind(ctx.Req, &conf); err != nil { return response.Error(http.StatusBadRequest, "bad request data", err) } - return f.forkRoutePutMuteTiming(ctx, conf, nameParam) + return f.handleRoutePutMuteTiming(ctx, conf, nameParam) } -func (f *ForkedProvisioningApi) RoutePutPolicyTree(ctx *models.ReqContext) response.Response { +func (f *ProvisioningApiHandler) RoutePutPolicyTree(ctx *models.ReqContext) response.Response { + // Parse Request Body conf := apimodels.Route{} if err := web.Bind(ctx.Req, &conf); err != nil { return response.Error(http.StatusBadRequest, "bad request data", err) } - return f.forkRoutePutPolicyTree(ctx, conf) + return f.handleRoutePutPolicyTree(ctx, conf) } -func (f *ForkedProvisioningApi) RoutePutTemplate(ctx *models.ReqContext) response.Response { +func (f *ProvisioningApiHandler) RoutePutTemplate(ctx *models.ReqContext) response.Response { + // Parse Path Parameters nameParam := web.Params(ctx.Req)[":name"] 
+ // Parse Request Body conf := apimodels.MessageTemplateContent{} if err := web.Bind(ctx.Req, &conf); err != nil { return response.Error(http.StatusBadRequest, "bad request data", err) } - return f.forkRoutePutTemplate(ctx, conf, nameParam) + return f.handleRoutePutTemplate(ctx, conf, nameParam) } -func (f *ForkedProvisioningApi) RouteResetPolicyTree(ctx *models.ReqContext) response.Response { - return f.forkRouteResetPolicyTree(ctx) +func (f *ProvisioningApiHandler) RouteResetPolicyTree(ctx *models.ReqContext) response.Response { + return f.handleRouteResetPolicyTree(ctx) } -func (api *API) RegisterProvisioningApiEndpoints(srv ProvisioningApiForkingService, m *metrics.API) { +func (api *API) RegisterProvisioningApiEndpoints(srv ProvisioningApi, m *metrics.API) { api.RouteRegister.Group("", func(group routing.RouteRegister) { group.Delete( toMacaronPath("/api/v1/provisioning/alert-rules/{UID}"), diff --git a/pkg/services/ngalert/api/generated_base_api_ruler.go b/pkg/services/ngalert/api/generated_base_api_ruler.go index 3c2ac808b9b..ac584615874 100644 --- a/pkg/services/ngalert/api/generated_base_api_ruler.go +++ b/pkg/services/ngalert/api/generated_base_api_ruler.go @@ -18,7 +18,7 @@ import ( "github.com/grafana/grafana/pkg/web" ) -type RulerApiForkingService interface { +type RulerApi interface { RouteDeleteGrafanaRuleGroupConfig(*models.ReqContext) response.Response RouteDeleteNamespaceGrafanaRulesConfig(*models.ReqContext) response.Response RouteDeleteNamespaceRulesConfig(*models.ReqContext) response.Response @@ -33,72 +33,85 @@ type RulerApiForkingService interface { RoutePostNameRulesConfig(*models.ReqContext) response.Response } -func (f *ForkedRulerApi) RouteDeleteGrafanaRuleGroupConfig(ctx *models.ReqContext) response.Response { +func (f *RulerApiHandler) RouteDeleteGrafanaRuleGroupConfig(ctx *models.ReqContext) response.Response { + // Parse Path Parameters namespaceParam := web.Params(ctx.Req)[":Namespace"] groupnameParam := 
web.Params(ctx.Req)[":Groupname"] - return f.forkRouteDeleteGrafanaRuleGroupConfig(ctx, namespaceParam, groupnameParam) + return f.handleRouteDeleteGrafanaRuleGroupConfig(ctx, namespaceParam, groupnameParam) } -func (f *ForkedRulerApi) RouteDeleteNamespaceGrafanaRulesConfig(ctx *models.ReqContext) response.Response { +func (f *RulerApiHandler) RouteDeleteNamespaceGrafanaRulesConfig(ctx *models.ReqContext) response.Response { + // Parse Path Parameters namespaceParam := web.Params(ctx.Req)[":Namespace"] - return f.forkRouteDeleteNamespaceGrafanaRulesConfig(ctx, namespaceParam) + return f.handleRouteDeleteNamespaceGrafanaRulesConfig(ctx, namespaceParam) } -func (f *ForkedRulerApi) RouteDeleteNamespaceRulesConfig(ctx *models.ReqContext) response.Response { +func (f *RulerApiHandler) RouteDeleteNamespaceRulesConfig(ctx *models.ReqContext) response.Response { + // Parse Path Parameters datasourceUIDParam := web.Params(ctx.Req)[":DatasourceUID"] namespaceParam := web.Params(ctx.Req)[":Namespace"] - return f.forkRouteDeleteNamespaceRulesConfig(ctx, datasourceUIDParam, namespaceParam) + return f.handleRouteDeleteNamespaceRulesConfig(ctx, datasourceUIDParam, namespaceParam) } -func (f *ForkedRulerApi) RouteDeleteRuleGroupConfig(ctx *models.ReqContext) response.Response { +func (f *RulerApiHandler) RouteDeleteRuleGroupConfig(ctx *models.ReqContext) response.Response { + // Parse Path Parameters datasourceUIDParam := web.Params(ctx.Req)[":DatasourceUID"] namespaceParam := web.Params(ctx.Req)[":Namespace"] groupnameParam := web.Params(ctx.Req)[":Groupname"] - return f.forkRouteDeleteRuleGroupConfig(ctx, datasourceUIDParam, namespaceParam, groupnameParam) + return f.handleRouteDeleteRuleGroupConfig(ctx, datasourceUIDParam, namespaceParam, groupnameParam) } -func (f *ForkedRulerApi) RouteGetGrafanaRuleGroupConfig(ctx *models.ReqContext) response.Response { +func (f *RulerApiHandler) RouteGetGrafanaRuleGroupConfig(ctx *models.ReqContext) response.Response { + // Parse Path 
Parameters namespaceParam := web.Params(ctx.Req)[":Namespace"] groupnameParam := web.Params(ctx.Req)[":Groupname"] - return f.forkRouteGetGrafanaRuleGroupConfig(ctx, namespaceParam, groupnameParam) + return f.handleRouteGetGrafanaRuleGroupConfig(ctx, namespaceParam, groupnameParam) } -func (f *ForkedRulerApi) RouteGetGrafanaRulesConfig(ctx *models.ReqContext) response.Response { - return f.forkRouteGetGrafanaRulesConfig(ctx) +func (f *RulerApiHandler) RouteGetGrafanaRulesConfig(ctx *models.ReqContext) response.Response { + return f.handleRouteGetGrafanaRulesConfig(ctx) } -func (f *ForkedRulerApi) RouteGetNamespaceGrafanaRulesConfig(ctx *models.ReqContext) response.Response { +func (f *RulerApiHandler) RouteGetNamespaceGrafanaRulesConfig(ctx *models.ReqContext) response.Response { + // Parse Path Parameters namespaceParam := web.Params(ctx.Req)[":Namespace"] - return f.forkRouteGetNamespaceGrafanaRulesConfig(ctx, namespaceParam) + return f.handleRouteGetNamespaceGrafanaRulesConfig(ctx, namespaceParam) } -func (f *ForkedRulerApi) RouteGetNamespaceRulesConfig(ctx *models.ReqContext) response.Response { +func (f *RulerApiHandler) RouteGetNamespaceRulesConfig(ctx *models.ReqContext) response.Response { + // Parse Path Parameters datasourceUIDParam := web.Params(ctx.Req)[":DatasourceUID"] namespaceParam := web.Params(ctx.Req)[":Namespace"] - return f.forkRouteGetNamespaceRulesConfig(ctx, datasourceUIDParam, namespaceParam) + return f.handleRouteGetNamespaceRulesConfig(ctx, datasourceUIDParam, namespaceParam) } -func (f *ForkedRulerApi) RouteGetRulegGroupConfig(ctx *models.ReqContext) response.Response { +func (f *RulerApiHandler) RouteGetRulegGroupConfig(ctx *models.ReqContext) response.Response { + // Parse Path Parameters datasourceUIDParam := web.Params(ctx.Req)[":DatasourceUID"] namespaceParam := web.Params(ctx.Req)[":Namespace"] groupnameParam := web.Params(ctx.Req)[":Groupname"] - return f.forkRouteGetRulegGroupConfig(ctx, datasourceUIDParam, namespaceParam, 
groupnameParam) + return f.handleRouteGetRulegGroupConfig(ctx, datasourceUIDParam, namespaceParam, groupnameParam) } -func (f *ForkedRulerApi) RouteGetRulesConfig(ctx *models.ReqContext) response.Response { +func (f *RulerApiHandler) RouteGetRulesConfig(ctx *models.ReqContext) response.Response { + // Parse Path Parameters datasourceUIDParam := web.Params(ctx.Req)[":DatasourceUID"] - return f.forkRouteGetRulesConfig(ctx, datasourceUIDParam) + return f.handleRouteGetRulesConfig(ctx, datasourceUIDParam) } -func (f *ForkedRulerApi) RoutePostNameGrafanaRulesConfig(ctx *models.ReqContext) response.Response { +func (f *RulerApiHandler) RoutePostNameGrafanaRulesConfig(ctx *models.ReqContext) response.Response { + // Parse Path Parameters namespaceParam := web.Params(ctx.Req)[":Namespace"] + // Parse Request Body conf := apimodels.PostableRuleGroupConfig{} if err := web.Bind(ctx.Req, &conf); err != nil { return response.Error(http.StatusBadRequest, "bad request data", err) } - return f.forkRoutePostNameGrafanaRulesConfig(ctx, conf, namespaceParam) + return f.handleRoutePostNameGrafanaRulesConfig(ctx, conf, namespaceParam) } -func (f *ForkedRulerApi) RoutePostNameRulesConfig(ctx *models.ReqContext) response.Response { +func (f *RulerApiHandler) RoutePostNameRulesConfig(ctx *models.ReqContext) response.Response { + // Parse Path Parameters datasourceUIDParam := web.Params(ctx.Req)[":DatasourceUID"] namespaceParam := web.Params(ctx.Req)[":Namespace"] + // Parse Request Body conf := apimodels.PostableRuleGroupConfig{} if err := web.Bind(ctx.Req, &conf); err != nil { return response.Error(http.StatusBadRequest, "bad request data", err) } - return f.forkRoutePostNameRulesConfig(ctx, conf, datasourceUIDParam, namespaceParam) + return f.handleRoutePostNameRulesConfig(ctx, conf, datasourceUIDParam, namespaceParam) } -func (api *API) RegisterRulerApiEndpoints(srv RulerApiForkingService, m *metrics.API) { +func (api *API) RegisterRulerApiEndpoints(srv RulerApi, m *metrics.API) { 
api.RouteRegister.Group("", func(group routing.RouteRegister) { group.Delete( toMacaronPath("/api/ruler/grafana/api/v1/rules/{Namespace}/{Groupname}"), diff --git a/pkg/services/ngalert/api/generated_base_api_testing.go b/pkg/services/ngalert/api/generated_base_api_testing.go index b181e73ba49..c25842a1e6f 100644 --- a/pkg/services/ngalert/api/generated_base_api_testing.go +++ b/pkg/services/ngalert/api/generated_base_api_testing.go @@ -18,36 +18,40 @@ import ( "github.com/grafana/grafana/pkg/web" ) -type TestingApiForkingService interface { +type TestingApi interface { RouteEvalQueries(*models.ReqContext) response.Response RouteTestRuleConfig(*models.ReqContext) response.Response RouteTestRuleGrafanaConfig(*models.ReqContext) response.Response } -func (f *ForkedTestingApi) RouteEvalQueries(ctx *models.ReqContext) response.Response { +func (f *TestingApiHandler) RouteEvalQueries(ctx *models.ReqContext) response.Response { + // Parse Request Body conf := apimodels.EvalQueriesPayload{} if err := web.Bind(ctx.Req, &conf); err != nil { return response.Error(http.StatusBadRequest, "bad request data", err) } - return f.forkRouteEvalQueries(ctx, conf) + return f.handleRouteEvalQueries(ctx, conf) } -func (f *ForkedTestingApi) RouteTestRuleConfig(ctx *models.ReqContext) response.Response { +func (f *TestingApiHandler) RouteTestRuleConfig(ctx *models.ReqContext) response.Response { + // Parse Path Parameters datasourceUIDParam := web.Params(ctx.Req)[":DatasourceUID"] + // Parse Request Body conf := apimodels.TestRulePayload{} if err := web.Bind(ctx.Req, &conf); err != nil { return response.Error(http.StatusBadRequest, "bad request data", err) } - return f.forkRouteTestRuleConfig(ctx, conf, datasourceUIDParam) + return f.handleRouteTestRuleConfig(ctx, conf, datasourceUIDParam) } -func (f *ForkedTestingApi) RouteTestRuleGrafanaConfig(ctx *models.ReqContext) response.Response { +func (f *TestingApiHandler) RouteTestRuleGrafanaConfig(ctx *models.ReqContext) response.Response { + 
// Parse Request Body conf := apimodels.TestRulePayload{} if err := web.Bind(ctx.Req, &conf); err != nil { return response.Error(http.StatusBadRequest, "bad request data", err) } - return f.forkRouteTestRuleGrafanaConfig(ctx, conf) + return f.handleRouteTestRuleGrafanaConfig(ctx, conf) } -func (api *API) RegisterTestingApiEndpoints(srv TestingApiForkingService, m *metrics.API) { +func (api *API) RegisterTestingApiEndpoints(srv TestingApi, m *metrics.API) { api.RouteRegister.Group("", func(group routing.RouteRegister) { group.Post( toMacaronPath("/api/v1/eval"), diff --git a/pkg/services/ngalert/api/provisioning.go b/pkg/services/ngalert/api/provisioning.go new file mode 100644 index 00000000000..192f8c1c3f0 --- /dev/null +++ b/pkg/services/ngalert/api/provisioning.go @@ -0,0 +1,105 @@ +package api + +import ( + "github.com/grafana/grafana/pkg/api/response" + "github.com/grafana/grafana/pkg/models" + apimodels "github.com/grafana/grafana/pkg/services/ngalert/api/tooling/definitions" +) + +type ProvisioningApiHandler struct { + svc *ProvisioningSrv +} + +func NewProvisioningApi(svc *ProvisioningSrv) *ProvisioningApiHandler { + return &ProvisioningApiHandler{ + svc: svc, + } +} + +func (f *ProvisioningApiHandler) handleRouteGetPolicyTree(ctx *models.ReqContext) response.Response { + return f.svc.RouteGetPolicyTree(ctx) +} + +func (f *ProvisioningApiHandler) handleRoutePutPolicyTree(ctx *models.ReqContext, route apimodels.Route) response.Response { + return f.svc.RoutePutPolicyTree(ctx, route) +} + +func (f *ProvisioningApiHandler) handleRouteGetContactpoints(ctx *models.ReqContext) response.Response { + return f.svc.RouteGetContactPoints(ctx) +} + +func (f *ProvisioningApiHandler) handleRoutePostContactpoints(ctx *models.ReqContext, cp apimodels.EmbeddedContactPoint) response.Response { + return f.svc.RoutePostContactPoint(ctx, cp) +} + +func (f *ProvisioningApiHandler) handleRoutePutContactpoint(ctx *models.ReqContext, cp apimodels.EmbeddedContactPoint, UID string) 
response.Response { + return f.svc.RoutePutContactPoint(ctx, cp, UID) +} + +func (f *ProvisioningApiHandler) handleRouteDeleteContactpoints(ctx *models.ReqContext, UID string) response.Response { + return f.svc.RouteDeleteContactPoint(ctx, UID) +} + +func (f *ProvisioningApiHandler) handleRouteGetTemplates(ctx *models.ReqContext) response.Response { + return f.svc.RouteGetTemplates(ctx) +} + +func (f *ProvisioningApiHandler) handleRouteGetTemplate(ctx *models.ReqContext, name string) response.Response { + return f.svc.RouteGetTemplate(ctx, name) +} + +func (f *ProvisioningApiHandler) handleRoutePutTemplate(ctx *models.ReqContext, body apimodels.MessageTemplateContent, name string) response.Response { + return f.svc.RoutePutTemplate(ctx, body, name) +} + +func (f *ProvisioningApiHandler) handleRouteDeleteTemplate(ctx *models.ReqContext, name string) response.Response { + return f.svc.RouteDeleteTemplate(ctx, name) +} + +func (f *ProvisioningApiHandler) handleRouteGetMuteTiming(ctx *models.ReqContext, name string) response.Response { + return f.svc.RouteGetMuteTiming(ctx, name) +} + +func (f *ProvisioningApiHandler) handleRouteGetMuteTimings(ctx *models.ReqContext) response.Response { + return f.svc.RouteGetMuteTimings(ctx) +} + +func (f *ProvisioningApiHandler) handleRoutePostMuteTiming(ctx *models.ReqContext, mt apimodels.MuteTimeInterval) response.Response { + return f.svc.RoutePostMuteTiming(ctx, mt) +} + +func (f *ProvisioningApiHandler) handleRoutePutMuteTiming(ctx *models.ReqContext, mt apimodels.MuteTimeInterval, name string) response.Response { + return f.svc.RoutePutMuteTiming(ctx, mt, name) +} + +func (f *ProvisioningApiHandler) handleRouteDeleteMuteTiming(ctx *models.ReqContext, name string) response.Response { + return f.svc.RouteDeleteMuteTiming(ctx, name) +} + +func (f *ProvisioningApiHandler) handleRouteGetAlertRule(ctx *models.ReqContext, UID string) response.Response { + return f.svc.RouteRouteGetAlertRule(ctx, UID) +} + +func (f 
*ProvisioningApiHandler) handleRoutePostAlertRule(ctx *models.ReqContext, ar apimodels.ProvisionedAlertRule) response.Response { + return f.svc.RoutePostAlertRule(ctx, ar) +} + +func (f *ProvisioningApiHandler) handleRoutePutAlertRule(ctx *models.ReqContext, ar apimodels.ProvisionedAlertRule, UID string) response.Response { + return f.svc.RoutePutAlertRule(ctx, ar, UID) +} + +func (f *ProvisioningApiHandler) handleRouteDeleteAlertRule(ctx *models.ReqContext, UID string) response.Response { + return f.svc.RouteDeleteAlertRule(ctx, UID) +} + +func (f *ProvisioningApiHandler) handleRouteResetPolicyTree(ctx *models.ReqContext) response.Response { + return f.svc.RouteResetPolicyTree(ctx) +} + +func (f *ProvisioningApiHandler) handleRouteGetAlertRuleGroup(ctx *models.ReqContext, folder, group string) response.Response { + return f.svc.RouteGetAlertRuleGroup(ctx, folder, group) +} + +func (f *ProvisioningApiHandler) handleRoutePutAlertRuleGroup(ctx *models.ReqContext, ag apimodels.AlertRuleGroupMetadata, folder, group string) response.Response { + return f.svc.RoutePutAlertRuleGroup(ctx, ag, folder, group) +} diff --git a/pkg/services/ngalert/api/testing_api.go b/pkg/services/ngalert/api/testing_api.go new file mode 100644 index 00000000000..7a84a1e1bf0 --- /dev/null +++ b/pkg/services/ngalert/api/testing_api.go @@ -0,0 +1,30 @@ +package api + +import ( + "github.com/grafana/grafana/pkg/api/response" + "github.com/grafana/grafana/pkg/models" + apimodels "github.com/grafana/grafana/pkg/services/ngalert/api/tooling/definitions" +) + +// TestingApiHandler always forwards requests to grafana backend +type TestingApiHandler struct { + svc *TestingApiSrv +} + +func NewTestingApi(svc *TestingApiSrv) *TestingApiHandler { + return &TestingApiHandler{ + svc: svc, + } +} + +func (f *TestingApiHandler) handleRouteTestRuleConfig(c *models.ReqContext, body apimodels.TestRulePayload, dsUID string) response.Response { + return f.svc.RouteTestRuleConfig(c, body, dsUID) +} + +func (f 
*TestingApiHandler) handleRouteTestRuleGrafanaConfig(c *models.ReqContext, body apimodels.TestRulePayload) response.Response { + return f.svc.RouteTestGrafanaRuleConfig(c, body) +} + +func (f *TestingApiHandler) handleRouteEvalQueries(c *models.ReqContext, body apimodels.EvalQueriesPayload) response.Response { + return f.svc.RouteEvalQueries(c, body) +} diff --git a/pkg/services/ngalert/api/tooling/Makefile b/pkg/services/ngalert/api/tooling/Makefile index 279f0632116..2cf406d766e 100644 --- a/pkg/services/ngalert/api/tooling/Makefile +++ b/pkg/services/ngalert/api/tooling/Makefile @@ -65,4 +65,6 @@ serve: post.json serve-stable: api.json docker run --rm -p 80:8080 -v $$(pwd):/tmp -e SWAGGER_FILE=/tmp/$(<) swaggerapi/swagger-editor -all: post.json validate api.json validate-stable swagger-codegen-api fix copy-files clean +gen: swagger-codegen-api fix copy-files clean + +all: post.json api.json gen diff --git a/pkg/services/ngalert/api/tooling/api.json b/pkg/services/ngalert/api/tooling/api.json index 4b5b7c14432..e59a028322e 100644 --- a/pkg/services/ngalert/api/tooling/api.json +++ b/pkg/services/ngalert/api/tooling/api.json @@ -2702,6 +2702,7 @@ "type": "object" }, "URL": { + "description": "The general form represented is:\n\n[scheme:][//[userinfo@]host][/]path[?query][#fragment]\n\nURLs that do not start with a slash after the scheme are interpreted as:\n\nscheme:opaque[?query][#fragment]\n\nNote that the Path field is stored in decoded form: /%47%6f%2f becomes /Go/.\nA consequence is that it is impossible to tell which slashes in the Path were\nslashes in the raw URL and which were %2f. This distinction is rarely important,\nbut when it is, the code should use RawPath, an optional field which only gets\nset if the default encoding is different from Path.\n\nURL's String method uses the EscapedPath method to obtain the path. 
See the\nEscapedPath method for more details.", "properties": { "ForceQuery": { "type": "boolean" @@ -2734,7 +2735,7 @@ "$ref": "#/definitions/Userinfo" } }, - "title": "URL is a custom URL type that allows validation at configuration load time.", + "title": "A URL represents a parsed URL (technically, a URI reference).", "type": "object" }, "Userinfo": { @@ -3295,6 +3296,7 @@ "type": "object" }, "receiver": { + "description": "Receiver receiver", "properties": { "name": { "description": "name", diff --git a/pkg/services/ngalert/api/tooling/spec.json b/pkg/services/ngalert/api/tooling/spec.json index 289e18c7b3f..19ec03f7f72 100644 --- a/pkg/services/ngalert/api/tooling/spec.json +++ b/pkg/services/ngalert/api/tooling/spec.json @@ -5289,6 +5289,7 @@ } }, "alertGroup": { + "description": "AlertGroup alert group", "type": "object", "required": [ "alerts", @@ -5487,7 +5488,6 @@ "$ref": "#/definitions/gettableAlerts" }, "gettableSilence": { - "description": "GettableSilence gettable silence", "type": "object", "required": [ "comment", @@ -5537,7 +5537,6 @@ "$ref": "#/definitions/gettableSilence" }, "gettableSilences": { - "description": "GettableSilences gettable silences", "type": "array", "items": { "$ref": "#/definitions/gettableSilence" @@ -5688,6 +5687,7 @@ "$ref": "#/definitions/postableSilence" }, "receiver": { + "description": "Receiver receiver", "type": "object", "required": [ "name" diff --git a/pkg/services/ngalert/api/tooling/swagger-codegen/templates/controller-api.mustache b/pkg/services/ngalert/api/tooling/swagger-codegen/templates/controller-api.mustache index 0ab26e5596a..34dfce4f1ad 100644 --- a/pkg/services/ngalert/api/tooling/swagger-codegen/templates/controller-api.mustache +++ b/pkg/services/ngalert/api/tooling/swagger-codegen/templates/controller-api.mustache @@ -13,26 +13,25 @@ import ( "github.com/grafana/grafana/pkg/middleware" ) -type {{classname}}ForkingService interface { {{#operation}} +type {{classname}} interface { {{#operation}} 
{{nickname}}(*models.ReqContext) response.Response{{/operation}} } {{#operations}}{{#operation}} -func (f *Forked{{classname}}) {{nickname}}(ctx *models.ReqContext) response.Response { - {{#pathParams}} - {{paramName}}Param := web.Params(ctx.Req)[":{{baseName}}"] - {{/pathParams}} +func (f *{{classname}}Handler) {{nickname}}(ctx *models.ReqContext) response.Response { {{#hasPathParams}} + // Parse Path Parameters{{/hasPathParams}}{{#pathParams}} + {{paramName}}Param := web.Params(ctx.Req)[":{{baseName}}"]{{/pathParams}} {{#bodyParams}} + // Parse Request Body conf := apimodels.{{dataType}}{} if err := web.Bind(ctx.Req, &conf); err != nil { return response.Error(http.StatusBadRequest, "bad request data", err) } - {{/bodyParams}} - return f.fork{{nickname}}(ctx{{#bodyParams}}, conf{{/bodyParams}}{{#pathParams}}, {{paramName}}Param{{/pathParams}}) + {{/bodyParams}}return f.handle{{nickname}}(ctx{{#bodyParams}}, conf{{/bodyParams}}{{#pathParams}}, {{paramName}}Param{{/pathParams}}) } {{/operation}}{{/operations}} -func (api *API) Register{{classname}}Endpoints(srv {{classname}}ForkingService, m *metrics.API) { +func (api *API) Register{{classname}}Endpoints(srv {{classname}}, m *metrics.API) { api.RouteRegister.Group("", func(group routing.RouteRegister){ {{#operations}}{{#operation}} group.{{httpMethod}}( toMacaronPath("{{{path}}}"), @@ -46,4 +45,4 @@ func (api *API) Register{{classname}}Endpoints(srv {{classname}}ForkingService, ){{/operation}}{{/operations}} }, middleware.ReqSignedIn) }{{#operation}} -{{/operation}}{{/operations}} \ No newline at end of file +{{/operation}}{{/operations}} From 3617eac5f3d2895433c08448f536674343b55d0c Mon Sep 17 00:00:00 2001 From: Joey Tawadrous <90795735+joey-grafana@users.noreply.github.com> Date: Mon, 18 Jul 2022 08:08:35 +0100 Subject: [PATCH 026/116] Traces: Add more template variables in Tempo & Zipkin (#52306) * Add support for more vars in Tempo * Tests for Tempo vars * Tempo ds vars * Tempo ds vars test * Zipkin template 
var * Zipkin tests --- .../tempo/QueryEditor/NativeSearch.test.tsx | 45 ++++++++++++++++++- .../tempo/QueryEditor/NativeSearch.tsx | 12 ++++- .../datasource/tempo/datasource.test.ts | 8 +++- .../plugins/datasource/tempo/datasource.ts | 2 + .../datasource/zipkin/datasource.test.ts | 13 +++++- .../plugins/datasource/zipkin/datasource.ts | 34 ++++++++++++-- 6 files changed, 103 insertions(+), 11 deletions(-) diff --git a/public/app/plugins/datasource/tempo/QueryEditor/NativeSearch.test.tsx b/public/app/plugins/datasource/tempo/QueryEditor/NativeSearch.test.tsx index dc01b7262f4..c59b44d8a86 100644 --- a/public/app/plugins/datasource/tempo/QueryEditor/NativeSearch.test.tsx +++ b/public/app/plugins/datasource/tempo/QueryEditor/NativeSearch.test.tsx @@ -30,7 +30,17 @@ jest.mock('../language_provider', () => { }); }); -const mockQuery = { +jest.mock('@grafana/runtime', () => ({ + ...jest.requireActual('@grafana/runtime'), + getTemplateSrv: () => ({ + replace: jest.fn(), + containsTemplate: (val: string): boolean => { + return val.includes('$'); + }, + }), +})); + +let mockQuery = { refId: 'A', queryType: 'nativeSearch', key: 'Q-595a9bbc-2a25-49a7-9249-a52a0a475d83-0', @@ -117,7 +127,38 @@ describe('NativeSearch', () => { expect(option).toBeDefined(); await user.type(select, 'a'); - option = await screen.findByText('No options found'); + option = await screen.findByText('Hit enter to add'); expect(option).toBeDefined(); }); + + it('should add variable to select menu options', async () => { + mockQuery = { + ...mockQuery, + refId: '121314', + serviceName: '$service', + spanName: '$span', + }; + + render( + {}} onRunQuery={() => {}} /> + ); + + const asyncServiceSelect = screen.getByRole('combobox', { name: 'select-service-name' }); + expect(asyncServiceSelect).toBeInTheDocument(); + await user.click(asyncServiceSelect); + jest.advanceTimersByTime(3000); + + await user.type(asyncServiceSelect, '$'); + var serviceOption = await screen.findByText('$service'); + 
expect(serviceOption).toBeDefined(); + + const asyncSpanSelect = screen.getByRole('combobox', { name: 'select-span-name' }); + expect(asyncSpanSelect).toBeInTheDocument(); + await user.click(asyncSpanSelect); + jest.advanceTimersByTime(3000); + + await user.type(asyncSpanSelect, '$'); + var operationOption = await screen.findByText('$span'); + expect(operationOption).toBeDefined(); + }); }); diff --git a/public/app/plugins/datasource/tempo/QueryEditor/NativeSearch.tsx b/public/app/plugins/datasource/tempo/QueryEditor/NativeSearch.tsx index 0fdb8e983fa..d73eae5d4f9 100644 --- a/public/app/plugins/datasource/tempo/QueryEditor/NativeSearch.tsx +++ b/public/app/plugins/datasource/tempo/QueryEditor/NativeSearch.tsx @@ -3,7 +3,7 @@ import Prism from 'prismjs'; import React, { useCallback, useState, useEffect, useMemo } from 'react'; import { Node } from 'slate'; -import { GrafanaTheme2, isValidGoDuration, SelectableValue } from '@grafana/data'; +import { GrafanaTheme2, isValidGoDuration, SelectableValue, toOption } from '@grafana/data'; import { FetchError, getTemplateSrv, isFetchError, TemplateSrv } from '@grafana/runtime'; import { InlineFieldRow, @@ -90,7 +90,13 @@ const NativeSearch = ({ datasource, query, onChange, onBlur, onRunQuery }: Props const fetchOptions = async () => { try { const [services, spans] = await Promise.all([loadOptions('serviceName'), loadOptions('spanName')]); + if (query.serviceName && getTemplateSrv().containsTemplate(query.serviceName)) { + services.push(toOption(query.serviceName)); + } setServiceOptions(services); + if (query.spanName && getTemplateSrv().containsTemplate(query.spanName)) { + spans.push(toOption(query.spanName)); + } setSpanOptions(spans); } catch (error) { // Display message if Tempo is connected but search 404's @@ -102,7 +108,7 @@ const NativeSearch = ({ datasource, query, onChange, onBlur, onRunQuery }: Props } }; fetchOptions(); - }, [languageProvider, loadOptions]); + }, [languageProvider, loadOptions, 
query.serviceName, query.spanName]); useEffect(() => { const fetchTags = async () => { @@ -161,6 +167,7 @@ const NativeSearch = ({ datasource, query, onChange, onBlur, onRunQuery }: Props isClearable onKeyDown={onKeyDown} aria-label={'select-service-name'} + allowCustomValue={true} /> @@ -184,6 +191,7 @@ const NativeSearch = ({ datasource, query, onChange, onBlur, onRunQuery }: Props isClearable onKeyDown={onKeyDown} aria-label={'select-span-name'} + allowCustomValue={true} /> diff --git a/public/app/plugins/datasource/tempo/datasource.test.ts b/public/app/plugins/datasource/tempo/datasource.test.ts index ec834e35a5b..704b408bea4 100644 --- a/public/app/plugins/datasource/tempo/datasource.test.ts +++ b/public/app/plugins/datasource/tempo/datasource.test.ts @@ -77,9 +77,11 @@ describe('Tempo data source', () => { const queries = ds.interpolateVariablesInQueries([getQuery()], { interpolationVar: { text: text, value: text }, }); - expect(templateSrv.replace).toBeCalledTimes(5); + expect(templateSrv.replace).toBeCalledTimes(7); expect(queries[0].linkedQuery?.expr).toBe(text); expect(queries[0].query).toBe(text); + expect(queries[0].serviceName).toBe(text); + expect(queries[0].spanName).toBe(text); expect(queries[0].search).toBe(text); expect(queries[0].minDuration).toBe(text); expect(queries[0].maxDuration).toBe(text); @@ -94,9 +96,11 @@ describe('Tempo data source', () => { const resp = ds.applyTemplateVariables(getQuery(), { interpolationVar: { text: text, value: text }, }); - expect(templateSrv.replace).toBeCalledTimes(5); + expect(templateSrv.replace).toBeCalledTimes(7); expect(resp.linkedQuery?.expr).toBe(text); expect(resp.query).toBe(text); + expect(resp.serviceName).toBe(text); + expect(resp.spanName).toBe(text); expect(resp.search).toBe(text); expect(resp.minDuration).toBe(text); expect(resp.maxDuration).toBe(text); diff --git a/public/app/plugins/datasource/tempo/datasource.ts b/public/app/plugins/datasource/tempo/datasource.ts index 93308043580..3fa28b7cd99 
100644 --- a/public/app/plugins/datasource/tempo/datasource.ts +++ b/public/app/plugins/datasource/tempo/datasource.ts @@ -296,6 +296,8 @@ export class TempoDatasource extends DataSourceWithBackend ({ describe('ZipkinDatasource', () => { describe('query', () => { + const templateSrv: TemplateSrv = { + replace: jest.fn(), + getVariables: jest.fn(), + containsTemplate: jest.fn(), + updateTimeRange: jest.fn(), + }; + it('runs query', async () => { setupBackendSrv(zipkinResponse); - const ds = new ZipkinDatasource(defaultSettings); + const ds = new ZipkinDatasource(defaultSettings, templateSrv); await expect(ds.query({ targets: [{ query: '12345' }] } as any)).toEmitValuesWith((val) => { expect(val[0].data[0].fields).toMatchObject(traceFrameFields); }); }); + it('runs query with traceId that includes special characters', async () => { setupBackendSrv(zipkinResponse); - const ds = new ZipkinDatasource(defaultSettings); + const ds = new ZipkinDatasource(defaultSettings, templateSrv); await expect(ds.query({ targets: [{ query: 'a/b' }] } as any)).toEmitValuesWith((val) => { expect(val[0].data[0].fields).toMatchObject(traceFrameFields); }); diff --git a/public/app/plugins/datasource/zipkin/datasource.ts b/public/app/plugins/datasource/zipkin/datasource.ts index 4c088c4548c..cf9ed51c03f 100644 --- a/public/app/plugins/datasource/zipkin/datasource.ts +++ b/public/app/plugins/datasource/zipkin/datasource.ts @@ -9,8 +9,9 @@ import { DataSourceJsonData, FieldType, MutableDataFrame, + ScopedVars, } from '@grafana/data'; -import { BackendSrvRequest, FetchResponse, getBackendSrv } from '@grafana/runtime'; +import { BackendSrvRequest, FetchResponse, getBackendSrv, getTemplateSrv, TemplateSrv } from '@grafana/runtime'; import { SpanBarOptions } from '@jaegertracing/jaeger-ui-components'; import { NodeGraphOptions } from 'app/core/components/NodeGraphSettings'; @@ -29,7 +30,10 @@ export class ZipkinDatasource extends DataSourceApi uploadedJson: string | ArrayBuffer | null = null; 
nodeGraph?: NodeGraphOptions; spanBar?: SpanBarOptions; - constructor(private instanceSettings: DataSourceInstanceSettings) { + constructor( + private instanceSettings: DataSourceInstanceSettings, + private readonly templateSrv: TemplateSrv = getTemplateSrv() + ) { super(instanceSettings); this.nodeGraph = instanceSettings.jsonData.nodeGraph; } @@ -50,7 +54,8 @@ export class ZipkinDatasource extends DataSourceApi } if (target.query) { - return this.request(`${apiPrefix}/trace/${encodeURIComponent(target.query)}`).pipe( + const query = this.applyVariables(target, options.scopedVars); + return this.request(`${apiPrefix}/trace/${encodeURIComponent(query.query)}`).pipe( map((res) => responseToDataQueryResponse(res, this.nodeGraph?.enabled)) ); } @@ -71,6 +76,29 @@ export class ZipkinDatasource extends DataSourceApi return query.query; } + interpolateVariablesInQueries(queries: ZipkinQuery[], scopedVars: ScopedVars): ZipkinQuery[] { + if (!queries || queries.length === 0) { + return []; + } + + return queries.map((query) => { + return { + ...query, + datasource: this.getRef(), + ...this.applyVariables(query, scopedVars), + }; + }); + } + + applyVariables(query: ZipkinQuery, scopedVars: ScopedVars) { + const expandedQuery = { ...query }; + + return { + ...expandedQuery, + query: this.templateSrv.replace(query.query ?? 
'', scopedVars), + }; + } + private request( apiUrl: string, data?: any, From 332639ce43a51e3f226eb4d924a515a624438852 Mon Sep 17 00:00:00 2001 From: eledobleefe Date: Mon, 18 Jul 2022 11:02:18 +0200 Subject: [PATCH 027/116] PanelEdit: Hide multi-/all-select datasource variables in datasource picker (#52142) --- public/app/features/plugins/datasource_srv.ts | 13 +++++++------ public/app/features/variables/guard.ts | 5 +++++ 2 files changed, 12 insertions(+), 6 deletions(-) diff --git a/public/app/features/plugins/datasource_srv.ts b/public/app/features/plugins/datasource_srv.ts index a60ad1b8f91..41a4fc7b4a2 100644 --- a/public/app/features/plugins/datasource_srv.ts +++ b/public/app/features/plugins/datasource_srv.ts @@ -24,7 +24,7 @@ import { instanceSettings as expressionInstanceSettings, } from 'app/features/expressions/ExpressionDatasource'; -import { DataSourceVariableModel } from '../variables/types'; +import { isDataSource } from '../variables/guard'; import { importDataSourcePlugin } from './plugin_loader'; @@ -245,11 +245,12 @@ export class DatasourceSrv implements DataSourceService { }); if (filters.variables) { - for (const variable of this.templateSrv.getVariables().filter((variable) => variable.type === 'datasource')) { - const dsVar = variable as DataSourceVariableModel; - const first = dsVar.current.value === 'default' ? this.defaultName : dsVar.current.value; - const dsName = first as unknown as string; - const dsSettings = this.settingsMapByName[dsName]; + for (const variable of this.templateSrv.getVariables()) { + if (!isDataSource(variable) || variable.multi || variable.includeAll) { + continue; + } + const dsName = variable.current.value === 'default' ? 
this.defaultName : variable.current.value; + const dsSettings = !Array.isArray(dsName) && this.settingsMapByName[dsName]; if (dsSettings) { const key = `$\{${variable.name}\}`; diff --git a/public/app/features/variables/guard.ts b/public/app/features/variables/guard.ts index 0efb21229b5..5d8eecababe 100644 --- a/public/app/features/variables/guard.ts +++ b/public/app/features/variables/guard.ts @@ -25,6 +25,7 @@ import { VariableQueryEditorProps, VariableWithMultiSupport, VariableWithOptions, + DataSourceVariableModel, } from './types'; export const isQuery = (model: VariableModel): model is QueryVariableModel => { @@ -39,6 +40,10 @@ export const isConstant = (model: VariableModel): model is ConstantVariableModel return model.type === 'constant'; }; +export const isDataSource = (model: VariableModel): model is DataSourceVariableModel => { + return model.type === 'datasource'; +}; + export const isMulti = (model: VariableModel): model is VariableWithMultiSupport => { const withMulti = model as VariableWithMultiSupport; return withMulti.hasOwnProperty('multi') && typeof withMulti.multi === 'boolean'; From fb379ae43672e4775b26f997023ec232bfb17fe0 Mon Sep 17 00:00:00 2001 From: Sofia Papagiannaki <1632407+papagian@users.noreply.github.com> Date: Mon, 18 Jul 2022 12:26:35 +0300 Subject: [PATCH 028/116] Chore: Introduce playlist service (#52252) * Store: Introduce playlist service * Integrate playlist service * Update swagger --- pkg/api/docs/definitions/playlists.go | 16 +- pkg/api/http_server.go | 5 +- pkg/api/playlist.go | 67 +++--- pkg/server/wire.go | 2 + pkg/services/playlist/model.go | 95 ++++++++ pkg/services/playlist/playlist.go | 14 ++ .../playlist/playlistimpl/playlist.go | 44 ++++ pkg/services/playlist/playlistimpl/store.go | 226 ++++++++++++++++++ .../playlist/playlistimpl/store_test.go | 82 +++++++ pkg/services/playlist/playlisttest/fake.go | 43 ++++ pkg/services/sqlstore/store.go | 6 + public/api-merged.json | 37 ++- public/api-spec.json | 8 +- 13 files 
changed, 587 insertions(+), 58 deletions(-) create mode 100644 pkg/services/playlist/model.go create mode 100644 pkg/services/playlist/playlist.go create mode 100644 pkg/services/playlist/playlistimpl/playlist.go create mode 100644 pkg/services/playlist/playlistimpl/store.go create mode 100644 pkg/services/playlist/playlistimpl/store_test.go create mode 100644 pkg/services/playlist/playlisttest/fake.go diff --git a/pkg/api/docs/definitions/playlists.go b/pkg/api/docs/definitions/playlists.go index 321a137601b..03989c4d153 100644 --- a/pkg/api/docs/definitions/playlists.go +++ b/pkg/api/docs/definitions/playlists.go @@ -2,7 +2,7 @@ package definitions import ( "github.com/grafana/grafana/pkg/api/dtos" - "github.com/grafana/grafana/pkg/models" + "github.com/grafana/grafana/pkg/services/playlist" ) // swagger:route GET /playlists playlists searchPlaylists @@ -121,7 +121,7 @@ type DeletePlaylistParams struct { type UpdatePlaylistParams struct { // in:body // required:true - Body models.UpdatePlaylistCommand + Body playlist.UpdatePlaylistCommand // in:path // required:true UID string `json:"uid"` @@ -131,28 +131,28 @@ type UpdatePlaylistParams struct { type CreatePlaylistParams struct { // in:body // required:true - Body models.CreatePlaylistCommand + Body playlist.CreatePlaylistCommand } // swagger:response searchPlaylistsResponse type SearchPlaylistsResponse struct { // The response message // in: body - Body models.Playlists `json:"body"` + Body playlist.Playlists `json:"body"` } // swagger:response getPlaylistResponse type GetPlaylistResponse struct { // The response message // in: body - Body *models.PlaylistDTO `json:"body"` + Body *playlist.PlaylistDTO `json:"body"` } // swagger:response getPlaylistItemsResponse type GetPlaylistItemsResponse struct { // The response message // in: body - Body []models.PlaylistItemDTO `json:"body"` + Body []playlist.PlaylistItemDTO `json:"body"` } // swagger:response getPlaylistDashboardsResponse @@ -166,12 +166,12 @@ type 
GetPlaylistDashboardsResponse struct { type UpdatePlaylistResponseResponse struct { // The response message // in: body - Body *models.PlaylistDTO `json:"body"` + Body *playlist.PlaylistDTO `json:"body"` } // swagger:response createPlaylistResponse type CreatePlaylistResponse struct { // The response message // in: body - Body *models.Playlist `json:"body"` + Body *playlist.Playlist `json:"body"` } diff --git a/pkg/api/http_server.go b/pkg/api/http_server.go index 8152dbf0004..532a0b201f1 100644 --- a/pkg/api/http_server.go +++ b/pkg/api/http_server.go @@ -58,6 +58,7 @@ import ( "github.com/grafana/grafana/pkg/services/login" "github.com/grafana/grafana/pkg/services/ngalert" "github.com/grafana/grafana/pkg/services/notifications" + "github.com/grafana/grafana/pkg/services/playlist" "github.com/grafana/grafana/pkg/services/plugindashboards" pluginSettings "github.com/grafana/grafana/pkg/services/pluginsettings/service" pref "github.com/grafana/grafana/pkg/services/preference" @@ -168,6 +169,7 @@ type HTTPServer struct { dashboardVersionService dashver.Service PublicDashboardsApi *publicdashboardsApi.Api starService star.Service + playlistService playlist.Service CoremodelRegistry *registry.Generic CoremodelStaticRegistry *registry.Static kvStore kvstore.KVStore @@ -206,7 +208,7 @@ func ProvideHTTPServer(opts ServerOptions, cfg *setting.Cfg, routeRegister routi avatarCacheServer *avatar.AvatarCacheServer, preferenceService pref.Service, entityEventsService store.EntityEventsService, teamsPermissionsService accesscontrol.TeamPermissionsService, folderPermissionsService accesscontrol.FolderPermissionsService, dashboardPermissionsService accesscontrol.DashboardPermissionsService, dashboardVersionService dashver.Service, - starService star.Service, csrfService csrf.Service, coremodelRegistry *registry.Generic, coremodelStaticRegistry *registry.Static, + starService star.Service, playlistService playlist.Service, csrfService csrf.Service, coremodelRegistry 
*registry.Generic, coremodelStaticRegistry *registry.Static, kvStore kvstore.KVStore, secretsMigrator secrets.Migrator, remoteSecretsCheck secretsKV.UseRemoteSecretsPluginCheck, publicDashboardsApi *publicdashboardsApi.Api, userService user.Service) (*HTTPServer, error) { web.Env = cfg.Env @@ -289,6 +291,7 @@ func ProvideHTTPServer(opts ServerOptions, cfg *setting.Cfg, routeRegister routi dashboardPermissionsService: dashboardPermissionsService, dashboardVersionService: dashboardVersionService, starService: starService, + playlistService: playlistService, CoremodelRegistry: coremodelRegistry, CoremodelStaticRegistry: coremodelStaticRegistry, kvStore: kvStore, diff --git a/pkg/api/playlist.go b/pkg/api/playlist.go index 47889948e13..6c85aec19d8 100644 --- a/pkg/api/playlist.go +++ b/pkg/api/playlist.go @@ -6,25 +6,26 @@ import ( "github.com/grafana/grafana/pkg/api/response" "github.com/grafana/grafana/pkg/models" + "github.com/grafana/grafana/pkg/services/playlist" "github.com/grafana/grafana/pkg/web" ) func (hs *HTTPServer) ValidateOrgPlaylist(c *models.ReqContext) { uid := web.Params(c.Req)[":uid"] - query := models.GetPlaylistByUidQuery{UID: uid, OrgId: c.OrgId} - err := hs.SQLStore.GetPlaylist(c.Req.Context(), &query) + query := playlist.GetPlaylistByUidQuery{UID: uid, OrgId: c.OrgId} + p, err := hs.playlistService.Get(c.Req.Context(), &query) if err != nil { c.JsonApiErr(404, "Playlist not found", err) return } - if query.Result.OrgId == 0 { + if p.OrgId == 0 { c.JsonApiErr(404, "Playlist not found", err) return } - if query.Result.OrgId != c.OrgId { + if p.OrgId != c.OrgId { c.JsonApiErr(403, "You are not allowed to edit/view playlist", nil) return } @@ -38,53 +39,54 @@ func (hs *HTTPServer) SearchPlaylists(c *models.ReqContext) response.Response { limit = 1000 } - searchQuery := models.GetPlaylistsQuery{ + searchQuery := playlist.GetPlaylistsQuery{ Name: query, Limit: limit, OrgId: c.OrgId, } - err := hs.SQLStore.SearchPlaylists(c.Req.Context(), &searchQuery) 
+ playlists, err := hs.playlistService.Search(c.Req.Context(), &searchQuery) if err != nil { return response.Error(500, "Search failed", err) } - return response.JSON(http.StatusOK, searchQuery.Result) + return response.JSON(http.StatusOK, playlists) } func (hs *HTTPServer) GetPlaylist(c *models.ReqContext) response.Response { uid := web.Params(c.Req)[":uid"] - cmd := models.GetPlaylistByUidQuery{UID: uid, OrgId: c.OrgId} + cmd := playlist.GetPlaylistByUidQuery{UID: uid, OrgId: c.OrgId} - if err := hs.SQLStore.GetPlaylist(c.Req.Context(), &cmd); err != nil { + p, err := hs.playlistService.Get(c.Req.Context(), &cmd) + if err != nil { return response.Error(500, "Playlist not found", err) } playlistDTOs, _ := hs.LoadPlaylistItemDTOs(c.Req.Context(), uid, c.OrgId) - dto := &models.PlaylistDTO{ - Id: cmd.Result.Id, - UID: cmd.Result.UID, - Name: cmd.Result.Name, - Interval: cmd.Result.Interval, - OrgId: cmd.Result.OrgId, + dto := &playlist.PlaylistDTO{ + Id: p.Id, + UID: p.UID, + Name: p.Name, + Interval: p.Interval, + OrgId: p.OrgId, Items: playlistDTOs, } return response.JSON(http.StatusOK, dto) } -func (hs *HTTPServer) LoadPlaylistItemDTOs(ctx context.Context, uid string, orgId int64) ([]models.PlaylistItemDTO, error) { +func (hs *HTTPServer) LoadPlaylistItemDTOs(ctx context.Context, uid string, orgId int64) ([]playlist.PlaylistItemDTO, error) { playlistitems, err := hs.LoadPlaylistItems(ctx, uid, orgId) if err != nil { return nil, err } - playlistDTOs := make([]models.PlaylistItemDTO, 0) + playlistDTOs := make([]playlist.PlaylistItemDTO, 0) for _, item := range playlistitems { - playlistDTOs = append(playlistDTOs, models.PlaylistItemDTO{ + playlistDTOs = append(playlistDTOs, playlist.PlaylistItemDTO{ Id: item.Id, PlaylistId: item.PlaylistId, Type: item.Type, @@ -97,13 +99,14 @@ func (hs *HTTPServer) LoadPlaylistItemDTOs(ctx context.Context, uid string, orgI return playlistDTOs, nil } -func (hs *HTTPServer) LoadPlaylistItems(ctx context.Context, uid string, orgId 
int64) ([]models.PlaylistItem, error) { - itemQuery := models.GetPlaylistItemsByUidQuery{PlaylistUID: uid, OrgId: orgId} - if err := hs.SQLStore.GetPlaylistItem(ctx, &itemQuery); err != nil { +func (hs *HTTPServer) LoadPlaylistItems(ctx context.Context, uid string, orgId int64) ([]playlist.PlaylistItem, error) { + itemQuery := playlist.GetPlaylistItemsByUidQuery{PlaylistUID: uid, OrgId: orgId} + items, err := hs.playlistService.GetItems(ctx, &itemQuery) + if err != nil { return nil, err } - return *itemQuery.Result, nil + return items, nil } func (hs *HTTPServer) GetPlaylistItems(c *models.ReqContext) response.Response { @@ -132,8 +135,8 @@ func (hs *HTTPServer) GetPlaylistDashboards(c *models.ReqContext) response.Respo func (hs *HTTPServer) DeletePlaylist(c *models.ReqContext) response.Response { uid := web.Params(c.Req)[":uid"] - cmd := models.DeletePlaylistCommand{UID: uid, OrgId: c.OrgId} - if err := hs.SQLStore.DeletePlaylist(c.Req.Context(), &cmd); err != nil { + cmd := playlist.DeletePlaylistCommand{UID: uid, OrgId: c.OrgId} + if err := hs.playlistService.Delete(c.Req.Context(), &cmd); err != nil { return response.Error(500, "Failed to delete playlist", err) } @@ -141,28 +144,30 @@ func (hs *HTTPServer) DeletePlaylist(c *models.ReqContext) response.Response { } func (hs *HTTPServer) CreatePlaylist(c *models.ReqContext) response.Response { - cmd := models.CreatePlaylistCommand{} + cmd := playlist.CreatePlaylistCommand{} if err := web.Bind(c.Req, &cmd); err != nil { return response.Error(http.StatusBadRequest, "bad request data", err) } cmd.OrgId = c.OrgId - if err := hs.SQLStore.CreatePlaylist(c.Req.Context(), &cmd); err != nil { + p, err := hs.playlistService.Create(c.Req.Context(), &cmd) + if err != nil { return response.Error(500, "Failed to create playlist", err) } - return response.JSON(http.StatusOK, cmd.Result) + return response.JSON(http.StatusOK, p) } func (hs *HTTPServer) UpdatePlaylist(c *models.ReqContext) response.Response { - cmd := 
models.UpdatePlaylistCommand{} + cmd := playlist.UpdatePlaylistCommand{} if err := web.Bind(c.Req, &cmd); err != nil { return response.Error(http.StatusBadRequest, "bad request data", err) } cmd.OrgId = c.OrgId cmd.UID = web.Params(c.Req)[":uid"] - if err := hs.SQLStore.UpdatePlaylist(c.Req.Context(), &cmd); err != nil { + p, err := hs.playlistService.Update(c.Req.Context(), &cmd) + if err != nil { return response.Error(500, "Failed to save playlist", err) } @@ -171,6 +176,6 @@ func (hs *HTTPServer) UpdatePlaylist(c *models.ReqContext) response.Response { return response.Error(500, "Failed to save playlist", err) } - cmd.Result.Items = playlistDTOs - return response.JSON(http.StatusOK, cmd.Result) + p.Items = playlistDTOs + return response.JSON(http.StatusOK, p) } diff --git a/pkg/server/wire.go b/pkg/server/wire.go index 7af7d59a278..70ef423e848 100644 --- a/pkg/server/wire.go +++ b/pkg/server/wire.go @@ -6,6 +6,7 @@ package server import ( "github.com/google/wire" sdkhttpclient "github.com/grafana/grafana-plugin-sdk-go/backend/httpclient" + "github.com/grafana/grafana/pkg/services/playlist/playlistimpl" "github.com/grafana/grafana/pkg/services/store/sanitizer" "github.com/grafana/grafana/pkg/api" @@ -286,6 +287,7 @@ var wireBasicSet = wire.NewSet( ossaccesscontrol.ProvideDashboardPermissions, wire.Bind(new(accesscontrol.DashboardPermissionsService), new(*ossaccesscontrol.DashboardPermissionsService)), starimpl.ProvideService, + playlistimpl.ProvideService, dashverimpl.ProvideService, publicdashboardsService.ProvideService, wire.Bind(new(publicdashboards.Service), new(*publicdashboardsService.PublicDashboardServiceImpl)), diff --git a/pkg/services/playlist/model.go b/pkg/services/playlist/model.go new file mode 100644 index 00000000000..b310edba74c --- /dev/null +++ b/pkg/services/playlist/model.go @@ -0,0 +1,95 @@ +package playlist + +import ( + "errors" +) + +// Typed errors +var ( + ErrPlaylistNotFound = errors.New("Playlist not found") + 
ErrPlaylistFailedGenerateUniqueUid = errors.New("failed to generate unique playlist UID") + ErrCommandValidationFailed = errors.New("command missing required fields") +) + +// Playlist model +type Playlist struct { + Id int64 `json:"id"` + UID string `json:"uid" xorm:"uid"` + Name string `json:"name"` + Interval string `json:"interval"` + OrgId int64 `json:"-"` +} + +type PlaylistDTO struct { + Id int64 `json:"id"` + UID string `json:"uid"` + Name string `json:"name"` + Interval string `json:"interval"` + OrgId int64 `json:"-"` + Items []PlaylistItemDTO `json:"items"` +} + +type PlaylistItemDTO struct { + Id int64 `json:"id"` + PlaylistId int64 `json:"playlistid"` + Type string `json:"type"` + Title string `json:"title"` + Value string `json:"value"` + Order int `json:"order"` +} + +type PlaylistItem struct { + Id int64 + PlaylistId int64 + Type string + Value string + Order int + Title string +} + +type Playlists []*Playlist + +// +// COMMANDS +// + +type UpdatePlaylistCommand struct { + OrgId int64 `json:"-"` + UID string `json:"uid"` + Name string `json:"name" binding:"Required"` + Interval string `json:"interval"` + Items []PlaylistItemDTO `json:"items"` +} + +type CreatePlaylistCommand struct { + Name string `json:"name" binding:"Required"` + Interval string `json:"interval"` + Items []PlaylistItemDTO `json:"items"` + + OrgId int64 `json:"-"` +} + +type DeletePlaylistCommand struct { + UID string + OrgId int64 +} + +// +// QUERIES +// + +type GetPlaylistsQuery struct { + Name string + Limit int + OrgId int64 +} + +type GetPlaylistByUidQuery struct { + UID string + OrgId int64 +} + +type GetPlaylistItemsByUidQuery struct { + PlaylistUID string + OrgId int64 +} diff --git a/pkg/services/playlist/playlist.go b/pkg/services/playlist/playlist.go new file mode 100644 index 00000000000..fbea7a84817 --- /dev/null +++ b/pkg/services/playlist/playlist.go @@ -0,0 +1,14 @@ +package playlist + +import ( + "context" +) + +type Service interface { + Create(context.Context, 
*CreatePlaylistCommand) (*Playlist, error) + Update(context.Context, *UpdatePlaylistCommand) (*PlaylistDTO, error) + Get(context.Context, *GetPlaylistByUidQuery) (*Playlist, error) + GetItems(context.Context, *GetPlaylistItemsByUidQuery) ([]PlaylistItem, error) + Search(context.Context, *GetPlaylistsQuery) (Playlists, error) + Delete(ctx context.Context, cmd *DeletePlaylistCommand) error +} diff --git a/pkg/services/playlist/playlistimpl/playlist.go b/pkg/services/playlist/playlistimpl/playlist.go new file mode 100644 index 00000000000..de2a8588051 --- /dev/null +++ b/pkg/services/playlist/playlistimpl/playlist.go @@ -0,0 +1,44 @@ +package playlistimpl + +import ( + "context" + + "github.com/grafana/grafana/pkg/services/playlist" + "github.com/grafana/grafana/pkg/services/sqlstore/db" +) + +type Service struct { + store store +} + +func ProvideService(db db.DB) playlist.Service { + return &Service{ + store: &sqlStore{ + db: db, + }, + } +} + +func (s *Service) Create(ctx context.Context, cmd *playlist.CreatePlaylistCommand) (*playlist.Playlist, error) { + return s.store.Insert(ctx, cmd) +} + +func (s *Service) Update(ctx context.Context, cmd *playlist.UpdatePlaylistCommand) (*playlist.PlaylistDTO, error) { + return s.store.Update(ctx, cmd) +} + +func (s *Service) Get(ctx context.Context, q *playlist.GetPlaylistByUidQuery) (*playlist.Playlist, error) { + return s.store.Get(ctx, q) +} + +func (s *Service) GetItems(ctx context.Context, q *playlist.GetPlaylistItemsByUidQuery) ([]playlist.PlaylistItem, error) { + return s.store.GetItems(ctx, q) +} + +func (s *Service) Search(ctx context.Context, q *playlist.GetPlaylistsQuery) (playlist.Playlists, error) { + return s.store.List(ctx, q) +} + +func (s *Service) Delete(ctx context.Context, cmd *playlist.DeletePlaylistCommand) error { + return s.store.Delete(ctx, cmd) +} diff --git a/pkg/services/playlist/playlistimpl/store.go b/pkg/services/playlist/playlistimpl/store.go new file mode 100644 index 00000000000..121e3144ffc 
--- /dev/null +++ b/pkg/services/playlist/playlistimpl/store.go @@ -0,0 +1,226 @@ +package playlistimpl + +import ( + "context" + + "github.com/grafana/grafana/pkg/models" + "github.com/grafana/grafana/pkg/services/playlist" + "github.com/grafana/grafana/pkg/services/sqlstore" + "github.com/grafana/grafana/pkg/services/sqlstore/db" + "github.com/grafana/grafana/pkg/util" +) + +type store interface { + Insert(context.Context, *playlist.CreatePlaylistCommand) (*playlist.Playlist, error) + Delete(context.Context, *playlist.DeletePlaylistCommand) error + Get(context.Context, *playlist.GetPlaylistByUidQuery) (*playlist.Playlist, error) + GetItems(context.Context, *playlist.GetPlaylistItemsByUidQuery) ([]playlist.PlaylistItem, error) + List(context.Context, *playlist.GetPlaylistsQuery) (playlist.Playlists, error) + Update(context.Context, *playlist.UpdatePlaylistCommand) (*playlist.PlaylistDTO, error) +} + +type sqlStore struct { + db db.DB +} + +func (s *sqlStore) Insert(ctx context.Context, cmd *playlist.CreatePlaylistCommand) (*playlist.Playlist, error) { + p := playlist.Playlist{} + err := s.db.WithTransactionalDbSession(ctx, func(sess *sqlstore.DBSession) error { + uid, err := generateAndValidateNewPlaylistUid(sess, cmd.OrgId) + if err != nil { + return err + } + + p = playlist.Playlist{ + Name: cmd.Name, + Interval: cmd.Interval, + OrgId: cmd.OrgId, + UID: uid, + } + + _, err = sess.Insert(&p) + if err != nil { + return err + } + + playlistItems := make([]playlist.PlaylistItem, 0) + for _, item := range cmd.Items { + playlistItems = append(playlistItems, playlist.PlaylistItem{ + PlaylistId: p.Id, + Type: item.Type, + Value: item.Value, + Order: item.Order, + Title: item.Title, + }) + } + + _, err = sess.Insert(&playlistItems) + + return err + }) + return &p, err +} + +func (s *sqlStore) Update(ctx context.Context, cmd *playlist.UpdatePlaylistCommand) (*playlist.PlaylistDTO, error) { + dto := playlist.PlaylistDTO{} + err := s.db.WithTransactionalDbSession(ctx, 
func(sess *sqlstore.DBSession) error { + p := playlist.Playlist{ + UID: cmd.UID, + OrgId: cmd.OrgId, + Name: cmd.Name, + Interval: cmd.Interval, + } + + existingPlaylist := playlist.Playlist{UID: cmd.UID, OrgId: cmd.OrgId} + _, err := sess.Get(&existingPlaylist) + if err != nil { + return err + } + p.Id = existingPlaylist.Id + + dto = playlist.PlaylistDTO{ + + Id: p.Id, + UID: p.UID, + OrgId: p.OrgId, + Name: p.Name, + Interval: p.Interval, + } + + _, err = sess.Where("id=?", p.Id).Cols("name", "interval").Update(&p) + if err != nil { + return err + } + + rawSQL := "DELETE FROM playlist_item WHERE playlist_id = ?" + _, err = sess.Exec(rawSQL, p.Id) + + if err != nil { + return err + } + + playlistItems := make([]models.PlaylistItem, 0) + + for index, item := range cmd.Items { + playlistItems = append(playlistItems, models.PlaylistItem{ + PlaylistId: p.Id, + Type: item.Type, + Value: item.Value, + Order: index + 1, + Title: item.Title, + }) + } + + _, err = sess.Insert(&playlistItems) + return err + }) + return &dto, err +} + +func (s *sqlStore) Get(ctx context.Context, query *playlist.GetPlaylistByUidQuery) (*playlist.Playlist, error) { + if query.UID == "" || query.OrgId == 0 { + return nil, playlist.ErrCommandValidationFailed + } + + p := playlist.Playlist{} + err := s.db.WithDbSession(ctx, func(sess *sqlstore.DBSession) error { + p = playlist.Playlist{UID: query.UID, OrgId: query.OrgId} + exists, err := sess.Get(&p) + if !exists { + return playlist.ErrPlaylistNotFound + } + + return err + }) + return &p, err +} + +func (s *sqlStore) Delete(ctx context.Context, cmd *playlist.DeletePlaylistCommand) error { + if cmd.UID == "" || cmd.OrgId == 0 { + return playlist.ErrCommandValidationFailed + } + + return s.db.WithTransactionalDbSession(ctx, func(sess *sqlstore.DBSession) error { + playlist := playlist.Playlist{UID: cmd.UID, OrgId: cmd.OrgId} + _, err := sess.Get(&playlist) + if err != nil { + return err + } + + var rawPlaylistSQL = "DELETE FROM playlist WHERE uid = 
? and org_id = ?" + _, err = sess.Exec(rawPlaylistSQL, cmd.UID, cmd.OrgId) + if err != nil { + return err + } + + var rawItemSQL = "DELETE FROM playlist_item WHERE playlist_id = ?" + _, err = sess.Exec(rawItemSQL, playlist.Id) + + return err + }) +} + +func (s *sqlStore) List(ctx context.Context, query *playlist.GetPlaylistsQuery) (playlist.Playlists, error) { + playlists := make(playlist.Playlists, 0) + if query.OrgId == 0 { + return playlists, playlist.ErrCommandValidationFailed + } + + err := s.db.WithDbSession(ctx, func(dbSess *sqlstore.DBSession) error { + sess := dbSess.Limit(query.Limit) + + if query.Name != "" { + sess.Where("name LIKE ?", "%"+query.Name+"%") + } + + sess.Where("org_id = ?", query.OrgId) + err := sess.Find(&playlists) + + return err + }) + return playlists, err +} + +func (s *sqlStore) GetItems(ctx context.Context, query *playlist.GetPlaylistItemsByUidQuery) ([]playlist.PlaylistItem, error) { + var playlistItems = make([]playlist.PlaylistItem, 0) + err := s.db.WithDbSession(ctx, func(sess *sqlstore.DBSession) error { + if query.PlaylistUID == "" || query.OrgId == 0 { + return models.ErrCommandValidationFailed + } + + // getQuery the playlist Id + getQuery := &playlist.GetPlaylistByUidQuery{UID: query.PlaylistUID, OrgId: query.OrgId} + p, err := s.Get(ctx, getQuery) + if err != nil { + return err + } + + err = sess.Where("playlist_id=?", p.Id).Find(&playlistItems) + + return err + }) + return playlistItems, err +} + +// generateAndValidateNewPlaylistUid generates a playlistUID and verifies that +// the uid isn't already in use. This is deliberately overly cautious, since users +// can also specify playlist uids during provisioning. 
+func generateAndValidateNewPlaylistUid(sess *sqlstore.DBSession, orgId int64) (string, error) { + for i := 0; i < 3; i++ { + uid := generateNewUid() + + playlist := models.Playlist{OrgId: orgId, UID: uid} + exists, err := sess.Get(&playlist) + if err != nil { + return "", err + } + + if !exists { + return uid, nil + } + } + + return "", models.ErrPlaylistFailedGenerateUniqueUid +} + +var generateNewUid func() string = util.GenerateShortUID diff --git a/pkg/services/playlist/playlistimpl/store_test.go b/pkg/services/playlist/playlistimpl/store_test.go new file mode 100644 index 00000000000..0eb1fe842aa --- /dev/null +++ b/pkg/services/playlist/playlistimpl/store_test.go @@ -0,0 +1,82 @@ +package playlistimpl + +import ( + "context" + "testing" + + "github.com/grafana/grafana/pkg/services/playlist" + "github.com/grafana/grafana/pkg/services/sqlstore" + "github.com/stretchr/testify/require" +) + +func TestIntegrationPlaylistDataAccess(t *testing.T) { + if testing.Short() { + t.Skip("skipping integration test") + } + ss := sqlstore.InitTestDB(t) + playlistStore := sqlStore{db: ss} + + t.Run("Can create playlist", func(t *testing.T) { + items := []playlist.PlaylistItemDTO{ + {Title: "graphite", Value: "graphite", Type: "dashboard_by_tag"}, + {Title: "Backend response times", Value: "3", Type: "dashboard_by_id"}, + } + cmd := playlist.CreatePlaylistCommand{Name: "NYC office", Interval: "10m", OrgId: 1, Items: items} + p, err := playlistStore.Insert(context.Background(), &cmd) + require.NoError(t, err) + uid := p.UID + + t.Run("Can get playlist items", func(t *testing.T) { + get := &playlist.GetPlaylistItemsByUidQuery{PlaylistUID: uid, OrgId: 1} + storedPlaylistItems, err := playlistStore.GetItems(context.Background(), get) + require.NoError(t, err) + require.Equal(t, len(storedPlaylistItems), len(items)) + }) + + t.Run("Can update playlist", func(t *testing.T) { + items := []playlist.PlaylistItemDTO{ + {Title: "influxdb", Value: "influxdb", Type: "dashboard_by_tag"}, + 
{Title: "Backend response times", Value: "2", Type: "dashboard_by_id"}, + } + query := playlist.UpdatePlaylistCommand{Name: "NYC office ", OrgId: 1, UID: uid, Interval: "10s", Items: items} + _, err = playlistStore.Update(context.Background(), &query) + require.NoError(t, err) + }) + + t.Run("Can remove playlist", func(t *testing.T) { + deleteQuery := playlist.DeletePlaylistCommand{UID: uid, OrgId: 1} + err = playlistStore.Delete(context.Background(), &deleteQuery) + require.NoError(t, err) + + getQuery := playlist.GetPlaylistByUidQuery{UID: uid, OrgId: 1} + p, err := playlistStore.Get(context.Background(), &getQuery) + require.Error(t, err) + require.Equal(t, uid, p.UID, "playlist should've been removed") + require.ErrorIs(t, err, playlist.ErrPlaylistNotFound) + }) + }) + + t.Run("Delete playlist that doesn't exist", func(t *testing.T) { + deleteQuery := playlist.DeletePlaylistCommand{UID: "654312", OrgId: 1} + err := playlistStore.Delete(context.Background(), &deleteQuery) + require.NoError(t, err) + }) + + t.Run("Delete playlist with invalid command yields error", func(t *testing.T) { + testCases := []struct { + desc string + cmd playlist.DeletePlaylistCommand + }{ + {desc: "none", cmd: playlist.DeletePlaylistCommand{}}, + {desc: "no OrgId", cmd: playlist.DeletePlaylistCommand{UID: "1"}}, + {desc: "no Uid", cmd: playlist.DeletePlaylistCommand{OrgId: 1}}, + } + + for _, tc := range testCases { + t.Run(tc.desc, func(t *testing.T) { + err := playlistStore.Delete(context.Background(), &tc.cmd) + require.EqualError(t, err, playlist.ErrCommandValidationFailed.Error()) + }) + } + }) +} diff --git a/pkg/services/playlist/playlisttest/fake.go b/pkg/services/playlist/playlisttest/fake.go new file mode 100644 index 00000000000..2f99a88c5b1 --- /dev/null +++ b/pkg/services/playlist/playlisttest/fake.go @@ -0,0 +1,43 @@ +package playlisttest + +import ( + "context" + + "github.com/grafana/grafana/pkg/services/playlist" +) + +type FakePlaylistService struct { + 
ExpectedPlaylist *playlist.Playlist + ExpectedPlaylistDTO *playlist.PlaylistDTO + ExpectedPlaylistItems []playlist.PlaylistItem + ExpectedPlaylists playlist.Playlists + ExpectedError error +} + +func NewPlaylistServiveFake() *FakePlaylistService { + return &FakePlaylistService{} +} + +func (f *FakePlaylistService) Create(context.Context, *playlist.CreatePlaylistCommand) (*playlist.Playlist, error) { + return f.ExpectedPlaylist, f.ExpectedError +} + +func (f *FakePlaylistService) Update(context.Context, *playlist.UpdatePlaylistCommand) (*playlist.PlaylistDTO, error) { + return f.ExpectedPlaylistDTO, f.ExpectedError +} + +func (f *FakePlaylistService) Get(context.Context, *playlist.GetPlaylistByUidQuery) (*playlist.Playlist, error) { + return f.ExpectedPlaylist, f.ExpectedError +} + +func (f *FakePlaylistService) GetItems(context.Context, *playlist.GetPlaylistItemsByUidQuery) ([]playlist.PlaylistItem, error) { + return f.ExpectedPlaylistItems, f.ExpectedError +} + +func (f *FakePlaylistService) Search(context.Context, *playlist.GetPlaylistsQuery) (playlist.Playlists, error) { + return f.ExpectedPlaylists, f.ExpectedError +} + +func (f *FakePlaylistService) Delete(ctx context.Context, cmd *playlist.DeletePlaylistCommand) error { + return f.ExpectedError +} diff --git a/pkg/services/sqlstore/store.go b/pkg/services/sqlstore/store.go index 55bc7f85a3d..bcba1b75781 100644 --- a/pkg/services/sqlstore/store.go +++ b/pkg/services/sqlstore/store.go @@ -72,11 +72,17 @@ type Store interface { GetGlobalQuotaByTarget(ctx context.Context, query *models.GetGlobalQuotaByTargetQuery) error WithTransactionalDbSession(ctx context.Context, callback DBTransactionFunc) error InTransaction(ctx context.Context, fn func(ctx context.Context) error) error + // deprecated CreatePlaylist(ctx context.Context, cmd *models.CreatePlaylistCommand) error + // deprecated UpdatePlaylist(ctx context.Context, cmd *models.UpdatePlaylistCommand) error + // deprecated GetPlaylist(ctx context.Context, query 
*models.GetPlaylistByUidQuery) error + // deprecated DeletePlaylist(ctx context.Context, cmd *models.DeletePlaylistCommand) error + // deprecated SearchPlaylists(ctx context.Context, query *models.GetPlaylistsQuery) error + // deprecated GetPlaylistItem(ctx context.Context, query *models.GetPlaylistItemsByUidQuery) error GetAlertById(ctx context.Context, query *models.GetAlertByIdQuery) error GetAllAlertQueryHandler(ctx context.Context, query *models.GetAllAlertsQuery) error diff --git a/public/api-merged.json b/public/api-merged.json index c83c8d61fb2..98ab31b0a08 100644 --- a/public/api-merged.json +++ b/public/api-merged.json @@ -8535,6 +8535,14 @@ "tags": ["provisioning"], "summary": "Get all the contact points.", "operationId": "RouteGetContactpoints", + "parameters": [ + { + "type": "string", + "description": "Filter by name", + "name": "name", + "in": "query" + } + ], "responses": { "200": { "description": "ContactPoints", @@ -8874,6 +8882,20 @@ } } } + }, + "delete": { + "consumes": ["application/json"], + "tags": ["provisioning"], + "summary": "Clears the notification policy tree.", + "operationId": "RouteResetPolicyTree", + "responses": { + "202": { + "description": "Ack", + "schema": { + "$ref": "#/definitions/Ack" + } + } + } } }, "/v1/provisioning/templates": { @@ -10250,7 +10272,7 @@ "$ref": "#/definitions/ScheduleDTO" }, "state": { - "type": "string" + "$ref": "#/definitions/State" }, "templateVars": { "type": "object" @@ -10268,9 +10290,6 @@ "CreatePlaylistCommand": { "type": "object", "properties": { - "Result": { - "$ref": "#/definitions/Playlist" - }, "interval": { "type": "string" }, @@ -15496,9 +15515,8 @@ "type": "string" }, "URL": { - "description": "The general form represented is:\n\n[scheme:][//[userinfo@]host][/]path[?query][#fragment]\n\nURLs that do not start with a slash after the scheme are interpreted as:\n\nscheme:opaque[?query][#fragment]\n\nNote that the Path field is stored in decoded form: /%47%6f%2f becomes /Go/.\nA consequence 
is that it is impossible to tell which slashes in the Path were\nslashes in the raw URL and which were %2f. This distinction is rarely important,\nbut when it is, the code should use RawPath, an optional field which only gets\nset if the default encoding is different from Path.\n\nURL's String method uses the EscapedPath method to obtain the path. See the\nEscapedPath method for more details.", "type": "object", - "title": "A URL represents a parsed URL (technically, a URI reference).", + "title": "URL is a custom URL type that allows validation at configuration load time.", "properties": { "ForceQuery": { "type": "boolean" @@ -15768,9 +15786,6 @@ "UpdatePlaylistCommand": { "type": "object", "properties": { - "Result": { - "$ref": "#/definitions/PlaylistDTO" - }, "interval": { "type": "string" }, @@ -16298,6 +16313,7 @@ } }, "alertGroup": { + "description": "AlertGroup alert group", "type": "object", "required": ["alerts", "labels", "receiver"], "properties": { @@ -16461,6 +16477,7 @@ } }, "gettableSilence": { + "description": "GettableSilence gettable silence", "type": "object", "required": ["comment", "createdBy", "endsAt", "matchers", "startsAt", "id", "status", "updatedAt"], "properties": { @@ -16500,7 +16517,6 @@ } }, "gettableSilences": { - "description": "GettableSilences gettable silences", "type": "array", "items": { "$ref": "#/definitions/gettableSilence" @@ -16634,7 +16650,6 @@ } }, "receiver": { - "description": "Receiver receiver", "type": "object", "required": ["name"], "properties": { diff --git a/public/api-spec.json b/public/api-spec.json index b438b6d5e39..e9275cccdff 100644 --- a/public/api-spec.json +++ b/public/api-spec.json @@ -9380,7 +9380,7 @@ "$ref": "#/definitions/ScheduleDTO" }, "state": { - "type": "string" + "$ref": "#/definitions/State" }, "templateVars": { "type": "object" @@ -9398,9 +9398,6 @@ "CreatePlaylistCommand": { "type": "object", "properties": { - "Result": { - "$ref": "#/definitions/Playlist" - }, "interval": { "type": 
"string" }, @@ -12676,9 +12673,6 @@ "UpdatePlaylistCommand": { "type": "object", "properties": { - "Result": { - "$ref": "#/definitions/PlaylistDTO" - }, "interval": { "type": "string" }, From 076851313ddb95cda03203e434c03a499fdede83 Mon Sep 17 00:00:00 2001 From: brendamuir <100768211+brendamuir@users.noreply.github.com> Date: Mon, 18 Jul 2022 11:09:29 +0100 Subject: [PATCH 029/116] Docs: fixes warning for enterprise customers (#52385) * Docs: fixes data source links * fixes unified alerting redirect * Docs: fixes enterprise customer warning --- docs/sources/alerting/migrating-alerts/_index.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/sources/alerting/migrating-alerts/_index.md b/docs/sources/alerting/migrating-alerts/_index.md index 83b714edd6a..8e7c2ce778c 100644 --- a/docs/sources/alerting/migrating-alerts/_index.md +++ b/docs/sources/alerting/migrating-alerts/_index.md @@ -13,7 +13,7 @@ weight: 101 Grafana Alerting is enabled by default for new installations or existing installations whether or not legacy alerting is configured. -> **Note**: We recommend that Grafana Enterprise customers with more than a dozen Grafana dashboard alert rules do not upgrade and remain on legacy alerting for now by [opting out]({{< relref "opt-out/" >}}). If you do want to upgrade to Grafana Alerting, contact customer support. +> **Note**: When upgrading, your dashboard alerts are migrated to a new format. This migration can be rolled back easily by [opting out]({{< relref "opt-out/" >}}). If you have any questions regarding this migration, please contact us. Existing installations that do not use legacy alerting will have Grafana Alerting enabled by default unless alerting is disabled in the configuration. 
From 67ea2da57e0f0d47d98ed3ee8513f0277d9c1ba9 Mon Sep 17 00:00:00 2001 From: Artur Wierzbicki Date: Mon, 18 Jul 2022 14:36:54 +0400 Subject: [PATCH 030/116] Storage: fix svg upload (#52395) --- pkg/services/store/http.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pkg/services/store/http.go b/pkg/services/store/http.go index 9f207d9a7a1..2523e500bc0 100644 --- a/pkg/services/store/http.go +++ b/pkg/services/store/http.go @@ -110,7 +110,7 @@ func (s *httpStorage) Upload(c *models.ReqContext) response.Response { entityType := EntityTypeJSON mimeType := http.DetectContentType(data) - if strings.HasPrefix(mimeType, "image") { + if strings.HasPrefix(mimeType, "image") || strings.HasSuffix(path, ".svg") { entityType = EntityTypeImage } From b2736ac1fea71ebeb4255cf71fb488b2c117114d Mon Sep 17 00:00:00 2001 From: Artur Wierzbicki Date: Mon, 18 Jul 2022 15:24:39 +0400 Subject: [PATCH 031/116] Storage: limit the number of uploaded files (#50796) * #50608: sql file upload quotas * rename `files_in_sql` to `file` * merge conflict --- .betterer.results | 7 ++----- conf/defaults.ini | 3 +++ pkg/services/quota/quotaimpl/quota.go | 5 +++++ pkg/services/sqlstore/quota.go | 15 ++++++++++++++- pkg/services/store/http.go | 19 ++++++++++++++++--- pkg/setting/setting_quota.go | 2 ++ 6 files changed, 42 insertions(+), 9 deletions(-) diff --git a/.betterer.results b/.betterer.results index 343a79f5634..0ae53be6813 100644 --- a/.betterer.results +++ b/.betterer.results @@ -5517,11 +5517,8 @@ exports[`better eslint`] = { [0, 0, 0, "Unexpected any. Specify a different type.", "20"], [0, 0, 0, "Do not use any type assertions.", "21"], [0, 0, 0, "Unexpected any. Specify a different type.", "22"], - [0, 0, 0, "Do not use any type assertions.", "23"], - [0, 0, 0, "Do not use any type assertions.", "24"], - [0, 0, 0, "Do not use any type assertions.", "25"], - [0, 0, 0, "Unexpected any. 
Specify a different type.", "26"], - [0, 0, 0, "Do not use any type assertions.", "27"] + [0, 0, 0, "Unexpected any. Specify a different type.", "23"], + [0, 0, 0, "Do not use any type assertions.", "24"] ], "public/app/features/plugins/hooks/tests/useImportAppPlugin.test.tsx:5381": [ [0, 0, 0, "Unexpected any. Specify a different type.", "0"], diff --git a/conf/defaults.ini b/conf/defaults.ini index 02093ab900c..ad5affa2e7a 100644 --- a/conf/defaults.ini +++ b/conf/defaults.ini @@ -806,6 +806,9 @@ global_session = -1 # global limit of alerts global_alert_rule = -1 +# global limit of files uploaded to the SQL DB +global_file = 1000 + #################################### Unified Alerting #################### [unified_alerting] # Enable the Unified Alerting sub-system and interface. When enabled we'll migrate all of your alert rules and notification channels to the new system. New alert rules will be created and your notification channels will be converted into an Alertmanager configuration. Previous data is preserved to enable backwards compatibility but new data is removed when switching. When this configuration section and flag are not defined, the state is defined at runtime. See the documentation for more details. 
diff --git a/pkg/services/quota/quotaimpl/quota.go b/pkg/services/quota/quotaimpl/quota.go index f16cecaae74..fc85fc6d97f 100644 --- a/pkg/services/quota/quotaimpl/quota.go +++ b/pkg/services/quota/quotaimpl/quota.go @@ -190,6 +190,11 @@ func (s *Service) getQuotaScopes(target string) ([]models.QuotaScope, error) { models.QuotaScope{Name: "org", Target: target, DefaultLimit: s.Cfg.Quota.Org.AlertRule}, ) return scopes, nil + case "file": + scopes = append(scopes, + models.QuotaScope{Name: "global", Target: target, DefaultLimit: s.Cfg.Quota.Global.File}, + ) + return scopes, nil default: return scopes, quota.ErrInvalidQuotaTarget } diff --git a/pkg/services/sqlstore/quota.go b/pkg/services/sqlstore/quota.go index 3ed4428fa71..6f7fc67f28b 100644 --- a/pkg/services/sqlstore/quota.go +++ b/pkg/services/sqlstore/quota.go @@ -12,6 +12,7 @@ import ( const ( alertRuleTarget = "alert_rule" dashboardTarget = "dashboard" + filesTarget = "file" ) type targetCount struct { @@ -255,7 +256,19 @@ func (ss *SQLStore) UpdateUserQuota(ctx context.Context, cmd *models.UpdateUserQ func (ss *SQLStore) GetGlobalQuotaByTarget(ctx context.Context, query *models.GetGlobalQuotaByTargetQuery) error { return ss.WithDbSession(ctx, func(sess *DBSession) error { var used int64 - if query.Target != alertRuleTarget || query.UnifiedAlertingEnabled { + + if query.Target == filesTarget { + // get quota used. + rawSQL := fmt.Sprintf("SELECT COUNT(*) AS count FROM %s", + dialect.Quote("file")) + + notFolderCondition := fmt.Sprintf(" WHERE path NOT LIKE '%s'", "%/") + resp := make([]*targetCount, 0) + if err := sess.SQL(rawSQL + notFolderCondition).Find(&resp); err != nil { + return err + } + used = resp[0].Count + } else if query.Target != alertRuleTarget || query.UnifiedAlertingEnabled { // get quota used. 
rawSQL := fmt.Sprintf("SELECT COUNT(*) AS count FROM %s", dialect.Quote(query.Target)) diff --git a/pkg/services/store/http.go b/pkg/services/store/http.go index 2523e500bc0..49ea3199481 100644 --- a/pkg/services/store/http.go +++ b/pkg/services/store/http.go @@ -11,6 +11,7 @@ import ( "github.com/grafana/grafana/pkg/api/response" "github.com/grafana/grafana/pkg/models" + "github.com/grafana/grafana/pkg/services/quota" "github.com/grafana/grafana/pkg/util" "github.com/grafana/grafana/pkg/web" ) @@ -26,12 +27,14 @@ type HTTPStorageService interface { } type httpStorage struct { - store StorageService + store StorageService + quotaService quota.Service } -func ProvideHTTPService(store StorageService) HTTPStorageService { +func ProvideHTTPService(store StorageService, quotaService quota.Service) HTTPStorageService { return &httpStorage{ - store: store, + store: store, + quotaService: quotaService, } } @@ -58,6 +61,16 @@ func UploadErrorToStatusCode(err error) int { } func (s *httpStorage) Upload(c *models.ReqContext) response.Response { + // assumes we are only uploading to the SQL database - TODO: refactor once we introduce object stores + quotaReached, err := s.quotaService.CheckQuotaReached(c.Req.Context(), "file", nil) + if err != nil { + return response.Error(500, "Internal server error", err) + } + + if quotaReached { + return response.Error(400, "File quota reached", errors.New("file quota reached")) + } + type rspInfo struct { Message string `json:"message,omitempty"` Path string `json:"path,omitempty"` diff --git a/pkg/setting/setting_quota.go b/pkg/setting/setting_quota.go index 139189f7e84..b3cd6d01115 100644 --- a/pkg/setting/setting_quota.go +++ b/pkg/setting/setting_quota.go @@ -24,6 +24,7 @@ type GlobalQuota struct { ApiKey int64 `target:"api_key"` Session int64 `target:"-"` AlertRule int64 `target:"alert_rule"` + File int64 `target:"file"` } func (q *OrgQuota) ToMap() map[string]int64 { @@ -94,6 +95,7 @@ func (cfg *Cfg) readQuotaSettings() { Dashboard: 
quota.Key("global_dashboard").MustInt64(-1), ApiKey: quota.Key("global_api_key").MustInt64(-1), Session: quota.Key("global_session").MustInt64(-1), + File: quota.Key("global_file").MustInt64(-1), AlertRule: alertGlobalQuota, } From 8ff152f98f0092f2a5c55da7d0b2be40dccdce75 Mon Sep 17 00:00:00 2001 From: Ivana Huckova <30407135+ivanahuckova@users.noreply.github.com> Date: Mon, 18 Jul 2022 14:13:34 +0200 Subject: [PATCH 032/116] ModifyQuery: Improve typing for modifyQuery and query hints (#52326) * ModifyQuery: Improve typing * Elasticsearch: Added `modifyQuery` method to add filters in Explore (#52313) * fixed elasticsearch `QueryFixAction` type Co-authored-by: Sven Grossmann --- .betterer.results | 6 ++---- packages/grafana-data/src/types/datasource.ts | 1 + public/app/features/explore/Explore.tsx | 4 ++-- .../elasticsearch/datasource.test.ts | 16 +++++++++------ .../datasource/elasticsearch/datasource.ts | 11 +++++++--- .../datasource/loki/datasource.test.ts | 20 +++++++++---------- .../app/plugins/datasource/loki/datasource.ts | 11 +++++++--- .../prometheus/components/PromQueryField.tsx | 5 +++-- .../datasource/prometheus/datasource.test.ts | 8 ++++---- .../datasource/prometheus/datasource.tsx | 18 ++++++++++++----- .../datasource/prometheus/query_hints.ts | 2 +- .../querybuilder/shared/QueryBuilderHints.tsx | 10 ++++++---- 12 files changed, 68 insertions(+), 44 deletions(-) diff --git a/.betterer.results b/.betterer.results index 0ae53be6813..ceabba5c265 100644 --- a/.betterer.results +++ b/.betterer.results @@ -7685,8 +7685,7 @@ exports[`better eslint`] = { [0, 0, 0, "Unexpected any. Specify a different type.", "3"], [0, 0, 0, "Unexpected any. Specify a different type.", "4"], [0, 0, 0, "Unexpected any. Specify a different type.", "5"], - [0, 0, 0, "Unexpected any. Specify a different type.", "6"], - [0, 0, 0, "Unexpected any. Specify a different type.", "7"] + [0, 0, 0, "Unexpected any. 
Specify a different type.", "6"] ], "public/app/plugins/datasource/loki/getDerivedFields.ts:5381": [ [0, 0, 0, "Unexpected any. Specify a different type.", "0"], @@ -8246,8 +8245,7 @@ exports[`better eslint`] = { [0, 0, 0, "Unexpected any. Specify a different type.", "32"], [0, 0, 0, "Unexpected any. Specify a different type.", "33"], [0, 0, 0, "Unexpected any. Specify a different type.", "34"], - [0, 0, 0, "Unexpected any. Specify a different type.", "35"], - [0, 0, 0, "Unexpected any. Specify a different type.", "36"] + [0, 0, 0, "Unexpected any. Specify a different type.", "35"] ], "public/app/plugins/datasource/prometheus/language_provider.test.ts:5381": [ [0, 0, 0, "Unexpected any. Specify a different type.", "0"], diff --git a/packages/grafana-data/src/types/datasource.ts b/packages/grafana-data/src/types/datasource.ts index 1be984a4dbd..83bbe98502e 100644 --- a/packages/grafana-data/src/types/datasource.ts +++ b/packages/grafana-data/src/types/datasource.ts @@ -506,6 +506,7 @@ export interface QueryFixAction { type: string; query?: string; preventSubmit?: boolean; + options?: KeyValue; } export interface QueryHint { diff --git a/public/app/features/explore/Explore.tsx b/public/app/features/explore/Explore.tsx index 4d0a513972b..55233403242 100644 --- a/public/app/features/explore/Explore.tsx +++ b/public/app/features/explore/Explore.tsx @@ -150,11 +150,11 @@ export class Explore extends React.PureComponent { }; onClickFilterLabel = (key: string, value: string) => { - this.onModifyQueries({ type: 'ADD_FILTER', key, value }); + this.onModifyQueries({ type: 'ADD_FILTER', options: { key, value } }); }; onClickFilterOutLabel = (key: string, value: string) => { - this.onModifyQueries({ type: 'ADD_FILTER_OUT', key, value }); + this.onModifyQueries({ type: 'ADD_FILTER_OUT', options: { key, value } }); }; onClickAddQueryRowButton = () => { diff --git a/public/app/plugins/datasource/elasticsearch/datasource.test.ts 
b/public/app/plugins/datasource/elasticsearch/datasource.test.ts index 78f622211cc..bdb6dd52cff 100644 --- a/public/app/plugins/datasource/elasticsearch/datasource.test.ts +++ b/public/app/plugins/datasource/elasticsearch/datasource.test.ts @@ -1033,15 +1033,19 @@ describe('modifyQuery', () => { }); it('should add the filter', () => { - expect(ds.modifyQuery(query, { type: 'ADD_FILTER', key: 'foo', value: 'bar' }).query).toBe('foo:"bar"'); + expect(ds.modifyQuery(query, { type: 'ADD_FILTER', options: { key: 'foo', value: 'bar' } }).query).toBe( + 'foo:"bar"' + ); }); it('should add the negative filter', () => { - expect(ds.modifyQuery(query, { type: 'ADD_FILTER_OUT', key: 'foo', value: 'bar' }).query).toBe('-foo:"bar"'); + expect(ds.modifyQuery(query, { type: 'ADD_FILTER_OUT', options: { key: 'foo', value: 'bar' } }).query).toBe( + '-foo:"bar"' + ); }); it('should do nothing on unknown type', () => { - expect(ds.modifyQuery(query, { type: 'unknown', key: 'foo', value: 'bar' }).query).toBe(query.query); + expect(ds.modifyQuery(query, { type: 'unknown', options: { key: 'foo', value: 'bar' } }).query).toBe(query.query); }); }); @@ -1052,19 +1056,19 @@ describe('modifyQuery', () => { }); it('should add the filter', () => { - expect(ds.modifyQuery(query, { type: 'ADD_FILTER', key: 'foo', value: 'bar' }).query).toBe( + expect(ds.modifyQuery(query, { type: 'ADD_FILTER', options: { key: 'foo', value: 'bar' } }).query).toBe( 'test:"value" AND foo:"bar"' ); }); it('should add the negative filter', () => { - expect(ds.modifyQuery(query, { type: 'ADD_FILTER_OUT', key: 'foo', value: 'bar' }).query).toBe( + expect(ds.modifyQuery(query, { type: 'ADD_FILTER_OUT', options: { key: 'foo', value: 'bar' } }).query).toBe( 'test:"value" AND -foo:"bar"' ); }); it('should do nothing on unknown type', () => { - expect(ds.modifyQuery(query, { type: 'unknown', key: 'foo', value: 'bar' }).query).toBe(query.query); + expect(ds.modifyQuery(query, { type: 'unknown', options: { key: 'foo', value: 
'bar' } }).query).toBe(query.query); }); }); }); diff --git a/public/app/plugins/datasource/elasticsearch/datasource.ts b/public/app/plugins/datasource/elasticsearch/datasource.ts index 4aa1df7f555..84a55da6fc8 100644 --- a/public/app/plugins/datasource/elasticsearch/datasource.ts +++ b/public/app/plugins/datasource/elasticsearch/datasource.ts @@ -25,6 +25,7 @@ import { ScopedVars, TimeRange, toUtc, + QueryFixAction, } from '@grafana/data'; import { BackendSrvRequest, getBackendSrv, getDataSourceSrv } from '@grafana/runtime'; import { RowContextOptions } from '@grafana/ui/src/components/Logs/LogRowContextProvider'; @@ -953,21 +954,25 @@ export class ElasticDatasource return false; } - modifyQuery(query: ElasticsearchQuery, action: { type: string; key: string; value: string }): ElasticsearchQuery { + modifyQuery(query: ElasticsearchQuery, action: QueryFixAction): ElasticsearchQuery { + if (!action.options) { + return query; + } + let expression = query.query ?? ''; switch (action.type) { case 'ADD_FILTER': { if (expression.length > 0) { expression += ' AND '; } - expression += `${action.key}:"${action.value}"`; + expression += `${action.options.key}:"${action.options.value}"`; break; } case 'ADD_FILTER_OUT': { if (expression.length > 0) { expression += ' AND '; } - expression += `-${action.key}:"${action.value}"`; + expression += `-${action.options.key}:"${action.options.value}"`; break; } } diff --git a/public/app/plugins/datasource/loki/datasource.test.ts b/public/app/plugins/datasource/loki/datasource.test.ts index bf39da555b0..e34a051a02c 100644 --- a/public/app/plugins/datasource/loki/datasource.test.ts +++ b/public/app/plugins/datasource/loki/datasource.test.ts @@ -522,7 +522,7 @@ describe('LokiDatasource', () => { describe('and query has no parser', () => { it('then the correct label should be added for logs query', () => { const query: LokiQuery = { refId: 'A', expr: '{bar="baz"}' }; - const action = { key: 'job', value: 'grafana', type: 'ADD_FILTER' }; + 
const action = { options: { key: 'job', value: 'grafana' }, type: 'ADD_FILTER' }; const ds = createLokiDSForTests(); const result = ds.modifyQuery(query, action); @@ -532,7 +532,7 @@ describe('LokiDatasource', () => { it('then the correctly escaped label should be added for logs query', () => { const query: LokiQuery = { refId: 'A', expr: '{bar="baz"}' }; - const action = { key: 'job', value: '\\test', type: 'ADD_FILTER' }; + const action = { options: { key: 'job', value: '\\test' }, type: 'ADD_FILTER' }; const ds = createLokiDSForTests(); const result = ds.modifyQuery(query, action); @@ -542,7 +542,7 @@ describe('LokiDatasource', () => { it('then the correct label should be added for metrics query', () => { const query: LokiQuery = { refId: 'A', expr: 'rate({bar="baz"}[5m])' }; - const action = { key: 'job', value: 'grafana', type: 'ADD_FILTER' }; + const action = { options: { key: 'job', value: 'grafana' }, type: 'ADD_FILTER' }; const ds = createLokiDSForTests(); const result = ds.modifyQuery(query, action); @@ -552,7 +552,7 @@ describe('LokiDatasource', () => { describe('and query has parser', () => { it('then the correct label should be added for logs query', () => { const query: LokiQuery = { refId: 'A', expr: '{bar="baz"} | logfmt' }; - const action = { key: 'job', value: 'grafana', type: 'ADD_FILTER' }; + const action = { options: { key: 'job', value: 'grafana' }, type: 'ADD_FILTER' }; const ds = createLokiDSForTests(); const result = ds.modifyQuery(query, action); @@ -561,7 +561,7 @@ describe('LokiDatasource', () => { }); it('then the correct label should be added for metrics query', () => { const query: LokiQuery = { refId: 'A', expr: 'rate({bar="baz"} | logfmt [5m])' }; - const action = { key: 'job', value: 'grafana', type: 'ADD_FILTER' }; + const action = { options: { key: 'job', value: 'grafana' }, type: 'ADD_FILTER' }; const ds = createLokiDSForTests(); const result = ds.modifyQuery(query, action); @@ -576,7 +576,7 @@ describe('LokiDatasource', () => { 
describe('and query has no parser', () => { it('then the correct label should be added for logs query', () => { const query: LokiQuery = { refId: 'A', expr: '{bar="baz"}' }; - const action = { key: 'job', value: 'grafana', type: 'ADD_FILTER_OUT' }; + const action = { options: { key: 'job', value: 'grafana' }, type: 'ADD_FILTER_OUT' }; const ds = createLokiDSForTests(); const result = ds.modifyQuery(query, action); @@ -586,7 +586,7 @@ describe('LokiDatasource', () => { it('then the correctly escaped label should be added for logs query', () => { const query: LokiQuery = { refId: 'A', expr: '{bar="baz"}' }; - const action = { key: 'job', value: '"test', type: 'ADD_FILTER_OUT' }; + const action = { options: { key: 'job', value: '"test' }, type: 'ADD_FILTER_OUT' }; const ds = createLokiDSForTests(); const result = ds.modifyQuery(query, action); @@ -596,7 +596,7 @@ describe('LokiDatasource', () => { it('then the correct label should be added for metrics query', () => { const query: LokiQuery = { refId: 'A', expr: 'rate({bar="baz"}[5m])' }; - const action = { key: 'job', value: 'grafana', type: 'ADD_FILTER_OUT' }; + const action = { options: { key: 'job', value: 'grafana' }, type: 'ADD_FILTER_OUT' }; const ds = createLokiDSForTests(); const result = ds.modifyQuery(query, action); @@ -606,7 +606,7 @@ describe('LokiDatasource', () => { describe('and query has parser', () => { it('then the correct label should be added for logs query', () => { const query: LokiQuery = { refId: 'A', expr: '{bar="baz"} | logfmt' }; - const action = { key: 'job', value: 'grafana', type: 'ADD_FILTER_OUT' }; + const action = { options: { key: 'job', value: 'grafana' }, type: 'ADD_FILTER_OUT' }; const ds = createLokiDSForTests(); const result = ds.modifyQuery(query, action); @@ -615,7 +615,7 @@ describe('LokiDatasource', () => { }); it('then the correct label should be added for metrics query', () => { const query: LokiQuery = { refId: 'A', expr: 'rate({bar="baz"} | logfmt [5m])' }; - const 
action = { key: 'job', value: 'grafana', type: 'ADD_FILTER_OUT' }; + const action = { options: { key: 'job', value: 'grafana' }, type: 'ADD_FILTER_OUT' }; const ds = createLokiDSForTests(); const result = ds.modifyQuery(query, action); diff --git a/public/app/plugins/datasource/loki/datasource.ts b/public/app/plugins/datasource/loki/datasource.ts index d1b18db48d1..e91e0500297 100644 --- a/public/app/plugins/datasource/loki/datasource.ts +++ b/public/app/plugins/datasource/loki/datasource.ts @@ -32,6 +32,7 @@ import { toUtc, QueryHint, getDefaultTimeRange, + QueryFixAction, } from '@grafana/data'; import { FetchError, config, DataSourceWithBackend } from '@grafana/runtime'; import { RowContextOptions } from '@grafana/ui/src/components/Logs/LogRowContextProvider'; @@ -389,15 +390,19 @@ export class LokiDatasource return escapedValues.join('|'); } - modifyQuery(query: LokiQuery, action: any): LokiQuery { + modifyQuery(query: LokiQuery, action: QueryFixAction): LokiQuery { let expression = query.expr ?? 
''; switch (action.type) { case 'ADD_FILTER': { - expression = this.addLabelToQuery(expression, action.key, '=', action.value); + if (action.options?.key && action.options?.value) { + expression = this.addLabelToQuery(expression, action.options.key, '=', action.options.value); + } break; } case 'ADD_FILTER_OUT': { - expression = this.addLabelToQuery(expression, action.key, '!=', action.value); + if (action.options?.key && action.options?.value) { + expression = this.addLabelToQuery(expression, action.options.key, '!=', action.options.value); + } break; } case 'ADD_LOGFMT_PARSER': { diff --git a/public/app/plugins/datasource/prometheus/components/PromQueryField.tsx b/public/app/plugins/datasource/prometheus/components/PromQueryField.tsx index 07e44b67135..7139ac2c7be 100644 --- a/public/app/plugins/datasource/prometheus/components/PromQueryField.tsx +++ b/public/app/plugins/datasource/prometheus/components/PromQueryField.tsx @@ -224,8 +224,9 @@ class PromQueryField extends React.PureComponent { const { datasource, query, onChange, onRunQuery } = this.props; const { hint } = this.state; - - onChange(datasource.modifyQuery(query, hint!.fix!.action)); + if (hint?.fix?.action) { + onChange(datasource.modifyQuery(query, hint.fix.action)); + } onRunQuery(); }; diff --git a/public/app/plugins/datasource/prometheus/datasource.test.ts b/public/app/plugins/datasource/prometheus/datasource.test.ts index a80f1f4279d..de61312c84b 100644 --- a/public/app/plugins/datasource/prometheus/datasource.test.ts +++ b/public/app/plugins/datasource/prometheus/datasource.test.ts @@ -2154,7 +2154,7 @@ describe('modifyQuery', () => { describe('and query has no labels', () => { it('then the correct label should be added', () => { const query: PromQuery = { refId: 'A', expr: 'go_goroutines' }; - const action = { key: 'cluster', value: 'us-cluster', type: 'ADD_FILTER' }; + const action = { options: { key: 'cluster', value: 'us-cluster' }, type: 'ADD_FILTER' }; const instanceSettings = { jsonData: 
{} } as unknown as DataSourceInstanceSettings; const ds = new PrometheusDatasource(instanceSettings, templateSrvStub as any, timeSrvStub as any); @@ -2168,7 +2168,7 @@ describe('modifyQuery', () => { describe('and query has labels', () => { it('then the correct label should be added', () => { const query: PromQuery = { refId: 'A', expr: 'go_goroutines{cluster="us-cluster"}' }; - const action = { key: 'pod', value: 'pod-123', type: 'ADD_FILTER' }; + const action = { options: { key: 'pod', value: 'pod-123' }, type: 'ADD_FILTER' }; const instanceSettings = { jsonData: {} } as unknown as DataSourceInstanceSettings; const ds = new PrometheusDatasource(instanceSettings, templateSrvStub as any, timeSrvStub as any); @@ -2184,7 +2184,7 @@ describe('modifyQuery', () => { describe('and query has no labels', () => { it('then the correct label should be added', () => { const query: PromQuery = { refId: 'A', expr: 'go_goroutines' }; - const action = { key: 'cluster', value: 'us-cluster', type: 'ADD_FILTER_OUT' }; + const action = { options: { key: 'cluster', value: 'us-cluster' }, type: 'ADD_FILTER_OUT' }; const instanceSettings = { jsonData: {} } as unknown as DataSourceInstanceSettings; const ds = new PrometheusDatasource(instanceSettings, templateSrvStub as any, timeSrvStub as any); @@ -2198,7 +2198,7 @@ describe('modifyQuery', () => { describe('and query has labels', () => { it('then the correct label should be added', () => { const query: PromQuery = { refId: 'A', expr: 'go_goroutines{cluster="us-cluster"}' }; - const action = { key: 'pod', value: 'pod-123', type: 'ADD_FILTER_OUT' }; + const action = { options: { key: 'pod', value: 'pod-123' }, type: 'ADD_FILTER_OUT' }; const instanceSettings = { jsonData: {} } as unknown as DataSourceInstanceSettings; const ds = new PrometheusDatasource(instanceSettings, templateSrvStub as any, timeSrvStub as any); diff --git a/public/app/plugins/datasource/prometheus/datasource.tsx b/public/app/plugins/datasource/prometheus/datasource.tsx 
index 5c834905386..c6a3f089e90 100644 --- a/public/app/plugins/datasource/prometheus/datasource.tsx +++ b/public/app/plugins/datasource/prometheus/datasource.tsx @@ -22,6 +22,7 @@ import { TimeRange, DataFrame, dateTime, + QueryFixAction, } from '@grafana/data'; import { BackendSrvRequest, @@ -1030,15 +1031,22 @@ export class PrometheusDatasource } } - modifyQuery(query: PromQuery, action: any): PromQuery { + modifyQuery(query: PromQuery, action: QueryFixAction): PromQuery { let expression = query.expr ?? ''; switch (action.type) { case 'ADD_FILTER': { - expression = addLabelToQuery(expression, action.key, action.value); + const { key, value } = action.options ?? {}; + if (key && value) { + expression = addLabelToQuery(expression, key, value); + } + break; } case 'ADD_FILTER_OUT': { - expression = addLabelToQuery(expression, action.key, action.value, '!='); + const { key, value } = action.options ?? {}; + if (key && value) { + expression = addLabelToQuery(expression, key, value, '!='); + } break; } case 'ADD_HISTOGRAM_QUANTILE': { @@ -1054,8 +1062,8 @@ export class PrometheusDatasource break; } case 'EXPAND_RULES': { - if (action.mapping) { - expression = expandRecordingRules(expression, action.mapping); + if (action.options) { + expression = expandRecordingRules(expression, action.options); } break; } diff --git a/public/app/plugins/datasource/prometheus/query_hints.ts b/public/app/plugins/datasource/prometheus/query_hints.ts index 0b6be60ed99..08dd3495099 100644 --- a/public/app/plugins/datasource/prometheus/query_hints.ts +++ b/public/app/plugins/datasource/prometheus/query_hints.ts @@ -103,7 +103,7 @@ export function getQueryHints(query: string, series?: any[], datasource?: Promet action: { type: 'EXPAND_RULES', query, - mapping: mappingForQuery, + options: mappingForQuery, }, } as any as QueryFix, }); diff --git a/public/app/plugins/datasource/prometheus/querybuilder/shared/QueryBuilderHints.tsx 
b/public/app/plugins/datasource/prometheus/querybuilder/shared/QueryBuilderHints.tsx index 93b0d9ba721..3deb9c3e13a 100644 --- a/public/app/plugins/datasource/prometheus/querybuilder/shared/QueryBuilderHints.tsx +++ b/public/app/plugins/datasource/prometheus/querybuilder/shared/QueryBuilderHints.tsx @@ -45,10 +45,12 @@ export const QueryBuilderHints = ({
+ diff --git a/public/app/features/dimensions/editors/ValueMappingsEditor/ValueMappingsEditorModal.tsx b/public/app/features/dimensions/editors/ValueMappingsEditor/ValueMappingsEditorModal.tsx index aacb26e065a..0c1eef75097 100644 --- a/public/app/features/dimensions/editors/ValueMappingsEditor/ValueMappingsEditorModal.tsx +++ b/public/app/features/dimensions/editors/ValueMappingsEditor/ValueMappingsEditorModal.tsx @@ -1,4 +1,5 @@ import { css } from '@emotion/css'; +import { uniqueId } from 'lodash'; import React, { useEffect, useState } from 'react'; import { DragDropContext, Droppable, DropResult } from 'react-beautiful-dnd'; @@ -54,18 +55,11 @@ export function ValueMappingsEditorModal({ value, onChange, onClose, showIconPic ]; const onAddValueMapping = (value: SelectableValue) => { - updateRows([ - ...rows, - { - type: value.value!, - isNew: true, - result: {}, - }, - ]); + updateRows([...rows, createRow({ type: value.value!, result: {}, isNew: true })]); }; const onDuplicateMapping = (index: number) => { - const sourceRow = rows[index]; + const sourceRow = duplicateRow(rows[index]); const copy = [...rows]; copy.splice(index, 0, { ...sourceRow }); @@ -111,7 +105,7 @@ export function ValueMappingsEditorModal({ value, onChange, onClose, showIconPic {rows.map((row, index) => ( ({ }), }); +function getRowUniqueId(): string { + return uniqueId('mapping-'); +} + +function createRow(row: Partial): ValueMappingEditRowModel { + return { + type: MappingType.ValueToText, + result: {}, + id: getRowUniqueId(), + ...row, + }; +} + +function duplicateRow(row: Partial): ValueMappingEditRowModel { + return { + ...createRow(row), + // provide a new unique id to the duplicated row, to preserve focus when dragging 2 duplicated rows + id: getRowUniqueId(), + }; +} + export function editModelToSaveModel(rows: ValueMappingEditRowModel[]) { const mappings: ValueMapping[] = []; const valueMaps: ValueMapping = { @@ -250,34 +265,42 @@ export function buildEditRowModels(value: 
ValueMapping[]) { switch (mapping.type) { case MappingType.ValueToText: for (const key of Object.keys(mapping.options)) { - editRows.push({ - type: mapping.type, - result: mapping.options[key], - key, - }); + editRows.push( + createRow({ + type: mapping.type, + result: mapping.options[key], + key, + }) + ); } break; case MappingType.RangeToText: - editRows.push({ - type: mapping.type, - result: mapping.options.result, - from: mapping.options.from ?? 0, - to: mapping.options.to ?? 0, - }); + editRows.push( + createRow({ + type: mapping.type, + result: mapping.options.result, + from: mapping.options.from ?? 0, + to: mapping.options.to ?? 0, + }) + ); break; case MappingType.RegexToText: - editRows.push({ - type: mapping.type, - result: mapping.options.result, - pattern: mapping.options.pattern, - }); + editRows.push( + createRow({ + type: mapping.type, + result: mapping.options.result, + pattern: mapping.options.pattern, + }) + ); break; case MappingType.SpecialValue: - editRows.push({ - type: mapping.type, - result: mapping.options.result, - specialMatch: mapping.options.match ?? SpecialValueMatch.Null, - }); + editRows.push( + createRow({ + type: mapping.type, + result: mapping.options.result, + specialMatch: mapping.options.match ?? 
SpecialValueMatch.Null, + }) + ); } } } From 6c89bf53f273ccbd0117cd9644d77c81636567ae Mon Sep 17 00:00:00 2001 From: Joey Tawadrous <90795735+joey-grafana@users.noreply.github.com> Date: Mon, 18 Jul 2022 15:36:16 +0100 Subject: [PATCH 036/116] Tempo: Tempo/Prometheus links select ds in new tab (cmd + click) (#52319) * Set tempo/prom ds name when clicking trace links * Updated tests * Removed vars --- .../app/plugins/datasource/tempo/datasource.test.ts | 12 ++++++++++-- public/app/plugins/datasource/tempo/datasource.ts | 13 +++++++------ 2 files changed, 17 insertions(+), 8 deletions(-) diff --git a/public/app/plugins/datasource/tempo/datasource.test.ts b/public/app/plugins/datasource/tempo/datasource.test.ts index 704b408bea4..081f1c88a33 100644 --- a/public/app/plugins/datasource/tempo/datasource.test.ts +++ b/public/app/plugins/datasource/tempo/datasource.test.ts @@ -578,6 +578,14 @@ const backendSrvWithPrometheus = { } throw new Error('unexpected uid'); }, + getDataSourceSettingsByUid(uid: string) { + if (uid === 'prom') { + return { name: 'Prometheus' }; + } else if (uid === 'gdev-tempo') { + return { name: 'Tempo' }; + } + return ''; + }, }; function setupBackendSrv(frame: DataFrame) { @@ -598,7 +606,7 @@ function setupBackendSrv(frame: DataFrame) { const defaultSettings: DataSourceInstanceSettings = { id: 0, - uid: '0', + uid: 'gdev-tempo', type: 'tracing', name: 'tempo', access: 'proxy', @@ -777,7 +785,7 @@ const serviceGraphLinks = [ queryType: 'nativeSearch', serviceName: '${__data.fields[0]}', } as TempoQuery, - datasourceUid: 'tempo', + datasourceUid: 'gdev-tempo', datasourceName: 'Tempo', }, }, diff --git a/public/app/plugins/datasource/tempo/datasource.ts b/public/app/plugins/datasource/tempo/datasource.ts index 3fa28b7cd99..dc2640c2951 100644 --- a/public/app/plugins/datasource/tempo/datasource.ts +++ b/public/app/plugins/datasource/tempo/datasource.ts @@ -237,18 +237,19 @@ export class TempoDatasource extends DataSourceWithBackend rateQuery(options, 
result, dsId).pipe( - concatMap((result) => errorAndDurationQuery(options, result, dsId, this.name)) + concatMap((result) => errorAndDurationQuery(options, result, dsId, tempoDsUid)) ) ) ) ); } else { - subQueries.push(serviceMapQuery(options, dsId, this.name)); + subQueries.push(serviceMapQuery(options, dsId, tempoDsUid)); } } @@ -585,7 +586,7 @@ function makePromLink(title: string, expr: string, datasourceUid: string, instan instant: instant, } as PromQuery, datasourceUid, - datasourceName: 'Prometheus', + datasourceName: getDatasourceSrv().getDataSourceSettingsByUid(datasourceUid)?.name ?? '', }, }; } @@ -604,8 +605,8 @@ export function makeTempoLink(title: string, serviceName: string, spanName: stri title, internal: { query, - datasourceUid: datasourceUid, - datasourceName: 'Tempo', + datasourceUid, + datasourceName: getDatasourceSrv().getDataSourceSettingsByUid(datasourceUid)?.name ?? '', }, }; } From 278f5b131b3083a7de657681661fef56f02441cf Mon Sep 17 00:00:00 2001 From: Michael Mandrus <41969079+mmandrus@users.noreply.github.com> Date: Mon, 18 Jul 2022 11:02:37 -0400 Subject: [PATCH 037/116] add secretsmanager type to plugin.json (#52408) --- docs/sources/developers/plugins/plugin.schema.json | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/docs/sources/developers/plugins/plugin.schema.json b/docs/sources/developers/plugins/plugin.schema.json index 41fe4dabaf3..358c159a590 100644 --- a/docs/sources/developers/plugins/plugin.schema.json +++ b/docs/sources/developers/plugins/plugin.schema.json @@ -14,12 +14,12 @@ "id": { "type": "string", "description": "Unique name of the plugin. 
If the plugin is published on grafana.com, then the plugin id has to follow the naming conventions.", - "pattern": "^[0-9a-z]+\\-([0-9a-z]+\\-)?(app|panel|datasource)$" + "pattern": "^[0-9a-z]+\\-([0-9a-z]+\\-)?(app|panel|datasource|secretsmanager)$" }, "type": { "type": "string", "description": "Plugin type.", - "enum": ["app", "datasource", "panel"] + "enum": ["app", "datasource", "panel", "secretsmanager"] }, "name": { "type": "string", @@ -68,7 +68,7 @@ }, "type": { "type": "string", - "enum": ["dashboard", "page", "panel", "datasource"] + "enum": ["dashboard", "page", "panel", "datasource", "secretsmanager"] }, "name": { "type": "string" @@ -159,11 +159,11 @@ "properties": { "id": { "type": "string", - "pattern": "^[0-9a-z]+\\-([0-9a-z]+\\-)?(app|panel|datasource)$" + "pattern": "^[0-9a-z]+\\-([0-9a-z]+\\-)?(app|panel|datasource|secretsmanager)$" }, "type": { "type": "string", - "enum": ["app", "datasource", "panel"] + "enum": ["app", "datasource", "panel", "secretsmanager"] }, "name": { "type": "string" From b3b64e24c2d3a62f5137310c368933f20f625a22 Mon Sep 17 00:00:00 2001 From: Ryan McKinley Date: Mon, 18 Jul 2022 08:03:08 -0700 Subject: [PATCH 038/116] Export: include section count in status updates (#52368) --- pkg/services/export/commit_helper.go | 3 +++ pkg/services/export/dummy_job.go | 12 +++++++----- pkg/services/export/git_export_job.go | 20 +++++++++++++++----- pkg/services/export/service.go | 10 ++++++++-- pkg/services/export/types.go | 18 +++++++++--------- 5 files changed, 42 insertions(+), 21 deletions(-) diff --git a/pkg/services/export/commit_helper.go b/pkg/services/export/commit_helper.go index 5848db9dd08..a244cc47ae7 100644 --- a/pkg/services/export/commit_helper.go +++ b/pkg/services/export/commit_helper.go @@ -27,6 +27,8 @@ type commitHelper struct { stopRequested bool broadcast func(path string) exporter string // key for the current exporter + + counter int } type commitBody struct { @@ -125,6 +127,7 @@ func (ch *commitHelper) add(opts 
commitOptions) error { fmt.Printf("STATUS: %+v\n", status) return fmt.Errorf("unable to add file: %s (%d)", sub, len(b.body)) } + ch.counter++ } copts := &git.CommitOptions{ diff --git a/pkg/services/export/dummy_job.go b/pkg/services/export/dummy_job.go index 58a38889398..eeb3a98865d 100644 --- a/pkg/services/export/dummy_job.go +++ b/pkg/services/export/dummy_job.go @@ -20,6 +20,7 @@ type dummyExportJob struct { cfg ExportConfig broadcaster statusBroadcaster stopRequested bool + total int } func startDummyExportJob(cfg ExportConfig, broadcaster statusBroadcaster) (Job, error) { @@ -31,9 +32,10 @@ func startDummyExportJob(cfg ExportConfig, broadcaster statusBroadcaster) (Job, Running: true, Target: "git export", Started: time.Now().UnixMilli(), - Count: int64(math.Round(10 + rand.Float64()*20)), - Current: 0, + Count: make(map[string]int, 10), + Index: 0, }, + total: int(math.Round(10 + rand.Float64()*20)), } broadcaster(job.status) @@ -74,12 +76,12 @@ func (e *dummyExportJob) start() { for t := range ticker.C { e.statusMu.Lock() e.status.Changed = t.UnixMilli() - e.status.Current++ - e.status.Last = fmt.Sprintf("ITEM: %d", e.status.Current) + e.status.Index++ + e.status.Last = fmt.Sprintf("ITEM: %d", e.status.Index) e.statusMu.Unlock() // Wait till we are done - shouldStop := e.stopRequested || e.status.Current >= e.status.Count + shouldStop := e.stopRequested || e.status.Index >= e.total e.broadcaster(e.status) if shouldStop { diff --git a/pkg/services/export/git_export_job.go b/pkg/services/export/git_export_job.go index 4b13b5e44c8..b676a38e12f 100644 --- a/pkg/services/export/git_export_job.go +++ b/pkg/services/export/git_export_job.go @@ -44,7 +44,7 @@ func startGitExportJob(cfg ExportConfig, sql *sqlstore.SQLStore, dashboardsnapsh Running: true, Target: "git export", Started: time.Now().UnixMilli(), - Current: 0, + Count: make(map[string]int, len(exporters)*2), }, } @@ -75,7 +75,6 @@ func (e *gitExportJob) requestStop() { func (e *gitExportJob) start() { 
defer func() { e.logger.Info("Finished git export job") - e.statusMu.Lock() defer e.statusMu.Unlock() s := e.status @@ -128,6 +127,7 @@ func (e *gitExportJob) doExportWithHistory() error { workDir: e.rootDir, orgDir: e.rootDir, broadcast: func(p string) { + e.status.Index++ e.status.Last = p[len(e.rootDir):] e.status.Changed = time.Now().UnixMilli() e.broadcaster(e.status) @@ -144,6 +144,7 @@ func (e *gitExportJob) doExportWithHistory() error { for _, org := range cmd.Result { if len(cmd.Result) > 1 { e.helper.orgDir = path.Join(e.rootDir, fmt.Sprintf("org_%d", org.Id)) + e.status.Count["orgs"] += 1 } err = e.helper.initOrg(e.sql, org.Id) if err != nil { @@ -180,18 +181,27 @@ func (e *gitExportJob) process(exporters []Exporter) error { continue } + e.status.Target = exp.Key + e.helper.exporter = exp.Key + + before := e.helper.counter if exp.process != nil { - e.status.Target = exp.Key - e.helper.exporter = exp.Key err := exp.process(e.helper, e) + if err != nil { return err } } if exp.Exporters != nil { - return e.process(exp.Exporters) + err := e.process(exp.Exporters) + if err != nil { + return err + } } + + // Aggregate the counts for each org in the same report + e.status.Count[exp.Key] += (e.helper.counter - before) } return nil } diff --git a/pkg/services/export/service.go b/pkg/services/export/service.go index e8f5e185c7e..41c7c0ca678 100644 --- a/pkg/services/export/service.go +++ b/pkg/services/export/service.go @@ -67,36 +67,42 @@ var exporters = []Exporter{ process: exportDataSources, }, { - Key: "services", - Name: "Services", + Key: "system", + Name: "System", Description: "Save service settings", Exporters: []Exporter{ { + Key: "system_preferences", Name: "Preferences", Description: "User and team preferences", process: exportSystemPreferences, }, { + Key: "system_stars", Name: "Stars", Description: "User stars", process: exportSystemStars, }, { + Key: "system_playlists", Name: "Playlists", Description: "Playlists", process: exportSystemPlaylists, }, 
{ + Key: "system_kv_store", Name: "Key Value store", Description: "Internal KV store", process: exportKVStore, }, { + Key: "system_short_url", Name: "Short URLs", Description: "saved links", process: exportSystemShortURL, }, { + Key: "system_live", Name: "Grafana live", Description: "archived messages", process: exportLive, diff --git a/pkg/services/export/types.go b/pkg/services/export/types.go index 5deade2fd69..a64cabecfef 100644 --- a/pkg/services/export/types.go +++ b/pkg/services/export/types.go @@ -2,15 +2,15 @@ package export // Export status. Only one running at a time type ExportStatus struct { - Running bool `json:"running"` - Target string `json:"target"` // description of where it is going (no secrets) - Started int64 `json:"started,omitempty"` - Finished int64 `json:"finished,omitempty"` - Changed int64 `json:"update,omitempty"` - Count int64 `json:"count,omitempty"` - Current int64 `json:"current,omitempty"` - Last string `json:"last,omitempty"` - Status string `json:"status"` // ERROR, SUCCESS, ETC + Running bool `json:"running"` + Target string `json:"target"` // description of where it is going (no secrets) + Started int64 `json:"started,omitempty"` + Finished int64 `json:"finished,omitempty"` + Changed int64 `json:"update,omitempty"` + Last string `json:"last,omitempty"` + Status string `json:"status"` // ERROR, SUCCESS, ETC + Index int `json:"index,omitempty"` + Count map[string]int `json:"count,omitempty"` } // Basic export config (for now) From 1bedf33e3da5ebd61f1416e3e842fc9c6afb5ff5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Torkel=20=C3=96degaard?= Date: Mon, 18 Jul 2022 17:23:49 +0200 Subject: [PATCH 039/116] ServiceAccount: New service account modal to follow design system (#52373) * ServiceAccount: New service account modal to follow design system * Fixing spacing and removing double Field label * Fixing imports * Update comment --- .../components/CreateTokenModal.tsx | 137 +++++++----------- 1 file changed, 53 insertions(+), 84 
deletions(-) diff --git a/public/app/features/serviceaccounts/components/CreateTokenModal.tsx b/public/app/features/serviceaccounts/components/CreateTokenModal.tsx index ba48c05a717..5a90a71839b 100644 --- a/public/app/features/serviceaccounts/components/CreateTokenModal.tsx +++ b/public/app/features/serviceaccounts/components/CreateTokenModal.tsx @@ -8,11 +8,7 @@ import { ClipboardButton, DatePickerWithInput, Field, - FieldSet, - HorizontalGroup, - Icon, Input, - Label, Modal, RadioButtonGroup, useStyles2, @@ -76,12 +72,7 @@ export const CreateTokenModal = ({ isOpen, token, serviceAccountLogin, onCreateT onClose(); }; - const modalTitle = ( -
- - {!token ? 'Add service account token' : 'Service account token created'} -
- ); + const modalTitle = !token ? 'Add service account token' : 'Service account token created'; return ( {!token ? (
-
- - { - setNewTokenName(e.currentTarget.value); - }} - /> - + + { + setNewTokenName(e.currentTarget.value); + }} + /> + + - {isWithExpirationDate && ( - - - - )} -
- + + {isWithExpirationDate && ( + + + + )} + + +
) : ( <> -
- - -
- - token} - > - Copy to clipboard - -
-
-
- + +
+ + token} + > + Copy clipboard + +
+
+ token} onClipboardCopy={onCloseInternal}> Copy to clipboard and close -
+ )}
@@ -185,29 +172,11 @@ const getStyles = (theme: GrafanaTheme2) => { modalContent: css` overflow: visible; `, - modalRow: css` - margin-bottom: ${theme.spacing(4)}; - `, modalTokenRow: css` display: flex; `, modalCopyToClipboardButton: css` margin-left: ${theme.spacing(0.5)}; `, - modalHeaderTitle: css` - font-size: ${theme.typography.size.lg}; - margin: ${theme.spacing(0, 4, 0, 1)}; - display: flex; - align-items: center; - position: relative; - top: 2px; - `, - modalHeaderIcon: css` - margin-right: ${theme.spacing(2)}; - font-size: inherit; - &:before { - vertical-align: baseline; - } - `, }; }; From 38ec4c0a09803964d67e5aa52079041204f08e71 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Torkel=20=C3=96degaard?= Date: Mon, 18 Jul 2022 17:24:21 +0200 Subject: [PATCH 040/116] UnsavedChanges: Should not be triggered when only going into panel edit without changing anything (#52363) --- .../PanelEditor/state/actions.test.ts | 29 +++++++++++++++++++ .../components/PanelEditor/state/actions.ts | 2 +- 2 files changed, 30 insertions(+), 1 deletion(-) diff --git a/public/app/features/dashboard/components/PanelEditor/state/actions.test.ts b/public/app/features/dashboard/components/PanelEditor/state/actions.test.ts index b52df7d3ae9..6ea4ca3ff9f 100644 --- a/public/app/features/dashboard/components/PanelEditor/state/actions.test.ts +++ b/public/app/features/dashboard/components/PanelEditor/state/actions.test.ts @@ -134,6 +134,35 @@ describe('panelEditor actions', () => { expect(dispatchedActions.length).toBe(2); expect(sourcePanel.getOptions()).toEqual({}); }); + + it('should not increment configRev when no changes made and leaving panel edit', async () => { + const sourcePanel = new PanelModel({ id: 12, type: 'graph' }); + sourcePanel.plugin = getPanelPlugin({}); + + const dashboard = new DashboardModel({ + panels: [{ id: 12, type: 'graph' }], + }); + + const panel = dashboard.initEditPanel(sourcePanel); + + const state: PanelEditorState = { + ...initialState(), + getPanel: () => 
panel, + getSourcePanel: () => sourcePanel, + }; + + await thunkTester({ + panelEditor: state, + panels: {}, + dashboard: { + getModel: () => dashboard, + }, + }) + .givenThunk(exitPanelEditor) + .whenThunkIsDispatched(); + + expect(sourcePanel.configRev).toEqual(0); + }); }); describe('skipPanelUpdate', () => { diff --git a/public/app/features/dashboard/components/PanelEditor/state/actions.ts b/public/app/features/dashboard/components/PanelEditor/state/actions.ts index a2c037843eb..fca553ea7da 100644 --- a/public/app/features/dashboard/components/PanelEditor/state/actions.ts +++ b/public/app/features/dashboard/components/PanelEditor/state/actions.ts @@ -116,7 +116,7 @@ export function exitPanelEditor(): ThunkResult { dashboard.exitPanelEditor(); } - if (!shouldDiscardChanges) { + if (panel.hasChanged && !shouldDiscardChanges) { const modifiedSaveModel = panel.getSaveModel(); const sourcePanel = getSourcePanel(); const panelTypeChanged = sourcePanel.type !== panel.type; From e7feff6d99a364598b4b913da91152670063a6f7 Mon Sep 17 00:00:00 2001 From: George Robinson Date: Mon, 18 Jul 2022 16:27:06 +0100 Subject: [PATCH 041/116] Alerting: Move debug log line to where alert rules are updated (#52318) --- pkg/services/ngalert/schedule/fetcher.go | 1 + pkg/services/ngalert/schedule/schedule.go | 2 -- 2 files changed, 1 insertion(+), 2 deletions(-) diff --git a/pkg/services/ngalert/schedule/fetcher.go b/pkg/services/ngalert/schedule/fetcher.go index dd5a4de78b8..0c9b2b9b608 100644 --- a/pkg/services/ngalert/schedule/fetcher.go +++ b/pkg/services/ngalert/schedule/fetcher.go @@ -47,6 +47,7 @@ func (sch *schedule) updateSchedulableAlertRules(ctx context.Context) error { if err := sch.ruleStore.GetAlertRulesForScheduling(ctx, &q); err != nil { return fmt.Errorf("failed to get alert rules: %w", err) } + sch.log.Debug("alert rules fetched", "count", len(q.Result)) sch.schedulableAlertRules.set(q.Result) return nil } diff --git a/pkg/services/ngalert/schedule/schedule.go 
b/pkg/services/ngalert/schedule/schedule.go index ebf71545b34..07467ac45a5 100644 --- a/pkg/services/ngalert/schedule/schedule.go +++ b/pkg/services/ngalert/schedule/schedule.go @@ -228,8 +228,6 @@ func (sch *schedule) schedulePeriodic(ctx context.Context) error { } alertRules := sch.schedulableAlertRules.all() - sch.log.Debug("alert rules fetched", "count", len(alertRules)) - // registeredDefinitions is a map used for finding deleted alert rules // initially it is assigned to all known alert rules from the previous cycle // each alert rule found also in this cycle is removed From 841e5ae8ad20d9d698e9ed4758adf337e28e4a07 Mon Sep 17 00:00:00 2001 From: Artur Wierzbicki Date: Mon, 18 Jul 2022 19:37:22 +0400 Subject: [PATCH 042/116] Storage: `system/branding` storage (#51987) * Storage: add `system` branding storage root, expose `system/branding` subfolder * Storage: merge --- pkg/services/store/service.go | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) diff --git a/pkg/services/store/service.go b/pkg/services/store/service.go index 50ab7da657d..f251484956b 100644 --- a/pkg/services/store/service.go +++ b/pkg/services/store/service.go @@ -28,6 +28,9 @@ var ErrAccessDenied = errors.New("access denied") const RootPublicStatic = "public-static" const RootResources = "resources" const RootDevenv = "devenv" +const RootSystem = "system" + +const SystemBrandingStorage = "system/branding" const MAX_UPLOAD_SIZE = 1 * 1024 * 1024 // 3MB @@ -114,6 +117,13 @@ func ProvideService(sql *sqlstore.SQLStore, features featuremgmt.FeatureToggles, setDescription("Upload custom resource files")) } + storages = append(storages, + newSQLStorage(RootSystem, + "System", + &StorageSQLConfig{orgId: orgId}, + sql, + ).setBuiltin(true).setDescription("Grafana system storage")) + return storages } @@ -141,6 +151,12 @@ func ProvideService(sql *sqlstore.SQLStore, features featuremgmt.FeatureToggles, ActionFilesWrite: allowAllPathFilter, ActionFilesDelete: allowAllPathFilter, } + case 
RootSystem: + return map[string]filestorage.PathFilter{ + ActionFilesRead: allowAllPathFilter, + ActionFilesWrite: allowAllPathFilter, + ActionFilesDelete: allowAllPathFilter, + } default: return nil } From 824f12a99319b443ac2d837d5c25fffb55816a1a Mon Sep 17 00:00:00 2001 From: Ryan McKinley Date: Mon, 18 Jul 2022 10:44:42 -0700 Subject: [PATCH 043/116] Storage: Remove storageLocalUpload flag (#52413) --- .../src/types/featureToggles.gen.ts | 1 - pkg/api/api.go | 19 +++++++++---------- pkg/services/featuremgmt/registry.go | 6 ------ pkg/services/featuremgmt/toggles_gen.go | 4 ---- pkg/services/store/service.go | 17 +++++++++-------- .../editors/ResourcePickerPopover.tsx | 10 ---------- 6 files changed, 18 insertions(+), 39 deletions(-) diff --git a/packages/grafana-data/src/types/featureToggles.gen.ts b/packages/grafana-data/src/types/featureToggles.gen.ts index 13624b12500..01feba7cc2b 100644 --- a/packages/grafana-data/src/types/featureToggles.gen.ts +++ b/packages/grafana-data/src/types/featureToggles.gen.ts @@ -43,7 +43,6 @@ export interface FeatureToggles { storage?: boolean; dashboardsFromStorage?: boolean; export?: boolean; - storageLocalUpload?: boolean; azureMonitorResourcePickerForMetrics?: boolean; explore2Dashboard?: boolean; tracing?: boolean; diff --git a/pkg/api/api.go b/pkg/api/api.go index 619afbcfb73..9ab32650ac4 100644 --- a/pkg/api/api.go +++ b/pkg/api/api.go @@ -227,17 +227,16 @@ func (hs *HTTPServer) registerRoutes() { }) if hs.Features.IsEnabled(featuremgmt.FlagStorage) { - apiRoute.Group("/storage", func(orgRoute routing.RouteRegister) { - orgRoute.Get("/list/", routing.Wrap(hs.StorageService.List)) - orgRoute.Get("/list/*", routing.Wrap(hs.StorageService.List)) - orgRoute.Get("/read/*", routing.Wrap(hs.StorageService.Read)) + apiRoute.Group("/storage", func(storageRoute routing.RouteRegister) { + storageRoute.Get("/list/", routing.Wrap(hs.StorageService.List)) + storageRoute.Get("/list/*", routing.Wrap(hs.StorageService.List)) + 
storageRoute.Get("/read/*", routing.Wrap(hs.StorageService.Read)) - if hs.Features.IsEnabled(featuremgmt.FlagStorageLocalUpload) { - orgRoute.Post("/delete/*", reqGrafanaAdmin, routing.Wrap(hs.StorageService.Delete)) - orgRoute.Post("/upload", reqGrafanaAdmin, routing.Wrap(hs.StorageService.Upload)) - orgRoute.Post("/createFolder", reqGrafanaAdmin, routing.Wrap(hs.StorageService.CreateFolder)) - orgRoute.Post("/deleteFolder", reqGrafanaAdmin, routing.Wrap(hs.StorageService.DeleteFolder)) - } + // Write paths + storageRoute.Post("/delete/*", reqGrafanaAdmin, routing.Wrap(hs.StorageService.Delete)) + storageRoute.Post("/upload", reqGrafanaAdmin, routing.Wrap(hs.StorageService.Upload)) + storageRoute.Post("/createFolder", reqGrafanaAdmin, routing.Wrap(hs.StorageService.CreateFolder)) + storageRoute.Post("/deleteFolder", reqGrafanaAdmin, routing.Wrap(hs.StorageService.DeleteFolder)) }) } diff --git a/pkg/services/featuremgmt/registry.go b/pkg/services/featuremgmt/registry.go index 2ce3890e4d0..f596d919ae4 100644 --- a/pkg/services/featuremgmt/registry.go +++ b/pkg/services/featuremgmt/registry.go @@ -152,12 +152,6 @@ var ( State: FeatureStateAlpha, RequiresDevMode: true, }, - { - Name: "storageLocalUpload", - Description: "allow uploads to local storage", - State: FeatureStateAlpha, - RequiresDevMode: true, - }, { Name: "azureMonitorResourcePickerForMetrics", Description: "New UI for Azure Monitor Metrics Query", diff --git a/pkg/services/featuremgmt/toggles_gen.go b/pkg/services/featuremgmt/toggles_gen.go index aa9c44edccf..a0684dcc9c7 100644 --- a/pkg/services/featuremgmt/toggles_gen.go +++ b/pkg/services/featuremgmt/toggles_gen.go @@ -115,10 +115,6 @@ const ( // Export grafana instance (to git, etc) FlagExport = "export" - // FlagStorageLocalUpload - // allow uploads to local storage - FlagStorageLocalUpload = "storageLocalUpload" - // FlagAzureMonitorResourcePickerForMetrics // New UI for Azure Monitor Metrics Query FlagAzureMonitorResourcePickerForMetrics = 
"azureMonitorResourcePickerForMetrics" diff --git a/pkg/services/store/service.go b/pkg/services/store/service.go index f251484956b..480fee1b6eb 100644 --- a/pkg/services/store/service.go +++ b/pkg/services/store/service.go @@ -108,15 +108,16 @@ func ProvideService(sql *sqlstore.SQLStore, features featuremgmt.FeatureToggles, initializeOrgStorages := func(orgId int64) []storageRuntime { storages := make([]storageRuntime, 0) - if features.IsEnabled(featuremgmt.FlagStorageLocalUpload) { - storages = append(storages, - newSQLStorage(RootResources, - "Resources", - &StorageSQLConfig{orgId: orgId}, sql). - setBuiltin(true). - setDescription("Upload custom resource files")) - } + // Custom upload files + storages = append(storages, + newSQLStorage(RootResources, + "Resources", + &StorageSQLConfig{orgId: orgId}, sql). + setBuiltin(true). + setDescription("Upload custom resource files")) + + // System settings storages = append(storages, newSQLStorage(RootSystem, "System", diff --git a/public/app/features/dimensions/editors/ResourcePickerPopover.tsx b/public/app/features/dimensions/editors/ResourcePickerPopover.tsx index 3627a3de511..aa1d2f3b93b 100644 --- a/public/app/features/dimensions/editors/ResourcePickerPopover.tsx +++ b/public/app/features/dimensions/editors/ResourcePickerPopover.tsx @@ -94,16 +94,6 @@ export const ResourcePickerPopover = (props: Props) => { - {config.featureToggles['storageLocalUpload'] ? ( - - ) : ( - '' - )}
{renderPicker()} From 4aae9d156719bd5bef76ae55b1834fd2178b2059 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Torkel=20=C3=96degaard?= Date: Mon, 18 Jul 2022 20:26:10 +0200 Subject: [PATCH 044/116] Scene: Support for collapsable rows via a nested scene object (#52367) * initial row test * Updated * Row is more of a nested collapsable scene * Updated * Added test for nested scene * Added test for nested scene --- .betterer.results | 17 ++- .../scenes/components/NestedScene.test.tsx | 51 ++++++++ .../scenes/components/NestedScene.tsx | 119 ++++++++++++++++++ .../features/scenes/core/SceneObjectBase.tsx | 7 +- public/app/features/scenes/core/types.ts | 8 +- public/app/features/scenes/scenes/index.tsx | 3 +- public/app/features/scenes/scenes/nested.tsx | 6 +- public/app/features/scenes/scenes/queries.ts | 16 +++ .../features/scenes/scenes/sceneWithRows.tsx | 62 +++++++++ 9 files changed, 273 insertions(+), 16 deletions(-) create mode 100644 public/app/features/scenes/components/NestedScene.test.tsx create mode 100644 public/app/features/scenes/components/NestedScene.tsx create mode 100644 public/app/features/scenes/scenes/queries.ts create mode 100644 public/app/features/scenes/scenes/sceneWithRows.tsx diff --git a/.betterer.results b/.betterer.results index ceabba5c265..914a3afa637 100644 --- a/.betterer.results +++ b/.betterer.results @@ -5716,17 +5716,16 @@ exports[`better eslint`] = { [0, 0, 0, "Unexpected any. Specify a different type.", "1"] ], "public/app/features/scenes/core/SceneObjectBase.tsx:5381": [ - [0, 0, 0, "Unexpected any. Specify a different type.", "0"], + [0, 0, 0, "Do not use any type assertions.", "0"], [0, 0, 0, "Do not use any type assertions.", "1"], - [0, 0, 0, "Do not use any type assertions.", "2"], - [0, 0, 0, "Unexpected any. Specify a different type.", "3"], + [0, 0, 0, "Unexpected any. 
Specify a different type.", "2"], + [0, 0, 0, "Do not use any type assertions.", "3"], [0, 0, 0, "Do not use any type assertions.", "4"], - [0, 0, 0, "Do not use any type assertions.", "5"], - [0, 0, 0, "Unexpected any. Specify a different type.", "6"], - [0, 0, 0, "Do not use any type assertions.", "7"], - [0, 0, 0, "Unexpected any. Specify a different type.", "8"], - [0, 0, 0, "Do not use any type assertions.", "9"], - [0, 0, 0, "Unexpected any. Specify a different type.", "10"] + [0, 0, 0, "Unexpected any. Specify a different type.", "5"], + [0, 0, 0, "Do not use any type assertions.", "6"], + [0, 0, 0, "Unexpected any. Specify a different type.", "7"], + [0, 0, 0, "Do not use any type assertions.", "8"], + [0, 0, 0, "Unexpected any. Specify a different type.", "9"] ], "public/app/features/scenes/core/SceneTimeRange.tsx:5381": [ [0, 0, 0, "Do not use any type assertions.", "0"], diff --git a/public/app/features/scenes/components/NestedScene.test.tsx b/public/app/features/scenes/components/NestedScene.test.tsx new file mode 100644 index 00000000000..7a400e8447d --- /dev/null +++ b/public/app/features/scenes/components/NestedScene.test.tsx @@ -0,0 +1,51 @@ +import { screen, render } from '@testing-library/react'; +import React from 'react'; + +import { NestedScene } from './NestedScene'; +import { Scene } from './Scene'; +import { SceneCanvasText } from './SceneCanvasText'; +import { SceneFlexLayout } from './SceneFlexLayout'; + +function setup() { + const scene = new Scene({ + title: 'Hello', + layout: new SceneFlexLayout({ + children: [ + new NestedScene({ + title: 'Nested title', + canRemove: true, + canCollapse: true, + layout: new SceneFlexLayout({ + children: [new SceneCanvasText({ text: 'SceneCanvasText' })], + }), + }), + ], + }), + }); + + render(); +} + +describe('NestedScene', () => { + it('Renders heading and layout', () => { + setup(); + expect(screen.getByRole('heading', { name: 'Nested title' })).toBeInTheDocument(); + 
expect(screen.getByText('SceneCanvasText')).toBeInTheDocument(); + }); + + it('Can remove', async () => { + setup(); + screen.getByRole('button', { name: 'Remove scene' }).click(); + expect(screen.queryByRole('heading', { name: 'Nested title' })).not.toBeInTheDocument(); + }); + + it('Can collapse and expand', async () => { + setup(); + + screen.getByRole('button', { name: 'Collapse scene' }).click(); + expect(screen.queryByText('SceneCanvasText')).not.toBeInTheDocument(); + + screen.getByRole('button', { name: 'Expand scene' }).click(); + expect(screen.getByText('SceneCanvasText')).toBeInTheDocument(); + }); +}); diff --git a/public/app/features/scenes/components/NestedScene.tsx b/public/app/features/scenes/components/NestedScene.tsx new file mode 100644 index 00000000000..b36d40d7b40 --- /dev/null +++ b/public/app/features/scenes/components/NestedScene.tsx @@ -0,0 +1,119 @@ +import { css } from '@emotion/css'; +import React from 'react'; + +import { GrafanaTheme2 } from '@grafana/data'; +import { Stack } from '@grafana/experimental'; +import { Button, ToolbarButton, useStyles2 } from '@grafana/ui'; + +import { SceneObjectBase } from '../core/SceneObjectBase'; +import { + SceneObject, + SceneObjectState, + SceneLayoutState, + SceneComponentProps, + isSceneLayoutObject, +} from '../core/types'; + +interface NestedSceneState extends SceneObjectState { + title: string; + isCollapsed?: boolean; + canCollapse?: boolean; + canRemove?: boolean; + layout: SceneObject; + actions?: SceneObject[]; +} + +export class NestedScene extends SceneObjectBase { + static Component = NestedSceneRenderer; + + onToggle = () => { + this.setState({ + isCollapsed: !this.state.isCollapsed, + size: { + ...this.state.size, + ySizing: this.state.isCollapsed ? 
'fill' : 'content', + }, + }); + }; + + /** Removes itself from it's parent's children array */ + onRemove = () => { + const parent = this.parent!; + if (isSceneLayoutObject(parent)) { + parent.setState({ + children: parent.state.children.filter((x) => x !== this), + }); + } + }; +} + +export function NestedSceneRenderer({ model, isEditing }: SceneComponentProps) { + const { title, isCollapsed, canCollapse, canRemove, layout, actions } = model.useState(); + const styles = useStyles2(getStyles); + + const toolbarActions = (actions ?? []).map((action) => ); + + if (canRemove) { + toolbarActions.push( + + ); + } + + return ( +
+
+ +
+ {title} +
+ {canCollapse && ( +
+
+ )} +
+
{toolbarActions}
+
+ {!isCollapsed && } +
+ ); +} + +const getStyles = (theme: GrafanaTheme2) => ({ + row: css({ + display: 'flex', + flexDirection: 'column', + flexGrow: 1, + gap: theme.spacing(1), + cursor: 'pointer', + }), + toggle: css({}), + title: css({ + fontSize: theme.typography.h5.fontSize, + }), + rowHeader: css({ + display: 'flex', + alignItems: 'center', + gap: theme.spacing(2), + }), + actions: css({ + display: 'flex', + alignItems: 'center', + gap: theme.spacing(1), + justifyContent: 'flex-end', + flexGrow: 1, + }), +}); diff --git a/public/app/features/scenes/core/SceneObjectBase.tsx b/public/app/features/scenes/core/SceneObjectBase.tsx index 56eca2e0b0e..28bdd477b78 100644 --- a/public/app/features/scenes/core/SceneObjectBase.tsx +++ b/public/app/features/scenes/core/SceneObjectBase.tsx @@ -15,12 +15,13 @@ import { SceneEditor, SceneObjectList, SceneTimeRange, + isSceneObject, } from './types'; export abstract class SceneObjectBase implements SceneObject { subject = new Subject(); state: TState; - parent?: SceneObjectBase; + parent?: SceneObjectBase; subs = new Subscription(); isActive?: boolean; events = new EventBusSrv(); @@ -52,13 +53,13 @@ export abstract class SceneObjectBase impl private setParent() { for (const propValue of Object.values(this.state)) { - if (propValue instanceof SceneObjectBase) { + if (isSceneObject(propValue)) { propValue.parent = this; } if (Array.isArray(propValue)) { for (const child of propValue) { - if (child instanceof SceneObjectBase) { + if (isSceneObject(child)) { child.parent = this; } } diff --git a/public/app/features/scenes/core/types.ts b/public/app/features/scenes/core/types.ts index bb32a56b38f..673bdb44490 100644 --- a/public/app/features/scenes/core/types.ts +++ b/public/app/features/scenes/core/types.ts @@ -74,7 +74,7 @@ export interface SceneObject Editor(props: SceneComponentProps>): React.ReactElement | null; } -export type SceneObjectList = Array>; +export type SceneObjectList = Array>; export interface SceneLayoutState extends 
SceneObjectState { children: SceneObjectList; @@ -117,3 +117,9 @@ export interface SceneObjectWithUrlSync extends SceneObject { export function isSceneObjectWithUrlSync(obj: any): obj is SceneObjectWithUrlSync { return obj.getUrlState !== undefined; } + +export function isSceneLayoutObject( + obj: SceneObject +): obj is SceneObject { + return 'children' in obj.state && obj.state.children !== undefined; +} diff --git a/public/app/features/scenes/scenes/index.tsx b/public/app/features/scenes/scenes/index.tsx index 150d6b2b77b..c28184ab78d 100644 --- a/public/app/features/scenes/scenes/index.tsx +++ b/public/app/features/scenes/scenes/index.tsx @@ -2,9 +2,10 @@ import { Scene } from '../components/Scene'; import { getFlexLayoutTest, getScenePanelRepeaterTest } from './demo'; import { getNestedScene } from './nested'; +import { getSceneWithRows } from './sceneWithRows'; export function getScenes(): Scene[] { - return [getFlexLayoutTest(), getScenePanelRepeaterTest(), getNestedScene()]; + return [getFlexLayoutTest(), getScenePanelRepeaterTest(), getNestedScene(), getSceneWithRows()]; } const cache: Record = {}; diff --git a/public/app/features/scenes/scenes/nested.tsx b/public/app/features/scenes/scenes/nested.tsx index 64076f1ca4c..77684fc4684 100644 --- a/public/app/features/scenes/scenes/nested.tsx +++ b/public/app/features/scenes/scenes/nested.tsx @@ -1,5 +1,6 @@ import { getDefaultTimeRange } from '@grafana/data'; +import { NestedScene } from '../components/NestedScene'; import { Scene } from '../components/Scene'; import { SceneFlexLayout } from '../components/SceneFlexLayout'; import { SceneTimePicker } from '../components/SceneTimePicker'; @@ -40,9 +41,10 @@ export function getNestedScene(): Scene { return scene; } -export function getInnerScene(title: string): Scene { - const scene = new Scene({ +export function getInnerScene(title: string) { + const scene = new NestedScene({ title: title, + canRemove: true, layout: new SceneFlexLayout({ direction: 'row', 
children: [ diff --git a/public/app/features/scenes/scenes/queries.ts b/public/app/features/scenes/scenes/queries.ts new file mode 100644 index 00000000000..82a60eae3fe --- /dev/null +++ b/public/app/features/scenes/scenes/queries.ts @@ -0,0 +1,16 @@ +import { SceneQueryRunner } from '../querying/SceneQueryRunner'; + +export function getQueryRunnerWithRandomWalkQuery() { + return new SceneQueryRunner({ + queries: [ + { + refId: 'A', + datasource: { + uid: 'gdev-testdata', + type: 'testdata', + }, + scenarioId: 'random_walk', + }, + ], + }); +} diff --git a/public/app/features/scenes/scenes/sceneWithRows.tsx b/public/app/features/scenes/scenes/sceneWithRows.tsx new file mode 100644 index 00000000000..a45282778d0 --- /dev/null +++ b/public/app/features/scenes/scenes/sceneWithRows.tsx @@ -0,0 +1,62 @@ +import { getDefaultTimeRange } from '@grafana/data'; + +import { NestedScene } from '../components/NestedScene'; +import { Scene } from '../components/Scene'; +import { SceneFlexLayout } from '../components/SceneFlexLayout'; +import { SceneTimePicker } from '../components/SceneTimePicker'; +import { VizPanel } from '../components/VizPanel'; +import { SceneTimeRange } from '../core/SceneTimeRange'; +import { SceneEditManager } from '../editor/SceneEditManager'; + +import { getQueryRunnerWithRandomWalkQuery } from './queries'; + +export function getSceneWithRows(): Scene { + const scene = new Scene({ + title: 'Scene with rows', + layout: new SceneFlexLayout({ + direction: 'column', + children: [ + new NestedScene({ + title: 'Overview', + canCollapse: true, + layout: new SceneFlexLayout({ + direction: 'row', + children: [ + new VizPanel({ + pluginId: 'timeseries', + title: 'Fill height', + }), + new VizPanel({ + pluginId: 'timeseries', + title: 'Fill height', + }), + ], + }), + }), + new NestedScene({ + title: 'More server details', + canCollapse: true, + layout: new SceneFlexLayout({ + direction: 'row', + children: [ + new VizPanel({ + pluginId: 'timeseries', + title: 
'Fill height', + }), + new VizPanel({ + pluginId: 'timeseries', + title: 'Fill height', + }), + ], + }), + }), + ], + }), + $editor: new SceneEditManager({}), + $timeRange: new SceneTimeRange(getDefaultTimeRange()), + $data: getQueryRunnerWithRandomWalkQuery(), + actions: [new SceneTimePicker({})], + }); + + return scene; +} From 524948515c127b9d913475eb8ee4e866faefaf17 Mon Sep 17 00:00:00 2001 From: Adela Almasan <88068998+adela-almasan@users.noreply.github.com> Date: Mon, 18 Jul 2022 16:09:24 -0500 Subject: [PATCH 045/116] Storage: Upload button (#52346) --- .../src/components/FileUpload/FileUpload.tsx | 2 + public/app/features/storage/FolderView.tsx | 21 +-- public/app/features/storage/StoragePage.tsx | 48 ++++-- public/app/features/storage/UploadButton.tsx | 118 +++++++++++++ public/app/features/storage/UploadView.tsx | 156 ------------------ public/app/features/storage/types.ts | 1 - 6 files changed, 156 insertions(+), 190 deletions(-) create mode 100644 public/app/features/storage/UploadButton.tsx delete mode 100644 public/app/features/storage/UploadView.tsx diff --git a/packages/grafana-ui/src/components/FileUpload/FileUpload.tsx b/packages/grafana-ui/src/components/FileUpload/FileUpload.tsx index 55f46a064b5..e2c0d343437 100644 --- a/packages/grafana-ui/src/components/FileUpload/FileUpload.tsx +++ b/packages/grafana-ui/src/components/FileUpload/FileUpload.tsx @@ -21,6 +21,8 @@ export interface Props { className?: string; /** Button size */ size?: ComponentSize; + /** Show the file name */ + showFileName?: boolean; } export const FileUpload: FC = ({ diff --git a/public/app/features/storage/FolderView.tsx b/public/app/features/storage/FolderView.tsx index 02d5d7a6545..8c48090f7f7 100644 --- a/public/app/features/storage/FolderView.tsx +++ b/public/app/features/storage/FolderView.tsx @@ -5,18 +5,14 @@ import AutoSizer from 'react-virtualized-auto-sizer'; import { DataFrame, GrafanaTheme2 } from '@grafana/data'; import { Table, useStyles2 } from '@grafana/ui'; 
-import { UploadView } from './UploadView'; import { StorageView } from './types'; interface Props { listing: DataFrame; - path: string; - onPathChange: (p: string, view?: StorageView) => void; view: StorageView; - fileNames: string[]; } -export function FolderView({ listing, path, onPathChange, view, fileNames }: Props) { +export function FolderView({ listing, view }: Props) { const styles = useStyles2(getStyles); switch (view) { @@ -24,21 +20,6 @@ export function FolderView({ listing, path, onPathChange, view, fileNames }: Pro return
CONFIGURE?
; case StorageView.Perms: return
Permissions
; - case StorageView.Upload: - return ( - { - console.log('Uploaded: ' + path); - if (rsp.path) { - onPathChange(rsp.path); - } else { - onPathChange(path); // back to data - } - }} - fileNames={fileNames} - /> - ); } return ( diff --git a/public/app/features/storage/StoragePage.tsx b/public/app/features/storage/StoragePage.tsx index ca6dd94bd8f..2e7019ea92f 100644 --- a/public/app/features/storage/StoragePage.tsx +++ b/public/app/features/storage/StoragePage.tsx @@ -4,7 +4,7 @@ import { useAsync } from 'react-use'; import { DataFrame, GrafanaTheme2, isDataFrame, ValueLinkConfig } from '@grafana/data'; import { config, locationService } from '@grafana/runtime'; -import { useStyles2, IconName, Spinner, TabsBar, Tab, Button, HorizontalGroup, LinkButton } from '@grafana/ui'; +import { useStyles2, IconName, Spinner, TabsBar, Tab, Button, HorizontalGroup, LinkButton, Alert } from '@grafana/ui'; import appEvents from 'app/core/app_events'; import { Page } from 'app/core/components/Page/Page'; import { useNavModel } from 'app/core/hooks/useNavModel'; @@ -18,6 +18,7 @@ import { ExportView } from './ExportView'; import { FileView } from './FileView'; import { FolderView } from './FolderView'; import { RootView } from './RootView'; +import { UploadButton } from './UploadButton'; import { getGrafanaStorage, filenameAlreadyExists } from './storage'; import { StorageView } from './types'; @@ -57,6 +58,7 @@ export default function StoragePage(props: Props) { }; const [isAddingNewFolder, setIsAddingNewFolder] = useState(false); + const [errorMessages, setErrorMessages] = useState([]); const listing = useAsync((): Promise => { return getGrafanaStorage() @@ -153,18 +155,27 @@ export default function StoragePage(props: Props) { opts.push({ what: StorageView.History, text: 'History' }); } - // Hardcode the uploadable folder :) - if (isFolder && path.startsWith('resources')) { - opts.push({ - what: StorageView.Upload, - text: 'Upload', - }); - } const canAddFolder = isFolder && 
path.startsWith('resources'); const canDelete = path.startsWith('resources/'); const canViewDashboard = path.startsWith('devenv/') && config.featureToggles.dashboardsFromStorage && (isFolder || path.endsWith('.json')); + const getErrorMessages = () => { + return ( +
+ + {errorMessages.map((error) => { + return
{error}
; + })} +
+
+ ); + }; + + const clearAlert = () => { + setErrorMessages([]); + }; + return (
@@ -175,7 +186,13 @@ export default function StoragePage(props: Props) { Dashboard )} - {canAddFolder && } + + {canAddFolder && ( + <> + + + + )} {canDelete && (
+ } + title={'This file already exists'} + confirmText={'Replace'} + onConfirm={onOverwriteConfirm} + onDismiss={onOverwriteDismiss} + /> + )} + + ); +} + +const getStyles = (theme: GrafanaTheme2) => ({ + uploadButton: css` + margin-right: ${theme.spacing(2)}; + `, +}); diff --git a/public/app/features/storage/UploadView.tsx b/public/app/features/storage/UploadView.tsx deleted file mode 100644 index d7f0a7ec457..00000000000 --- a/public/app/features/storage/UploadView.tsx +++ /dev/null @@ -1,156 +0,0 @@ -import { css } from '@emotion/css'; -import React, { useState } from 'react'; -import SVG from 'react-inlinesvg'; - -import { GrafanaTheme2 } from '@grafana/data'; -import { Alert, Button, ButtonGroup, Checkbox, Field, FileDropzone, useStyles2 } from '@grafana/ui'; - -import { filenameAlreadyExists, getGrafanaStorage } from './storage'; -import { UploadReponse } from './types'; - -interface Props { - folder: string; - onUpload: (rsp: UploadReponse) => void; - fileNames: string[]; -} - -interface ErrorResponse { - message: string; -} - -const FileDropzoneCustomChildren = ({ secondaryText = 'Drag and drop here or browse' }) => { - const styles = useStyles2(getStyles); - - return ( -
- {secondaryText} -
- ); -}; - -export const UploadView = ({ folder, onUpload, fileNames }: Props) => { - const [file, setFile] = useState(undefined); - - const styles = useStyles2(getStyles); - - const [error, setError] = useState({ message: '' }); - const [overwriteExistingFile, setOverwriteExistingFile] = useState(false); - - const Preview = () => { - if (!file) { - return <>; - } - const isImage = file.type?.startsWith('image/'); - const isSvg = file.name?.endsWith('.svg'); - - const src = URL.createObjectURL(file); - return ( - -
- {isSvg && } - {isImage && !isSvg && } -
-
- ); - }; - - const doUpload = async () => { - if (!file) { - setError({ message: 'please select a file' }); - return; - } - - const rsp = await getGrafanaStorage().upload(folder, file, overwriteExistingFile); - if (rsp.status !== 200) { - setError(rsp); - } else { - onUpload(rsp); - } - }; - - const filenameExists = file ? filenameAlreadyExists(file.name, fileNames) : false; - const isUploadDisabled = !file || (filenameExists && !overwriteExistingFile); - - return ( -
- { - setFile(undefined); - }} - options={{ - accept: { 'image/*': ['.jpg', '.jpeg', '.png', '.gif', '.webp'] }, - multiple: false, - onDrop: (acceptedFiles: File[]) => { - setFile(acceptedFiles[0]); - }, - }} - > - {error.message !== '' ?

{error.message}

: Boolean(file) ? : } -
- - {file && filenameExists && ( -
- - setOverwriteExistingFile(!overwriteExistingFile)} - label="Overwrite existing file" - /> - -
- )} - - - - -
- ); -}; - -const getStyles = (theme: GrafanaTheme2) => ({ - resourcePickerPopover: css` - border-radius: ${theme.shape.borderRadius()}; - box-shadow: ${theme.shadows.z3}; - background: ${theme.colors.background.primary}; - border: 1px solid ${theme.colors.border.medium}; - `, - resourcePickerPopoverContent: css` - width: 315px; - font-size: ${theme.typography.bodySmall.fontSize}; - min-height: 184px; - padding: ${theme.spacing(1)}; - display: flex; - flex-direction: column; - `, - button: css` - margin: 12px 20px 5px; - `, - iconPreview: css` - width: 238px; - height: 198px; - border: 1px solid ${theme.colors.border.medium}; - display: flex; - align-items: center; - justify-content: center; - `, - img: css` - width: 147px; - height: 147px; - fill: ${theme.colors.text.primary}; - `, - iconWrapper: css` - display: flex; - flex-direction: column; - align-items: center; - `, - small: css` - color: ${theme.colors.text.secondary}; - margin-bottom: ${theme.spacing(2)}; - `, - alert: css` - padding-top: 10px; - `, -}); diff --git a/public/app/features/storage/types.ts b/public/app/features/storage/types.ts index 9a729dee3b9..120da9f7dec 100644 --- a/public/app/features/storage/types.ts +++ b/public/app/features/storage/types.ts @@ -2,7 +2,6 @@ export enum StorageView { Data = 'data', Config = 'config', Perms = 'perms', - Upload = 'upload', Export = 'export', History = 'history', AddRoot = 'add', From a5a8052916e079728a885ea86c8f64eb622e3572 Mon Sep 17 00:00:00 2001 From: Leon Sorokin Date: Mon, 18 Jul 2022 17:50:07 -0500 Subject: [PATCH 046/116] TimeSeries: Add option to match axis color to series color (#51437) --- .../grafana-schema/src/schema/mudball.cue | 4 +++ .../grafana-schema/src/schema/mudball.gen.ts | 6 ++++ .../src/components/TimeSeries/utils.ts | 34 +++++++++++++++++++ .../uPlot/config/UPlotAxisBuilder.ts | 10 +++++- .../grafana-ui/src/options/builder/axis.tsx | 14 +++++++- 5 files changed, 66 insertions(+), 2 deletions(-) diff --git 
a/packages/grafana-schema/src/schema/mudball.cue b/packages/grafana-schema/src/schema/mudball.cue index 7d62ba661df..8e9f6694ec9 100644 --- a/packages/grafana-schema/src/schema/mudball.cue +++ b/packages/grafana-schema/src/schema/mudball.cue @@ -5,6 +5,9 @@ package schema // TODO docs AxisPlacement: "auto" | "top" | "right" | "bottom" | "left" | "hidden" @cuetsy(kind="enum") +// TODO docs +AxisColorMode: "text" | "series" @cuetsy(kind="enum") + // TODO docs VisibilityMode: "auto" | "never" | "always" @cuetsy(kind="enum") @@ -85,6 +88,7 @@ ScaleDistributionConfig: { // TODO docs AxisConfig: { axisPlacement?: AxisPlacement + axisColorMode?: AxisColorMode axisLabel?: string axisWidth?: number axisSoftMin?: number diff --git a/packages/grafana-schema/src/schema/mudball.gen.ts b/packages/grafana-schema/src/schema/mudball.gen.ts index 589d8606d39..70229e5fc5b 100644 --- a/packages/grafana-schema/src/schema/mudball.gen.ts +++ b/packages/grafana-schema/src/schema/mudball.gen.ts @@ -15,6 +15,11 @@ export enum AxisPlacement { Top = 'top', } +export enum AxisColorMode { + Series = 'series', + Text = 'text', +} + export enum VisibilityMode { Always = 'always', Auto = 'auto', @@ -118,6 +123,7 @@ export interface ScaleDistributionConfig { } export interface AxisConfig { + axisColorMode?: AxisColorMode; axisGridShow?: boolean; axisLabel?: string; axisPlacement?: AxisPlacement; diff --git a/packages/grafana-ui/src/components/TimeSeries/utils.ts b/packages/grafana-ui/src/components/TimeSeries/utils.ts index 1128ee2a0d9..8682a00c976 100644 --- a/packages/grafana-ui/src/components/TimeSeries/utils.ts +++ b/packages/grafana-ui/src/components/TimeSeries/utils.ts @@ -26,10 +26,13 @@ import { ScaleOrientation, StackingMode, GraphTransform, + AxisColorMode, + GraphGradientMode, } from '@grafana/schema'; import { buildScaleKey } from '../GraphNG/utils'; import { UPlotConfigBuilder, UPlotConfigPrepFn } from '../uPlot/config/UPlotConfigBuilder'; +import { getScaleGradientFn } from 
'../uPlot/config/gradientFills'; import { getStackingGroups, preparePlotData2 } from '../uPlot/utils'; const defaultFormatter = (v: any) => (v == null ? '-' : v.toFixed(1)); @@ -192,6 +195,36 @@ export const preparePlotConfigBuilder: UPlotConfigPrepFn<{ } if (customConfig.axisPlacement !== AxisPlacement.Hidden) { + let axisColor: uPlot.Axis.Stroke | undefined; + + if (customConfig.axisColorMode === AxisColorMode.Series) { + if ( + colorMode.isByValue && + field.config.custom?.gradientMode === GraphGradientMode.Scheme && + colorMode.id === FieldColorModeId.Thresholds + ) { + axisColor = getScaleGradientFn(1, theme, colorMode, field.config.thresholds); + } else { + axisColor = seriesColor; + } + } + + let axisColorOpts = {}; + + if (axisColor) { + axisColorOpts = { + border: { + show: true, + width: 1, + stroke: axisColor, + }, + ticks: { + stroke: axisColor, + }, + color: customConfig.axisColorMode === AxisColorMode.Series ? axisColor : undefined, + }; + } + builder.addAxis( tweakAxis( { @@ -203,6 +236,7 @@ export const preparePlotConfigBuilder: UPlotConfigPrepFn<{ theme, grid: { show: customConfig.axisGridShow }, show: customConfig.hideFrom?.viz === false, + ...axisColorOpts, }, field ) diff --git a/packages/grafana-ui/src/components/uPlot/config/UPlotAxisBuilder.ts b/packages/grafana-ui/src/components/uPlot/config/UPlotAxisBuilder.ts index 3eafa453f10..96f6f02e1eb 100644 --- a/packages/grafana-ui/src/components/uPlot/config/UPlotAxisBuilder.ts +++ b/packages/grafana-ui/src/components/uPlot/config/UPlotAxisBuilder.ts @@ -27,6 +27,8 @@ export interface AxisProps { values?: Axis.Values; isTime?: boolean; timeZone?: TimeZone; + color?: uPlot.Axis.Stroke; + border?: uPlot.Axis.Border; } export const UPLOT_AXIS_FONT_SIZE = 12; @@ -106,6 +108,8 @@ export class UPlotAxisBuilder extends PlotConfigBuilder { theme, tickLabelRotation, size, + color, + border, } = this.props; const font = `${UPLOT_AXIS_FONT_SIZE}px ${theme.typography.fontFamily}`; @@ -119,7 +123,7 @@ export 
class UPlotAxisBuilder extends PlotConfigBuilder { let config: Axis = { scale: scaleKey, show, - stroke: theme.colors.text.primary, + stroke: color ?? theme.colors.text.primary, side: getUPlotSideFromAxis(placement), font, size: @@ -156,6 +160,10 @@ export class UPlotAxisBuilder extends PlotConfigBuilder { filter, }; + if (border != null) { + config.border = border; + } + if (label != null && label.length > 0) { config.label = label; config.labelSize = UPLOT_AXIS_FONT_SIZE + labelPad; diff --git a/packages/grafana-ui/src/options/builder/axis.tsx b/packages/grafana-ui/src/options/builder/axis.tsx index d5dc9753d68..5e498f68498 100644 --- a/packages/grafana-ui/src/options/builder/axis.tsx +++ b/packages/grafana-ui/src/options/builder/axis.tsx @@ -7,7 +7,7 @@ import { SelectableValue, StandardEditorProps, } from '@grafana/data'; -import { AxisConfig, AxisPlacement, ScaleDistribution, ScaleDistributionConfig } from '@grafana/schema'; +import { AxisColorMode, AxisConfig, AxisPlacement, ScaleDistribution, ScaleDistributionConfig } from '@grafana/schema'; import { graphFieldOptions, Select, HorizontalGroup, RadioButtonGroup } from '../../index'; @@ -81,6 +81,18 @@ export function addAxisConfig( { value: false, label: 'Off' }, ], }, + }) + .addRadio({ + path: 'axisColorMode', + name: 'Color', + category, + defaultValue: AxisColorMode.Text, + settings: { + options: [ + { value: AxisColorMode.Text, label: 'Text' }, + { value: AxisColorMode.Series, label: 'Series' }, + ], + }, }); if (!hideScale) { From 9a72ebcd9959c8248f657ce283e6a2db81d796ca Mon Sep 17 00:00:00 2001 From: sunb3am <57086182+sunb3am@users.noreply.github.com> Date: Tue, 19 Jul 2022 07:43:51 +0530 Subject: [PATCH 047/116] Docs: Updated links to landing pages for the notifiers (#51875) * Documentation: Updated links to landing pages for the respective notifiers in alerting Updated links for listed notifiers to correctly redirect to their landing pages * Ran prettier --- .../contact-points/notifiers/_index.md | 
44 +++++++++---------- 1 file changed, 22 insertions(+), 22 deletions(-) diff --git a/docs/sources/alerting/contact-points/notifiers/_index.md b/docs/sources/alerting/contact-points/notifiers/_index.md index 164a74e93a3..4f588ad3e71 100644 --- a/docs/sources/alerting/contact-points/notifiers/_index.md +++ b/docs/sources/alerting/contact-points/notifiers/_index.md @@ -18,25 +18,25 @@ weight: 130 The following table lists the notifiers (contact point types) supported by Grafana. -| Name | Type | Grafana Alertmanager | Other Alertmanagers | -| --------------------------------------------- | ------------------------- | -------------------- | -------------------------------------------------------------------------------------------------------- | -| [DingDing](#dingdingdingtalk) | `dingding` | Supported | N/A | -| [Discord](#discord) | `discord` | Supported | N/A | -| [Email](#email) | `email` | Supported | Supported | -| [Google Hangouts Chat](#google-hangouts-chat) | `googlechat` | Supported | N/A | -| [Kafka](#kafka) | `kafka` | Supported | N/A | -| Line | `line` | Supported | N/A | -| Microsoft Teams | `teams` | Supported | N/A | -| [Opsgenie](#opsgenie) | `opsgenie` | Supported | Supported | -| [Pagerduty](#pagerduty) | `pagerduty` | Supported | Supported | -| Prometheus Alertmanager | `prometheus-alertmanager` | Supported | N/A | -| [Pushover](#pushover) | `pushover` | Supported | Supported | -| Sensu | `sensu` | Supported | N/A | -| [Sensu Go](#sensu-go) | `sensugo` | Supported | N/A | -| [Slack](#slack) | `slack` | Supported | Supported | -| Telegram | `telegram` | Supported | N/A | -| Threema | `threema` | Supported | N/A | -| VictorOps | `victorops` | Supported | Supported | -| [Webhook](#webhook) | `webhook` | Supported | Supported ([different format](https://prometheus.io/docs/alerting/latest/configuration/#webhook_config)) | -| [WeCom](#wecom) | `wecom` | Supported | N/A | -| [Zenduty](#zenduty) | `webhook` | Supported | N/A | +| Name | Type | Grafana 
Alertmanager | Other Alertmanagers | +| ------------------------------------------------ | ------------------------- | -------------------- | -------------------------------------------------------------------------------------------------------- | +| [DingDing](https://www.dingtalk.com/en) | `dingding` | Supported | N/A | +| [Discord](https://discord.com/) | `discord` | Supported | N/A | +| [Email](#email) | `email` | Supported | Supported | +| [Google Hangouts](https://hangouts.google.com/) | `googlechat` | Supported | N/A | +| [Kafka](https://kafka.apache.org/) | `kafka` | Supported | N/A | +| [Line](https://line.me/en/) | `line` | Supported | N/A | +| [Microsoft Teams](https://teams.microsoft.com/) | `teams` | Supported | N/A | +| [Opsgenie](https://atlassian.com/opsgenie/) | `opsgenie` | Supported | Supported | +| [Pagerduty](https://www.pagerduty.com/) | `pagerduty` | Supported | Supported | +| [Prometheus Alertmanager](https://prometheus.io) | `prometheus-alertmanager` | Supported | N/A | +| [Pushover](https://pushover.net/) | `pushover` | Supported | Supported | +| [Sensu](https://sensu.io/) | `sensu` | Supported | N/A | +| [Sensu Go](https://docs.sensu.io/sensu-go/) | `sensugo` | Supported | N/A | +| [Slack](https://slack.com/) | `slack` | Supported | Supported | +| [Telegram](https://telegram.org/) | `telegram` | Supported | N/A | +| [Threema](https://threema.ch/) | `threema` | Supported | N/A | +| [VictorOps](https://help.victorops.com/) | `victorops` | Supported | Supported | +| [Webhook](#webhook) | `webhook` | Supported | Supported ([different format](https://prometheus.io/docs/alerting/latest/configuration/#webhook_config)) | +| [WeCom](#wecom) | `wecom` | Supported | N/A | +| [Zenduty](https://www.zenduty.com/) | `webhook` | Supported | N/A | From ba543343c4ddd529b4988e31ae8c69b9b44fe3e2 Mon Sep 17 00:00:00 2001 From: Alex Khomenko Date: Tue, 19 Jul 2022 08:09:09 +0300 Subject: [PATCH 048/116] Grafana/UI: Add ColorPickerInput component (#52222) * 
ColorPickerInput: Setup componnet * ColorPickerInput: Add onChange callback * ColorPickerInput: Conver ColorInput to functional component * ColorPickerInput: Extend Input props * ColorPickerInput: Upate value prop * ColorPickerInput: Allow customising color format for onChange * ColorPickerInput: Add docs * ColorPickerInput: Update docs * ColorPickerInput: Memoize debounced callback * ColorPickerInput: Add tests and use theme for spacing * Apply styles from SpectrumPalette * Cleanup --- .../src/themes/colorManipulator.test.ts | 8 ++ .../src/themes/colorManipulator.ts | 11 +++ .../src/components/ColorPicker/ColorInput.tsx | 91 +++++++------------ .../components/ColorPicker/ColorPicker.mdx | 7 ++ .../ColorPicker/ColorPicker.story.tsx | 13 ++- .../ColorPicker/ColorPickerInput.tsx | 81 +++++++++++++++++ .../ColorPicker/SpectrumPalette.tsx | 6 +- packages/grafana-ui/src/components/index.ts | 1 + 8 files changed, 156 insertions(+), 62 deletions(-) create mode 100644 packages/grafana-ui/src/components/ColorPicker/ColorPickerInput.tsx diff --git a/packages/grafana-data/src/themes/colorManipulator.test.ts b/packages/grafana-data/src/themes/colorManipulator.test.ts index f4ca0652065..4f65f50c4d5 100644 --- a/packages/grafana-data/src/themes/colorManipulator.test.ts +++ b/packages/grafana-data/src/themes/colorManipulator.test.ts @@ -10,6 +10,7 @@ import { getContrastRatio, getLuminance, lighten, + asRgbString, } from './colorManipulator'; describe('utils/colorManipulator', () => { @@ -415,4 +416,11 @@ describe('utils/colorManipulator', () => { expect(lighten('color(display-p3 1 0 0)', 0)).toEqual('color(display-p3 1 0 0)'); }); }); + + describe('asRgbString', () => { + it('should convert hex color to rgb', () => { + expect(asRgbString('#FFFFFF')).toEqual('rgb(255, 255, 255)'); + expect(asRgbString('#000000')).toEqual('rgb(0, 0, 0)'); + }); + }); }); diff --git a/packages/grafana-data/src/themes/colorManipulator.ts b/packages/grafana-data/src/themes/colorManipulator.ts index 
b74c745e075..4bc4f8c3f3d 100644 --- a/packages/grafana-data/src/themes/colorManipulator.ts +++ b/packages/grafana-data/src/themes/colorManipulator.ts @@ -81,6 +81,17 @@ export function asHexString(color: string): string { return tColor.getAlpha() === 1 ? tColor.toHexString() : tColor.toHex8String(); } +/** + * Converts a color to rgb string + */ +export function asRgbString(color: string) { + if (color.startsWith('rgb')) { + return color; + } + + return tinycolor(color).toRgbString(); +} + /** * Converts a color from hsl format to rgb format. * @param color - HSL color values diff --git a/packages/grafana-ui/src/components/ColorPicker/ColorInput.tsx b/packages/grafana-ui/src/components/ColorPicker/ColorInput.tsx index 5877ba3f959..354ee19bf9b 100644 --- a/packages/grafana-ui/src/components/ColorPicker/ColorInput.tsx +++ b/packages/grafana-ui/src/components/ColorPicker/ColorInput.tsx @@ -1,87 +1,62 @@ import { cx, css } from '@emotion/css'; import { debounce } from 'lodash'; -import React from 'react'; +import React, { forwardRef, useState, useEffect, useMemo } from 'react'; import tinycolor from 'tinycolor2'; import { GrafanaTheme2 } from '@grafana/data'; import { useStyles2 } from '../../themes'; -import { Input } from '../Input/Input'; +import { Input, Props as InputProps } from '../Input/Input'; import { ColorPickerProps } from './ColorPickerPopover'; -interface ColorInputState { - previousColor: string; - value: string; -} +interface ColorInputProps extends ColorPickerProps, Omit {} -interface ColorInputProps extends ColorPickerProps { - style?: React.CSSProperties; - className?: string; -} +const ColorInput = forwardRef(({ color, onChange, ...inputProps }, ref) => { + const [value, setValue] = useState(color); + const [previousColor, setPreviousColor] = useState(color); + // eslint-disable-next-line react-hooks/exhaustive-deps + const updateColor = useMemo(() => debounce(onChange, 100), []); -class ColorInput extends React.PureComponent { - constructor(props: 
ColorInputProps) { - super(props); - this.state = { - previousColor: props.color, - value: props.color, - }; - - this.updateColor = debounce(this.updateColor, 100); - } - - static getDerivedStateFromProps(props: ColorPickerProps, state: ColorInputState) { - const newColor = tinycolor(props.color); - if (newColor.isValid() && props.color !== state.previousColor) { - return { - ...state, - previousColor: props.color, - value: newColor.toString(), - }; + useEffect(() => { + const newColor = tinycolor(color); + if (newColor.isValid() && color !== previousColor) { + setValue(newColor.toString()); + setPreviousColor(color); } + }, [color, previousColor]); - return state; - } - updateColor = (color: string) => { - this.props.onChange(color); - }; - - onChange = (event: React.SyntheticEvent) => { + const onChangeColor = (event: React.SyntheticEvent) => { const newColor = tinycolor(event.currentTarget.value); - this.setState({ - value: event.currentTarget.value, - }); + setValue(event.currentTarget.value); if (newColor.isValid()) { - this.updateColor(newColor.toString()); + updateColor(newColor.toString()); } }; - onBlur = () => { - const newColor = tinycolor(this.state.value); + const onBlur = () => { + const newColor = tinycolor(value); if (!newColor.isValid()) { - this.setState({ - value: this.props.color, - }); + setValue(color); } }; - render() { - const { value } = this.state; - return ( - } - /> - ); - } -} + return ( + } + ref={ref} + /> + ); +}); + +ColorInput.displayName = 'ColorInput'; export default ColorInput; diff --git a/packages/grafana-ui/src/components/ColorPicker/ColorPicker.mdx b/packages/grafana-ui/src/components/ColorPicker/ColorPicker.mdx index 6f1b631c526..cb45dbedc20 100644 --- a/packages/grafana-ui/src/components/ColorPicker/ColorPicker.mdx +++ b/packages/grafana-ui/src/components/ColorPicker/ColorPicker.mdx @@ -1,5 +1,6 @@ import { Meta, Props } from '@storybook/addon-docs/blocks'; import { ColorPicker } from './ColorPicker'; +import { 
ColorPickerInput } from './ColorPickerInput'; @@ -11,4 +12,10 @@ The `Popover` is a tabbed view where you can switch between `Palettes`. The `Nam The `Pickers` are single circular color fields that show the currently picked color. On click, they open the `Popover`. +## ColorPickerInput + +Color picker component, modified to be used inside forms. Supports all usual input props. Allows manually typing in color value as well as selecting it from the popover. + +The format in which the color is returned to the `onChange` callback can be customised via `returnColorAs` prop. + diff --git a/packages/grafana-ui/src/components/ColorPicker/ColorPicker.story.tsx b/packages/grafana-ui/src/components/ColorPicker/ColorPicker.story.tsx index 200154f0ff0..d582bbcd2fd 100644 --- a/packages/grafana-ui/src/components/ColorPicker/ColorPicker.story.tsx +++ b/packages/grafana-ui/src/components/ColorPicker/ColorPicker.story.tsx @@ -9,12 +9,13 @@ import { withCenteredStory } from '../../utils/storybook/withCenteredStory'; import { renderComponentWithTheme } from '../../utils/storybook/withTheme'; import mdx from './ColorPicker.mdx'; +import { ColorPickerInput, ColorPickerInputProps } from './ColorPickerInput'; import { ColorPickerProps } from './ColorPickerPopover'; export default { title: 'Pickers and Editors/ColorPicker', component: ColorPicker, - subcomponents: { SeriesColorPicker }, + subcomponents: { SeriesColorPicker, ColorPickerInput }, decorators: [withCenteredStory], parameters: { docs: { @@ -74,3 +75,13 @@ export const SeriesPicker: Story = ({ enableNamedColors }) => ); }; + +export const Input: Story = () => { + return ( + + {(value, onChange) => { + return ; + }} + + ); +}; diff --git a/packages/grafana-ui/src/components/ColorPicker/ColorPickerInput.tsx b/packages/grafana-ui/src/components/ColorPicker/ColorPickerInput.tsx new file mode 100644 index 00000000000..fd26ab1c8c5 --- /dev/null +++ b/packages/grafana-ui/src/components/ColorPicker/ColorPickerInput.tsx @@ -0,0 +1,81 @@ 
+import { css, cx } from '@emotion/css'; +import React, { useState, forwardRef } from 'react'; +import { RgbaStringColorPicker } from 'react-colorful'; +import { useThrottleFn } from 'react-use'; + +import { colorManipulator, GrafanaTheme2 } from '@grafana/data'; + +import { useStyles2, useTheme2 } from '../../themes'; +import { ClickOutsideWrapper } from '../ClickOutsideWrapper/ClickOutsideWrapper'; +import { Props as InputProps } from '../Input/Input'; + +import ColorInput from './ColorInput'; +import { getStyles as getPaletteStyles } from './SpectrumPalette'; + +export interface ColorPickerInputProps extends Omit { + value?: string; + onChange: (color: string) => void; + /** Format for returning the color in onChange callback, defaults to 'rgb' */ + returnColorAs?: 'rgb' | 'hex'; +} + +export const ColorPickerInput = forwardRef( + ({ value = '', onChange, returnColorAs = 'rgb', ...inputProps }, ref) => { + const [currentColor, setColor] = useState(value); + const [isOpen, setIsOpen] = useState(false); + const theme = useTheme2(); + const styles = useStyles2(getStyles); + const paletteStyles = useStyles2(getPaletteStyles); + + useThrottleFn( + (c) => { + const color = theme.visualization.getColorByName(c); + if (returnColorAs === 'rgb') { + onChange(colorManipulator.asRgbString(color)); + } else { + onChange(colorManipulator.asHexString(color)); + } + }, + 500, + [currentColor] + ); + + return ( + setIsOpen(false)}> +
+ {isOpen && ( + + )} +
setIsOpen(true)}> + +
+
+
+ ); + } +); + +ColorPickerInput.displayName = 'ColorPickerInput'; + +const getStyles = (theme: GrafanaTheme2) => { + return { + wrapper: css` + position: relative; + `, + picker: css` + &.react-colorful { + position: absolute; + width: 100%; + z-index: 11; + bottom: 36px; + } + `, + inner: css` + position: absolute; + `, + }; +}; diff --git a/packages/grafana-ui/src/components/ColorPicker/SpectrumPalette.tsx b/packages/grafana-ui/src/components/ColorPicker/SpectrumPalette.tsx index 7006a1e8636..f477c3d41b2 100644 --- a/packages/grafana-ui/src/components/ColorPicker/SpectrumPalette.tsx +++ b/packages/grafana-ui/src/components/ColorPicker/SpectrumPalette.tsx @@ -1,4 +1,4 @@ -import { css, cx } from '@emotion/css'; +import { css } from '@emotion/css'; import React, { useMemo, useState } from 'react'; import { RgbaStringColorPicker } from 'react-colorful'; import { useThrottleFn } from 'react-use'; @@ -37,13 +37,13 @@ const SpectrumPalette: React.FunctionComponent = ({ color, return (
- +
); }; -const getStyles = (theme: GrafanaTheme2) => ({ +export const getStyles = (theme: GrafanaTheme2) => ({ wrapper: css` flex-grow: 1; `, diff --git a/packages/grafana-ui/src/components/index.ts b/packages/grafana-ui/src/components/index.ts index 7d5605ebc2c..bf6931b8a3d 100644 --- a/packages/grafana-ui/src/components/index.ts +++ b/packages/grafana-ui/src/components/index.ts @@ -24,6 +24,7 @@ export { ButtonCascader } from './ButtonCascader/ButtonCascader'; export { LoadingPlaceholder, LoadingPlaceholderProps } from './LoadingPlaceholder/LoadingPlaceholder'; export { ColorPicker, SeriesColorPicker } from './ColorPicker/ColorPicker'; +export { ColorPickerInput } from './ColorPicker/ColorPickerInput'; export { SeriesColorPickerPopover, SeriesColorPickerPopoverWithTheme } from './ColorPicker/SeriesColorPickerPopover'; export { EmptySearchResult } from './EmptySearchResult/EmptySearchResult'; export { UnitPicker } from './UnitPicker/UnitPicker'; From 46eec85b1a0e15c68540fab9fd746a890b8a41a1 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?G=C3=A1bor=20Farkas?= Date: Tue, 19 Jul 2022 08:13:38 +0200 Subject: [PATCH 049/116] loki: better handle an empty-response (#52397) --- pkg/tsdb/loki/api.go | 8 ++++++++ pkg/tsdb/loki/framing_test.go | 2 ++ pkg/tsdb/loki/testdata/empty.golden.jsonc | 5 +++++ pkg/tsdb/loki/testdata/empty.json | 1 + 4 files changed, 16 insertions(+) create mode 100644 pkg/tsdb/loki/testdata/empty.golden.jsonc create mode 100644 pkg/tsdb/loki/testdata/empty.json diff --git a/pkg/tsdb/loki/api.go b/pkg/tsdb/loki/api.go index d94ecf6b714..553cc9bdf4d 100644 --- a/pkg/tsdb/loki/api.go +++ b/pkg/tsdb/loki/api.go @@ -163,6 +163,14 @@ func (api *LokiAPI) DataQuery(ctx context.Context, query lokiQuery) (data.Frames iter := jsoniter.Parse(jsoniter.ConfigDefault, resp.Body, 1024) res := converter.ReadPrometheusStyleResult(iter, converter.Options{MatrixWideSeries: false, VectorWideSeries: false}) + if res == nil { + // it's hard to say if this is an error-case or not. 
+ // we know the http-response was a success-response + // (otherwise we wouldn't be here in the code), + // so we will go with a success, with no data. + return data.Frames{}, nil + } + if res.Error != nil { return nil, res.Error } diff --git a/pkg/tsdb/loki/framing_test.go b/pkg/tsdb/loki/framing_test.go index bb2b3bf16a7..cd057d375d5 100644 --- a/pkg/tsdb/loki/framing_test.go +++ b/pkg/tsdb/loki/framing_test.go @@ -48,6 +48,8 @@ func TestSuccessResponse(t *testing.T) { {name: "parse a simple streams response", filepath: "streams_simple", query: streamsQuery}, {name: "parse a streams response with parse errors", filepath: "streams_parse_errors", query: streamsQuery}, + + {name: "parse an empty response", filepath: "empty", query: matrixQuery}, } for _, test := range tt { diff --git a/pkg/tsdb/loki/testdata/empty.golden.jsonc b/pkg/tsdb/loki/testdata/empty.golden.jsonc new file mode 100644 index 00000000000..8850a3f1b08 --- /dev/null +++ b/pkg/tsdb/loki/testdata/empty.golden.jsonc @@ -0,0 +1,5 @@ +// 🌟 This was machine generated. Do not edit. 🌟 +// 🌟 This was machine generated. Do not edit. 
🌟 +{ + "frames": [] +} \ No newline at end of file diff --git a/pkg/tsdb/loki/testdata/empty.json b/pkg/tsdb/loki/testdata/empty.json new file mode 100644 index 00000000000..9e26dfeeb6e --- /dev/null +++ b/pkg/tsdb/loki/testdata/empty.json @@ -0,0 +1 @@ +{} \ No newline at end of file From 11b743ecd90352f0b3892e9c297caf9c53cffe0a Mon Sep 17 00:00:00 2001 From: Joey Tawadrous <90795735+joey-grafana@users.noreply.github.com> Date: Tue, 19 Jul 2022 08:00:58 +0100 Subject: [PATCH 050/116] Traces: Remove serviceMap feature flag (#52375) * Remove serviceMap feature flag * Remove import * Add serviceMap to queryTypeOptions --- .../grafana-data/src/types/featureToggles.gen.ts | 1 - pkg/services/featuremgmt/registry.go | 6 ------ pkg/services/featuremgmt/toggles_gen.go | 4 ---- .../datasource/tempo/QueryEditor/QueryField.tsx | 12 +++++------- .../datasource/tempo/configuration/ConfigEditor.tsx | 8 +++----- 5 files changed, 8 insertions(+), 23 deletions(-) diff --git a/packages/grafana-data/src/types/featureToggles.gen.ts b/packages/grafana-data/src/types/featureToggles.gen.ts index 01feba7cc2b..c59f8e728f0 100644 --- a/packages/grafana-data/src/types/featureToggles.gen.ts +++ b/packages/grafana-data/src/types/featureToggles.gen.ts @@ -26,7 +26,6 @@ export interface FeatureToggles { ['live-service-web-worker']?: boolean; queryOverLive?: boolean; panelTitleSearch?: boolean; - tempoServiceGraph?: boolean; tempoApmTable?: boolean; prometheus_azure_auth?: boolean; prometheusAzureOverrideAudience?: boolean; diff --git a/pkg/services/featuremgmt/registry.go b/pkg/services/featuremgmt/registry.go index f596d919ae4..e38dc69157e 100644 --- a/pkg/services/featuremgmt/registry.go +++ b/pkg/services/featuremgmt/registry.go @@ -62,12 +62,6 @@ var ( Description: "Search for dashboards using panel title", State: FeatureStateAlpha, }, - { - Name: "tempoServiceGraph", - Description: "show service", - State: FeatureStateBeta, - FrontendOnly: true, - }, { Name: "tempoApmTable", Description: 
"Show APM table", diff --git a/pkg/services/featuremgmt/toggles_gen.go b/pkg/services/featuremgmt/toggles_gen.go index a0684dcc9c7..35a6b40560c 100644 --- a/pkg/services/featuremgmt/toggles_gen.go +++ b/pkg/services/featuremgmt/toggles_gen.go @@ -47,10 +47,6 @@ const ( // Search for dashboards using panel title FlagPanelTitleSearch = "panelTitleSearch" - // FlagTempoServiceGraph - // show service - FlagTempoServiceGraph = "tempoServiceGraph" - // FlagTempoApmTable // Show APM table FlagTempoApmTable = "tempoApmTable" diff --git a/public/app/plugins/datasource/tempo/QueryEditor/QueryField.tsx b/public/app/plugins/datasource/tempo/QueryEditor/QueryField.tsx index 16575b152e5..f9bfb4bc56e 100644 --- a/public/app/plugins/datasource/tempo/QueryEditor/QueryField.tsx +++ b/public/app/plugins/datasource/tempo/QueryEditor/QueryField.tsx @@ -3,7 +3,7 @@ import React from 'react'; import useAsync from 'react-use/lib/useAsync'; import { QueryEditorProps, SelectableValue } from '@grafana/data'; -import { config, reportInteraction } from '@grafana/runtime'; +import { reportInteraction } from '@grafana/runtime'; import { FileDropzone, InlineField, @@ -75,14 +75,12 @@ class TempoQueryFieldComponent extends React.PureComponent { const queryTypeOptions: Array> = [ { value: 'traceId', label: 'TraceID' }, { value: 'upload', label: 'JSON file' }, + { value: 'serviceMap', label: 'Service Graph' }, ]; - if (config.featureToggles.tempoServiceGraph) { - queryTypeOptions.push({ value: 'serviceMap', label: 'Service Graph' }); - // span names in Tempo search links (generated on the service graph page) are in camel case (for Prometheus queries) - // but the span name dropdown menu in the search tab is lower case - query.spanName = query.spanName?.toLowerCase(); - } + // span names in Tempo search links (generated on the service graph page) are in camel case (for Prometheus queries) + // but the span name dropdown menu in the search tab is lower case + query.spanName = 
query.spanName?.toLowerCase(); if (!datasource?.search?.hide) { queryTypeOptions.unshift({ value: 'nativeSearch', label: 'Search' }); diff --git a/public/app/plugins/datasource/tempo/configuration/ConfigEditor.tsx b/public/app/plugins/datasource/tempo/configuration/ConfigEditor.tsx index ee2763991d5..d9662af5828 100644 --- a/public/app/plugins/datasource/tempo/configuration/ConfigEditor.tsx +++ b/public/app/plugins/datasource/tempo/configuration/ConfigEditor.tsx @@ -34,11 +34,9 @@ export const ConfigEditor: React.FC = ({ options, onOptionsChange }) => {
) : null} - {config.featureToggles.tempoServiceGraph && ( -
- -
- )} +
+ +
From 2617a25fb5c2bf5d4ad66017a82973ba99d3776d Mon Sep 17 00:00:00 2001 From: Gabriel MABILLE Date: Tue, 19 Jul 2022 09:30:54 +0200 Subject: [PATCH 051/116] Fix: Use ac.parameter for the scope protecting plugins routes (#52389) * Fix: Use parameter for the scope protecting /a routes * Fix: Use parameter for the scope protecting /plugins/resources routes --- pkg/api/api.go | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pkg/api/api.go b/pkg/api/api.go index 9ab32650ac4..8eac11dac35 100644 --- a/pkg/api/api.go +++ b/pkg/api/api.go @@ -91,7 +91,7 @@ func (hs *HTTPServer) registerRoutes() { r.Get("/plugins/:id/edit", reqSignedIn, hs.Index) // deprecated r.Get("/plugins/:id/page/:page", reqSignedIn, hs.Index) // App Root Page - appPluginIDScope := plugins.ScopeProvider.GetResourceScope(":id") + appPluginIDScope := plugins.ScopeProvider.GetResourceScope(ac.Parameter(":id")) r.Get("/a/:id/*", authorize(reqSignedIn, ac.EvalPermission(plugins.ActionAppAccess, appPluginIDScope)), hs.Index) r.Get("/a/:id", authorize(reqSignedIn, ac.EvalPermission(plugins.ActionAppAccess, appPluginIDScope)), hs.Index) @@ -334,7 +334,7 @@ func (hs *HTTPServer) registerRoutes() { datasourceRoute.Get("/id/:name", authorize(reqSignedIn, ac.EvalPermission(datasources.ActionIDRead, nameScope)), routing.Wrap(hs.GetDataSourceIdByName)) }) - pluginIDScope := plugins.ScopeProvider.GetResourceScope(":pluginId") + pluginIDScope := plugins.ScopeProvider.GetResourceScope(ac.Parameter(":pluginId")) apiRoute.Get("/plugins", routing.Wrap(hs.GetPluginList)) apiRoute.Get("/plugins/:pluginId/settings", routing.Wrap(hs.GetPluginSettingByID)) // RBAC check performed in handler for App Plugins apiRoute.Get("/plugins/:pluginId/markdown/:name", routing.Wrap(hs.GetPluginMarkdown)) From ab8ad1bb42a641014ce8184397ce78fb7b380b87 Mon Sep 17 00:00:00 2001 From: Kat Yang <69819079+yangkb09@users.noreply.github.com> Date: Tue, 19 Jul 2022 04:04:38 -0400 Subject: [PATCH 052/116] Chore: Add new go test 
commands for unit, integration, and pro tests to makefile (#51202) * Chore: Add new go test commands for unit, integration tests to makefile * Add test-go-unit and test-go-integration targets as dependencies of the test-go target * Add makefile target for mysql & postgres backends * Set GRAFANA_TEST_DB variable for the xargs postgres command Co-authored-by: Sofia Papagiannaki <1632407+papagian@users.noreply.github.com> * Set GRAFANA_TEST_DB variable for the xargs mysql command Co-authored-by: Sofia Papagiannaki <1632407+papagian@users.noreply.github.com> * Use postgres_tests as source Co-authored-by: Sofia Papagiannaki <1632407+papagian@users.noreply.github.com> * Set postgres_tests as source Co-authored-by: Sofia Papagiannaki <1632407+papagian@users.noreply.github.com> * Clean test cache before postgres and mysql integration test makefile commands Co-authored-by: Sofia Papagiannaki <1632407+papagian@users.noreply.github.com> --- Makefile | 35 ++++++++++++++++++++++++++++++++--- 1 file changed, 32 insertions(+), 3 deletions(-) diff --git a/Makefile b/Makefile index 262dc3e3458..1279a7ae45c 100644 --- a/Makefile +++ b/Makefile @@ -94,9 +94,30 @@ run-frontend: deps-js ## Fetch js dependencies and watch frontend for rebuild ##@ Testing -test-go: ## Run tests for backend. - @echo "test backend" - $(GO) test -v ./pkg/... +.PHONY: test-go +test-go: test-go-unit test-go-integration + +.PHONY: test-go-unit +test-go-unit: ## Run unit tests for backend with flags. + @echo "test backend unit tests" + $(GO) test -short -covermode=atomic -timeout=30m ./pkg/... + +.PHONY: test-go-integration +test-go-integration: ## Run integration tests for backend with flags. + @echo "test backend integration tests" + $(GO) test -run Integration -covermode=atomic -timeout=30m ./pkg/... + +.PHONY: test-go-integration-postgres +test-go-integration-postgres: devenv-postgres ## Run integration tests for postgres backend with flags. 
+ @echo "test backend integration postgres tests" + $(GO) clean -testcache + $(GO) list './pkg/...' | xargs -I {} sh -c 'GRAFANA_TEST_DB=postgres go test -run Integration -covermode=atomic -timeout=30m {}' + +.PHONY: test-go-integration-mysql +test-go-integration-mysql: devenv-mysql ## Run integration tests for mysql backend with flags. + @echo "test backend integration mysql tests" + $(GO) clean -testcache + $(GO) list './pkg/...' | xargs -I {} sh -c 'GRAFANA_TEST_DB=mysql go test -run Integration -covermode=atomic -timeout=30m {}' test-js: ## Run tests for frontend. @echo "test frontend" @@ -153,6 +174,14 @@ devenv-down: ## Stop optional services. test -f docker-compose.yaml && \ docker-compose down || exit 0; +devenv-postgres: + @cd devenv; \ + sources=postgres_tests + +devenv-mysql: + @cd devenv; \ + sources=mysql_tests + ##@ Helpers # We separate the protobuf generation because most development tasks on From e74c2390de9081304f7a5e77dcb36b86ecc4a0d8 Mon Sep 17 00:00:00 2001 From: Peter Holmberg Date: Tue, 19 Jul 2022 10:30:26 +0200 Subject: [PATCH 053/116] Alerting: Prevent evaluation if "for" shorter than "evaluate" (#51797) Co-authored-by: Armand Grillet --- pkg/services/ngalert/CHANGELOG.md | 1 + .../GrafanaConditionEvalWarning.tsx | 34 ------------------- .../rule-editor/GrafanaEvaluationBehavior.tsx | 21 +++++++++--- 3 files changed, 17 insertions(+), 39 deletions(-) delete mode 100644 public/app/features/alerting/unified/components/rule-editor/GrafanaConditionEvalWarning.tsx diff --git a/pkg/services/ngalert/CHANGELOG.md b/pkg/services/ngalert/CHANGELOG.md index a94b240f666..7ec0abe243b 100644 --- a/pkg/services/ngalert/CHANGELOG.md +++ b/pkg/services/ngalert/CHANGELOG.md @@ -52,6 +52,7 @@ Scopes must have an order to ensure consistency and ease of search, this helps u - [ENHANCEMENT] Scheduler: Drop ticks if rule evaluation is too slow and adds a metric grafana_alerting_schedule_rule_evaluations_missed_total to track missed evaluations per rule #48885 - 
[ENHANCEMENT] Ticker to tick at predictable time #50197 - [ENHANCEMENT] Migration: Don't stop the migration when failing to parse alert rule tags #51253 +- [ENHANCEMENT] Prevent evaluation if "for" shorter than "evaluate" #51797 ## 9.0.0 diff --git a/public/app/features/alerting/unified/components/rule-editor/GrafanaConditionEvalWarning.tsx b/public/app/features/alerting/unified/components/rule-editor/GrafanaConditionEvalWarning.tsx deleted file mode 100644 index 0de1c1d6122..00000000000 --- a/public/app/features/alerting/unified/components/rule-editor/GrafanaConditionEvalWarning.tsx +++ /dev/null @@ -1,34 +0,0 @@ -import { isEmpty } from 'lodash'; -import React, { FC } from 'react'; -import { useFormContext } from 'react-hook-form'; - -import { durationToMilliseconds, parseDuration } from '@grafana/data'; -import { Alert } from '@grafana/ui'; - -import { RuleFormValues } from '../../types/rule-form'; - -// a warning that will be shown if a problematic yet technically valid combination of "evaluate every" and "evaluate for" is enetered -export const GrafanaConditionEvalWarning: FC = () => { - const { watch } = useFormContext(); - const evaluateFor = watch('evaluateFor'); - const evaluateEvery = watch('evaluateEvery'); - if (evaluateFor === '0') { - return null; - } - const durationFor = parseDuration(evaluateFor); - const durationEvery = parseDuration(evaluateEvery); - if (isEmpty(durationFor) || isEmpty(durationEvery)) { - return null; - } - const millisFor = durationToMilliseconds(durationFor); - const millisEvery = durationToMilliseconds(durationEvery); - if (millisFor && millisEvery && millisFor <= millisEvery) { - return ( - - Setting a "for" duration that is less than or equal to the evaluation interval will result in the - evaluation interval being used to calculate when an alert that has stopped receiving data will be closed. 
- - ); - } - return null; -}; diff --git a/public/app/features/alerting/unified/components/rule-editor/GrafanaEvaluationBehavior.tsx b/public/app/features/alerting/unified/components/rule-editor/GrafanaEvaluationBehavior.tsx index 056c6f7fcb6..d696ca6a97b 100644 --- a/public/app/features/alerting/unified/components/rule-editor/GrafanaEvaluationBehavior.tsx +++ b/public/app/features/alerting/unified/components/rule-editor/GrafanaEvaluationBehavior.tsx @@ -10,19 +10,26 @@ import { positiveDurationValidationPattern, durationValidationPattern } from '.. import { CollapseToggle } from '../CollapseToggle'; import { GrafanaAlertStatePicker } from './GrafanaAlertStatePicker'; -import { GrafanaConditionEvalWarning } from './GrafanaConditionEvalWarning'; import { PreviewRule } from './PreviewRule'; import { RuleEditorSection } from './RuleEditorSection'; const MIN_TIME_RANGE_STEP_S = 10; // 10 seconds -const forValidationOptions: RegisterOptions = { +const forValidationOptions = (evaluateEvery: string): RegisterOptions => ({ required: { value: true, message: 'Required.', }, pattern: durationValidationPattern, -}; + validate: (value) => { + const evaluateEveryDuration = parseDuration(evaluateEvery); + const forDuration = parseDuration(value); + const millisFor = durationToMilliseconds(forDuration); + const millisEvery = durationToMilliseconds(evaluateEveryDuration); + + return millisFor >= millisEvery ? true : 'For must be greater than or equal to evaluate every.'; + }, +}); const evaluateEveryValidationOptions: RegisterOptions = { required: { @@ -51,6 +58,7 @@ export const GrafanaEvaluationBehavior: FC = () => { const { register, formState: { errors }, + watch, } = useFormContext(); const evaluateEveryId = 'eval-every-input'; @@ -85,11 +93,14 @@ export const GrafanaEvaluationBehavior: FC = () => { invalid={!!errors.evaluateFor?.message} validationMessageHorizontalOverflow={true} > - +
- setShowErrorHandling(!collapsed)} From a0f96ed4e150a59e254538cbf1458b991f24076c Mon Sep 17 00:00:00 2001 From: Joe Blubaugh Date: Tue, 19 Jul 2022 16:42:48 +0800 Subject: [PATCH 054/116] SQLStore: Support Upserting multiple rows. (#52228) This will be used to reduce write load when the alerting system writes a large number of events. --- pkg/services/sqlstore/migrator/dialect.go | 1 + .../sqlstore/migrator/mysql_dialect.go | 26 ++++++- .../sqlstore/migrator/postgres_dialect.go | 25 ++++++- .../sqlstore/migrator/sqlite_dialect.go | 25 ++++++- pkg/services/sqlstore/migrator/upsert_test.go | 74 +++++++++++++++++++ 5 files changed, 141 insertions(+), 10 deletions(-) create mode 100644 pkg/services/sqlstore/migrator/upsert_test.go diff --git a/pkg/services/sqlstore/migrator/dialect.go b/pkg/services/sqlstore/migrator/dialect.go index 740704c6f40..fa3c8587ec5 100644 --- a/pkg/services/sqlstore/migrator/dialect.go +++ b/pkg/services/sqlstore/migrator/dialect.go @@ -49,6 +49,7 @@ type Dialect interface { ColumnCheckSQL(tableName, columnName string) (string, []interface{}) // UpsertSQL returns the upsert sql statement for a dialect UpsertSQL(tableName string, keyCols, updateCols []string) string + UpsertMultipleSQL(tableName string, keyCols, updateCols []string, count int) (string, error) ColString(*Column) string ColStringNoPk(*Column) string diff --git a/pkg/services/sqlstore/migrator/mysql_dialect.go b/pkg/services/sqlstore/migrator/mysql_dialect.go index aefdc81c67c..425928d05da 100644 --- a/pkg/services/sqlstore/migrator/mysql_dialect.go +++ b/pkg/services/sqlstore/migrator/mysql_dialect.go @@ -210,8 +210,16 @@ func (db *MySQLDialect) IsDeadlock(err error) bool { return db.isThisError(err, mysqlerr.ER_LOCK_DEADLOCK) } -// UpsertSQL returns the upsert sql statement for PostgreSQL dialect +// UpsertSQL returns the upsert sql statement for MySQL dialect func (db *MySQLDialect) UpsertSQL(tableName string, keyCols, updateCols []string) string { + q, _ := 
db.UpsertMultipleSQL(tableName, keyCols, updateCols, 1) + return q +} + +func (db *MySQLDialect) UpsertMultipleSQL(tableName string, keyCols, updateCols []string, count int) (string, error) { + if count < 1 { + return "", fmt.Errorf("upsert statement must have count >= 1. Got %v", count) + } columnsStr := strings.Builder{} colPlaceHoldersStr := strings.Builder{} setStr := strings.Builder{} @@ -226,13 +234,23 @@ func (db *MySQLDialect) UpsertSQL(tableName string, keyCols, updateCols []string setStr.WriteString(fmt.Sprintf("%s=VALUES(%s)%s", db.Quote(c), db.Quote(c), separator)) } - s := fmt.Sprintf(`INSERT INTO %s (%s) VALUES (%s) ON DUPLICATE KEY UPDATE %s`, + valuesStr := strings.Builder{} + separator = ", " + colPlaceHolders := colPlaceHoldersStr.String() + for i := 0; i < count; i++ { + if i == count-1 { + separator = "" + } + valuesStr.WriteString(fmt.Sprintf("(%s)%s", colPlaceHolders, separator)) + } + + s := fmt.Sprintf(`INSERT INTO %s (%s) VALUES %s ON DUPLICATE KEY UPDATE %s`, tableName, columnsStr.String(), - colPlaceHoldersStr.String(), + valuesStr.String(), setStr.String(), ) - return s + return s, nil } func (db *MySQLDialect) Lock(cfg LockCfg) error { diff --git a/pkg/services/sqlstore/migrator/postgres_dialect.go b/pkg/services/sqlstore/migrator/postgres_dialect.go index dbe661d0be8..b2b0e53884c 100644 --- a/pkg/services/sqlstore/migrator/postgres_dialect.go +++ b/pkg/services/sqlstore/migrator/postgres_dialect.go @@ -224,6 +224,15 @@ func (db *PostgresDialect) PostInsertId(table string, sess *xorm.Session) error // UpsertSQL returns the upsert sql statement for PostgreSQL dialect func (db *PostgresDialect) UpsertSQL(tableName string, keyCols, updateCols []string) string { + str, _ := db.UpsertMultipleSQL(tableName, keyCols, updateCols, 1) + return str +} + +// UpsertMultipleSQL returns the upsert sql statement for PostgreSQL dialect +func (db *PostgresDialect) UpsertMultipleSQL(tableName string, keyCols, updateCols []string, count int) (string, 
error) { + if count < 1 { + return "", fmt.Errorf("upsert statement must have count >= 1. Got %v", count) + } columnsStr := strings.Builder{} onConflictStr := strings.Builder{} colPlaceHoldersStr := strings.Builder{} @@ -249,14 +258,24 @@ func (db *PostgresDialect) UpsertSQL(tableName string, keyCols, updateCols []str onConflictStr.WriteString(fmt.Sprintf("%s%s", db.Quote(c), separatorVar)) } - s := fmt.Sprintf(`INSERT INTO %s (%s) VALUES (%s) ON CONFLICT(%s) DO UPDATE SET %s`, + valuesStr := strings.Builder{} + separatorVar = separator + colPlaceHolders := colPlaceHoldersStr.String() + for i := 0; i < count; i++ { + if i == count-1 { + separatorVar = "" + } + valuesStr.WriteString(fmt.Sprintf("(%s)%s", colPlaceHolders, separatorVar)) + } + + s := fmt.Sprintf(`INSERT INTO %s (%s) VALUES %s ON CONFLICT(%s) DO UPDATE SET %s`, tableName, columnsStr.String(), - colPlaceHoldersStr.String(), + valuesStr.String(), onConflictStr.String(), setStr.String(), ) - return s + return s, nil } func (db *PostgresDialect) Lock(cfg LockCfg) error { diff --git a/pkg/services/sqlstore/migrator/sqlite_dialect.go b/pkg/services/sqlstore/migrator/sqlite_dialect.go index 2ea68656af4..25d3976a56c 100644 --- a/pkg/services/sqlstore/migrator/sqlite_dialect.go +++ b/pkg/services/sqlstore/migrator/sqlite_dialect.go @@ -151,6 +151,15 @@ func (db *SQLite3) IsDeadlock(err error) bool { // UpsertSQL returns the upsert sql statement for SQLite dialect func (db *SQLite3) UpsertSQL(tableName string, keyCols, updateCols []string) string { + str, _ := db.UpsertMultipleSQL(tableName, keyCols, updateCols, 1) + return str +} + +// UpsertMultipleSQL returns the upsert sql statement for PostgreSQL dialect +func (db *SQLite3) UpsertMultipleSQL(tableName string, keyCols, updateCols []string, count int) (string, error) { + if count < 1 { + return "", fmt.Errorf("upsert statement must have count >= 1. 
Got %v", count) + } columnsStr := strings.Builder{} onConflictStr := strings.Builder{} colPlaceHoldersStr := strings.Builder{} @@ -176,12 +185,22 @@ func (db *SQLite3) UpsertSQL(tableName string, keyCols, updateCols []string) str onConflictStr.WriteString(fmt.Sprintf("%s%s", db.Quote(c), separatorVar)) } - s := fmt.Sprintf(`INSERT INTO %s (%s) VALUES (%s) ON CONFLICT(%s) DO UPDATE SET %s`, + valuesStr := strings.Builder{} + separatorVar = separator + colPlaceHolders := colPlaceHoldersStr.String() + for i := 0; i < count; i++ { + if i == count-1 { + separatorVar = "" + } + valuesStr.WriteString(fmt.Sprintf("(%s)%s", colPlaceHolders, separatorVar)) + } + + s := fmt.Sprintf(`INSERT INTO %s (%s) VALUES %s ON CONFLICT(%s) DO UPDATE SET %s`, tableName, columnsStr.String(), - colPlaceHoldersStr.String(), + valuesStr.String(), onConflictStr.String(), setStr.String(), ) - return s + return s, nil } diff --git a/pkg/services/sqlstore/migrator/upsert_test.go b/pkg/services/sqlstore/migrator/upsert_test.go new file mode 100644 index 00000000000..9c39673d8d6 --- /dev/null +++ b/pkg/services/sqlstore/migrator/upsert_test.go @@ -0,0 +1,74 @@ +package migrator + +import ( + "testing" + + "github.com/stretchr/testify/require" +) + +func TestUpsertMultiple(t *testing.T) { + tests := []struct { + name string + keyCols []string + updateCols []string + count int + expectedErr bool + expectedPostgresQuery string + expectedMySQLQuery string + expectedSQLiteQuery string + }{ + { + "upsert one", + []string{"key1", "key2"}, + []string{"key1", "key2", "val1", "val2"}, + 1, + false, + "INSERT INTO test_table (\"key1\", \"key2\", \"val1\", \"val2\") VALUES (?, ?, ?, ?) ON CONFLICT(\"key1\", \"key2\") DO UPDATE SET \"key1\"=excluded.\"key1\", \"key2\"=excluded.\"key2\", \"val1\"=excluded.\"val1\", \"val2\"=excluded.\"val2\"", + "INSERT INTO test_table (`key1`, `key2`, `val1`, `val2`) VALUES (?, ?, ?, ?) 
ON DUPLICATE KEY UPDATE `key1`=VALUES(`key1`), `key2`=VALUES(`key2`), `val1`=VALUES(`val1`), `val2`=VALUES(`val2`)", + "INSERT INTO test_table (`key1`, `key2`, `val1`, `val2`) VALUES (?, ?, ?, ?) ON CONFLICT(`key1`, `key2`) DO UPDATE SET `key1`=excluded.`key1`, `key2`=excluded.`key2`, `val1`=excluded.`val1`, `val2`=excluded.`val2`", + }, + { + "upsert two", + []string{"key1", "key2"}, + []string{"key1", "key2", "val1", "val2"}, + 2, + false, + "INSERT INTO test_table (\"key1\", \"key2\", \"val1\", \"val2\") VALUES (?, ?, ?, ?), (?, ?, ?, ?) ON CONFLICT(\"key1\", \"key2\") DO UPDATE SET \"key1\"=excluded.\"key1\", \"key2\"=excluded.\"key2\", \"val1\"=excluded.\"val1\", \"val2\"=excluded.\"val2\"", + "INSERT INTO test_table (`key1`, `key2`, `val1`, `val2`) VALUES (?, ?, ?, ?), (?, ?, ?, ?) ON DUPLICATE KEY UPDATE `key1`=VALUES(`key1`), `key2`=VALUES(`key2`), `val1`=VALUES(`val1`), `val2`=VALUES(`val2`)", + "INSERT INTO test_table (`key1`, `key2`, `val1`, `val2`) VALUES (?, ?, ?, ?), (?, ?, ?, ?) 
ON CONFLICT(`key1`, `key2`) DO UPDATE SET `key1`=excluded.`key1`, `key2`=excluded.`key2`, `val1`=excluded.`val1`, `val2`=excluded.`val2`", + }, + { + "count error", + []string{"key1", "key2"}, + []string{"key1", "key2", "val1", "val2"}, + 0, + true, + "", + "", + "", + }, + } + + for _, tc := range tests { + t.Run(tc.name, func(t *testing.T) { + var db Dialect + db = &PostgresDialect{} + q, err := db.UpsertMultipleSQL("test_table", tc.keyCols, tc.updateCols, tc.count) + + require.True(t, (err != nil) == tc.expectedErr) + require.Equal(t, tc.expectedPostgresQuery, q, "Postgres query incorrect") + + db = &MySQLDialect{} + q, err = db.UpsertMultipleSQL("test_table", tc.keyCols, tc.updateCols, tc.count) + + require.True(t, (err != nil) == tc.expectedErr) + require.Equal(t, tc.expectedMySQLQuery, q, "MySQL query incorrect") + + db = &SQLite3{} + q, err = db.UpsertMultipleSQL("test_table", tc.keyCols, tc.updateCols, tc.count) + + require.True(t, (err != nil) == tc.expectedErr) + require.Equal(t, tc.expectedSQLiteQuery, q, "SQLite query incorrect") + }) + } +} From e6b9ded949bb11316f36efcd1981ea508e1845ca Mon Sep 17 00:00:00 2001 From: Sofia Papagiannaki <1632407+papagian@users.noreply.github.com> Date: Tue, 19 Jul 2022 12:52:51 +0300 Subject: [PATCH 055/116] API: Add service account routes to the swagger (#52398) * API: Add service account routes to the swagger --- .../developers/http_api/serviceaccount.md | 34 +- .../definitions/service_account_tokens.go | 90 +++ pkg/api/docs/definitions/service_accounts.go | 154 ++++ pkg/api/docs/tags.json | 4 + pkg/api/dtos/apikey.go | 8 +- pkg/services/serviceaccounts/api/token.go | 22 +- pkg/services/serviceaccounts/models.go | 69 +- public/api-merged.json | 732 +++++++++++++++++- public/api-spec.json | 728 ++++++++++++++++- 9 files changed, 1772 insertions(+), 69 deletions(-) create mode 100644 pkg/api/docs/definitions/service_account_tokens.go create mode 100644 pkg/api/docs/definitions/service_accounts.go diff --git 
a/docs/sources/developers/http_api/serviceaccount.md b/docs/sources/developers/http_api/serviceaccount.md index a2c5d6fbfae..41ad0787b0f 100644 --- a/docs/sources/developers/http_api/serviceaccount.md +++ b/docs/sources/developers/http_api/serviceaccount.md @@ -115,7 +115,7 @@ Requires basic authentication and that the authenticated user is a Grafana Admin **Example Response**: ```http -HTTP/1.1 200 +HTTP/1.1 201 Content-Type: application/json { @@ -140,9 +140,9 @@ Content-Type: application/json See note in the [introduction]({{< ref "#service-account-api" >}}) for an explanation. -| Action | Scope | -| -------------------- | ------------------ | -| serviceaccounts:read | serviceaccounts:\* | +| Action | Scope | +| -------------------- | -------------------- | +| serviceaccounts:read | serviceaccounts:id:1 | **Example Request**: @@ -183,14 +183,14 @@ Content-Type: application/json See note in the [introduction]({{< ref "#service-account-api" >}}) for an explanation. -| Action | Scope | -| --------------------- | ------------------ | -| serviceaccounts:write | serviceaccounts:\* | +| Action | Scope | +| --------------------- | -------------------- | +| serviceaccounts:write | serviceaccounts:id:1 | **Example Request**: ```http -PUT /api/serviceaccounts/2 HTTP/1.1 +PATCH /api/serviceaccounts/2 HTTP/1.1 Accept: application/json Content-Type: application/json Authorization: Basic YWRtaW46YWRtaW4= @@ -235,9 +235,9 @@ Content-Type: application/json See note in the [introduction]({{< ref "#service-account-api" >}}) for an explanation. -| Action | Scope | -| -------------------- | ------------------ | -| serviceaccounts:read | serviceaccounts:\* | +| Action | Scope | +| -------------------- | -------------------- | +| serviceaccounts:read | serviceaccounts:id:1 | **Example Request**: @@ -277,9 +277,9 @@ Content-Type: application/json See note in the [introduction]({{< ref "#service-account-api" >}}) for an explanation. 
-| Action | Scope | -| --------------------- | ------------------ | -| serviceaccounts:write | serviceaccounts:\* | +| Action | Scope | +| --------------------- | -------------------- | +| serviceaccounts:write | serviceaccounts:id:1 | **Example Request**: @@ -318,9 +318,9 @@ Content-Type: application/json See note in the [introduction]({{< ref "#service-account-api" >}}) for an explanation. -| Action | Scope | -| --------------------- | ------------------ | -| serviceaccounts:write | serviceaccounts:\* | +| Action | Scope | +| --------------------- | -------------------- | +| serviceaccounts:write | serviceaccounts:id:1 | **Example Request**: diff --git a/pkg/api/docs/definitions/service_account_tokens.go b/pkg/api/docs/definitions/service_account_tokens.go new file mode 100644 index 00000000000..a7fc74e0134 --- /dev/null +++ b/pkg/api/docs/definitions/service_account_tokens.go @@ -0,0 +1,90 @@ +package definitions + +import ( + "github.com/grafana/grafana/pkg/api/dtos" + "github.com/grafana/grafana/pkg/services/serviceaccounts" + "github.com/grafana/grafana/pkg/services/serviceaccounts/api" +) + +// swagger:route GET /serviceaccounts/{serviceAccountId}/tokens service_accounts listTokens +// +// Get service account tokens +// +// Required permissions (See note in the [introduction](https://grafana.com/docs/grafana/latest/developers/http_api/serviceaccount/#service-account-api) for an explanation): +// action: `serviceaccounts:read` scope: `global:serviceaccounts:id:1` (single service account) +// +// Requires basic authentication and that the authenticated user is a Grafana Admin. 
+// +// Responses: +// 200: listTokensResponse +// 400: badRequestError +// 401: unauthorisedError +// 403: forbiddenError +// 500: internalServerError + +// swagger:route POST /serviceaccounts/{serviceAccountId}/tokens service_accounts createToken +// +// Create service account tokens +// +// Required permissions (See note in the [introduction](https://grafana.com/docs/grafana/latest/developers/http_api/serviceaccount/#service-account-api) for an explanation): +// action: `serviceaccounts:write` scope: `serviceaccounts:id:1` (single service account) +// +// Responses: +// 200: createTokenResponse +// 400: badRequestError +// 401: unauthorisedError +// 403: forbiddenError +// 404: notFoundError +// 409: conflictError +// 500: internalServerError + +// swagger:route DELETE /serviceaccounts/{serviceAccountId}/tokens/{tokenId} service_accounts deleteToken +// +// Delete service account tokens +// +// Required permissions (See note in the [introduction](https://grafana.com/docs/grafana/latest/developers/http_api/serviceaccount/#service-account-api) for an explanation): +// action: `serviceaccounts:write` scope: `serviceaccounts:id:1` (single service account) +// +// Requires basic authentication and that the authenticated user is a Grafana Admin. 
+// +// Responses: +// 200: okResponse +// 400: badRequestError +// 401: unauthorisedError +// 403: forbiddenError +// 404: notFoundError +// 500: internalServerError + +// swagger:parameters listTokens +type ListTokensParams struct { + // in:path + ServiceAccountId int64 `json:"serviceAccountId"` +} + +// swagger:parameters createToken +type CreateTokenParams struct { + // in:path + ServiceAccountId int64 `json:"serviceAccountId"` + // in:body + Body serviceaccounts.AddServiceAccountTokenCommand +} + +// swagger:parameters deleteToken +type DeleteTokenParams struct { + // in:path + TokenId int64 `json:"tokenId"` + // in:path + ServiceAccountId int64 `json:"serviceAccountId"` +} + +// swagger:response listTokensResponse +type ListTokensResponse struct { + // in:body + Body *api.TokenDTO +} + +// swagger:response createTokenResponse +type CreateTokenResponse struct { + // in:body + Body *dtos.NewApiKeyResult +} diff --git a/pkg/api/docs/definitions/service_accounts.go b/pkg/api/docs/definitions/service_accounts.go new file mode 100644 index 00000000000..52455fe3569 --- /dev/null +++ b/pkg/api/docs/definitions/service_accounts.go @@ -0,0 +1,154 @@ +package definitions + +import "github.com/grafana/grafana/pkg/services/serviceaccounts" + +// swagger:route GET /serviceaccounts/search service_accounts searchOrgServiceAccountsWithPaging +// +// Search service accounts with Paging +// +// Required permissions (See note in the [introduction](https://grafana.com/docs/grafana/latest/developers/http_api/serviceaccount/#service-account-api) for an explanation): +// action: `serviceaccounts:read` scope: `serviceaccounts:*` +// +// Responses: +// 200: searchOrgServiceAccountsWithPagingResponse +// 401: unauthorisedError +// 403: forbiddenError +// 500: internalServerError + +// swagger:route POST /serviceaccounts service_accounts createServiceAccount +// +// Create service account +// +// Required permissions (See note in the 
[introduction](https://grafana.com/docs/grafana/latest/developers/http_api/serviceaccount/#service-account-api) for an explanation): +// action: `serviceaccounts:write` scope: `serviceaccounts:*` +// +// Requires basic authentication and that the authenticated user is a Grafana Admin. +// +// Responses: +// 201: createServiceAccountResponse +// 400: badRequestError +// 401: unauthorisedError +// 403: forbiddenError +// 500: internalServerError + +// swagger:route GET /serviceaccounts/{serviceAccountId} service_accounts retrieveServiceAccount +// +// Get single serviceaccount by Id +// +// Required permissions (See note in the [introduction](https://grafana.com/docs/grafana/latest/developers/http_api/serviceaccount/#service-account-api) for an explanation): +// action: `serviceaccounts:read` scope: `serviceaccounts:id:1` (single service account) +// +// Responses: +// 200: retrieveServiceAccountResponse +// 400: badRequestError +// 401: unauthorisedError +// 403: forbiddenError +// 404: notFoundError +// 500: internalServerError + +// swagger:route PATCH /serviceaccounts/{serviceAccountId} service_accounts updateServiceAccount +// +// Update service account +// +// Required permissions (See note in the [introduction](https://grafana.com/docs/grafana/latest/developers/http_api/serviceaccount/#service-account-api) for an explanation): +// action: `serviceaccounts:write` scope: `serviceaccounts:id:1` (single service account) +// +// Responses: +// 200: updateServiceAccountResponse +// 400: badRequestError +// 401: unauthorisedError +// 403: forbiddenError +// 404: notFoundError +// 500: internalServerError + +// swagger:route DELETE /serviceaccounts/{serviceAccountId} service_accounts deleteServiceAccount +// +// Delete service account +// +// Required permissions (See note in the [introduction](https://grafana.com/docs/grafana/latest/developers/http_api/serviceaccount/#service-account-api) for an explanation): +// action: `serviceaccounts:delete` scope: 
`serviceaccounts:id:1` (single service account) +// +// Responses: +// 200: okResponse +// 400: badRequestError +// 401: unauthorisedError +// 403: forbiddenError +// 500: internalServerError + +// swagger:parameters searchOrgServiceAccountsWithPaging +type SearchOrgServiceAccountsWithPagingParams struct { + // in:query + // required:false + Disabled bool `jsson:"disabled"` + // in:query + // required:false + ExpiredTokens bool `json:"expiredTokens"` + // It will return results where the query value is contained in one of the name. + // Query values with spaces need to be URL encoded. + // in:query + // required:false + Query string `json:"query"` + // The default value is 1000. + // in:query + // required:false + PerPage int `json:"perpage"` + // The default value is 1. + // in:query + // required:false + Page int `json:"page"` +} + +// swagger:parameters createServiceAccount +type CreateServiceAccountParams struct { + //in:body + Body serviceaccounts.CreateServiceAccountForm +} + +// swagger:parameters retrieveServiceAccount +type RetrieveServiceAccountParams struct { + // in:path + ServiceAccountId int64 `json:"serviceAccountId"` +} + +// swagger:parameters updateServiceAccount +type UpdateServiceAccountParams struct { + // in:path + ServiceAccountId int64 `json:"serviceAccountId"` + // in:body + Body serviceaccounts.UpdateServiceAccountForm +} + +// swagger:parameters deleteServiceAccount +type DeleteServiceAccountParams struct { + // in:path + ServiceAccountId int64 `json:"serviceAccountId"` +} + +// swagger:response searchOrgServiceAccountsWithPagingResponse +type SearchOrgServiceAccountsWithPagingResponse struct { + // in:body + Body *serviceaccounts.SearchServiceAccountsResult +} + +// swagger:response createServiceAccountResponse +type CreateServiceAccountResponse struct { + // in:body + Body *serviceaccounts.ServiceAccountDTO +} + +// swagger:response retrieveServiceAccountResponse +type RetrieveServiceAccountResponse struct { + // in:body + Body 
*serviceaccounts.ServiceAccountDTO +} + +// swagger:response updateServiceAccountResponse +type UpdateServiceAccountResponse struct { + // in:body + Body struct { + Message string `json:"message"` + ID int64 `json:"id"` + Name string `json:"name"` + ServiceAccount *serviceaccounts.ServiceAccountProfileDTO `json:"serviceaccount"` + } +} diff --git a/pkg/api/docs/tags.json b/pkg/api/docs/tags.json index b46177db930..38a36cb79bc 100644 --- a/pkg/api/docs/tags.json +++ b/pkg/api/docs/tags.json @@ -83,6 +83,10 @@ { "name": "prometheus", "description": "Grafana Alerting Prometheus-compatible endpoints" + }, + { + "name": "service_accounts", + "description": "If you are running Grafana Enterprise, for some endpoints you'll need to have specific permissions. Refer to [Role-based access control permissions](https://grafana.com/docs/grafana/latest/administration/roles-and-permissions/access-control/custom-role-actions-scopes/) for more information." } ] } diff --git a/pkg/api/dtos/apikey.go b/pkg/api/dtos/apikey.go index d96a5201679..6353deccd60 100644 --- a/pkg/api/dtos/apikey.go +++ b/pkg/api/dtos/apikey.go @@ -7,10 +7,14 @@ import ( "github.com/grafana/grafana/pkg/services/accesscontrol" ) +// swagger:model type NewApiKeyResult struct { - ID int64 `json:"id"` + // example: 1 + ID int64 `json:"id"` + // example: grafana Name string `json:"name"` - Key string `json:"key"` + // example: glsa_yscW25imSKJIuav8zF37RZmnbiDvB05G_fcaaf58a + Key string `json:"key"` } type ApiKeyDTO struct { diff --git a/pkg/services/serviceaccounts/api/token.go b/pkg/services/serviceaccounts/api/token.go index 263e16b6e5f..334b874ffe5 100644 --- a/pkg/services/serviceaccounts/api/token.go +++ b/pkg/services/serviceaccounts/api/token.go @@ -20,14 +20,22 @@ const ( ServiceID = "sa" ) +// swagger:model type TokenDTO struct { - Id int64 `json:"id"` - Name string `json:"name"` - Created *time.Time `json:"created"` - LastUsedAt *time.Time `json:"lastUsedAt"` - Expiration *time.Time `json:"expiration"` - 
SecondsUntilExpiration *float64 `json:"secondsUntilExpiration"` - HasExpired bool `json:"hasExpired"` + // example: 1 + Id int64 `json:"id"` + // example: grafana + Name string `json:"name"` + // example: 2022-03-23T10:31:02Z + Created *time.Time `json:"created"` + // example: 2022-03-23T10:31:02Z + LastUsedAt *time.Time `json:"lastUsedAt"` + // example: 2022-03-23T10:31:02Z + Expiration *time.Time `json:"expiration"` + // example: 0 + SecondsUntilExpiration *float64 `json:"secondsUntilExpiration"` + // example: false + HasExpired bool `json:"hasExpired"` } func hasExpired(expiration *int64) bool { diff --git a/pkg/services/serviceaccounts/models.go b/pkg/services/serviceaccounts/models.go index 1eab5a23b68..3a93348e1b8 100644 --- a/pkg/services/serviceaccounts/models.go +++ b/pkg/services/serviceaccounts/models.go @@ -25,27 +25,41 @@ type ServiceAccount struct { Id int64 } +// swagger:model type CreateServiceAccountForm struct { - Name string `json:"name" binding:"Required"` - Role *models.RoleType `json:"role"` - IsDisabled *bool `json:"isDisabled"` + // example: grafana + Name string `json:"name" binding:"Required"` + // example: Admin + Role *models.RoleType `json:"role"` + // example: false + IsDisabled *bool `json:"isDisabled"` } +// swagger:model type UpdateServiceAccountForm struct { Name *string `json:"name"` Role *models.RoleType `json:"role"` IsDisabled *bool `json:"isDisabled"` } +// swagger: model type ServiceAccountDTO struct { - Id int64 `json:"id" xorm:"user_id"` - Name string `json:"name" xorm:"name"` - Login string `json:"login" xorm:"login"` - OrgId int64 `json:"orgId" xorm:"org_id"` - IsDisabled bool `json:"isDisabled" xorm:"is_disabled"` - Role string `json:"role" xorm:"role"` - Tokens int64 `json:"tokens"` - AvatarUrl string `json:"avatarUrl"` + Id int64 `json:"id" xorm:"user_id"` + // example: grafana + Name string `json:"name" xorm:"name"` + // example: sa-grafana + Login string `json:"login" xorm:"login"` + // example: 1 + OrgId int64 
`json:"orgId" xorm:"org_id"` + // example: false + IsDisabled bool `json:"isDisabled" xorm:"is_disabled"` + // example: Viewer + Role string `json:"role" xorm:"role"` + // example: 0 + Tokens int64 `json:"tokens"` + // example: /avatar/85ec38023d90823d3e5b43ef35646af9 + AvatarUrl string `json:"avatarUrl"` + // example: {"serviceaccounts:delete": true, "serviceaccounts:read": true, "serviceaccounts:write": true} AccessControl map[string]bool `json:"accessControl,omitempty"` } @@ -57,23 +71,38 @@ type AddServiceAccountTokenCommand struct { Result *models.ApiKey `json:"-"` } +// swagger: model type SearchServiceAccountsResult struct { + // It can be used for pagination of the user list + // E.g. if totalCount is equal to 100 users and + // the perpage parameter is set to 10 then there are 10 pages of users. TotalCount int64 `json:"totalCount"` ServiceAccounts []*ServiceAccountDTO `json:"serviceAccounts"` Page int `json:"page"` PerPage int `json:"perPage"` } +// swagger:model type ServiceAccountProfileDTO struct { - Id int64 `json:"id" xorm:"user_id"` - Name string `json:"name" xorm:"name"` - Login string `json:"login" xorm:"login"` - OrgId int64 `json:"orgId" xorm:"org_id"` - IsDisabled bool `json:"isDisabled" xorm:"is_disabled"` - Created time.Time `json:"createdAt" xorm:"created"` - Updated time.Time `json:"updatedAt" xorm:"updated"` - AvatarUrl string `json:"avatarUrl" xorm:"-"` - Role string `json:"role" xorm:"role"` + // example: 2 + Id int64 `json:"id" xorm:"user_id"` + // example: test + Name string `json:"name" xorm:"name"` + // example: sa-grafana + Login string `json:"login" xorm:"login"` + // example: 1 + OrgId int64 `json:"orgId" xorm:"org_id"` + // example: false + IsDisabled bool `json:"isDisabled" xorm:"is_disabled"` + // example: 2022-03-21T14:35:33Z + Created time.Time `json:"createdAt" xorm:"created"` + // example: 2022-03-21T14:35:33Z + Updated time.Time `json:"updatedAt" xorm:"updated"` + // example: /avatar/8ea890a677d6a223c591a1beea6ea9d2 + 
AvatarUrl string `json:"avatarUrl" xorm:"-"` + // example: Editor + Role string `json:"role" xorm:"role"` + // example: [] Teams []string `json:"teams" xorm:"-"` Tokens int64 `json:"tokens,omitempty"` AccessControl map[string]bool `json:"accessControl,omitempty" xorm:"-"` diff --git a/public/api-merged.json b/public/api-merged.json index 98ab31b0a08..246dc26eb01 100644 --- a/public/api-merged.json +++ b/public/api-merged.json @@ -119,6 +119,13 @@ "tags": ["access_control", "enterprise"], "summary": "Get all roles.", "operationId": "getAllRoles", + "parameters": [ + { + "type": "boolean", + "name": "delegatable", + "in": "query" + } + ], "responses": { "200": { "$ref": "#/responses/getAllRolesResponse" @@ -142,7 +149,7 @@ "in": "body", "required": true, "schema": { - "$ref": "#/definitions/CreateRoleWithPermissionsCommand" + "$ref": "#/definitions/CreateRoleForm" } } ], @@ -233,6 +240,16 @@ "summary": "Delete a custom role.", "operationId": "deleteCustomRole", "parameters": [ + { + "type": "boolean", + "name": "force", + "in": "query" + }, + { + "type": "boolean", + "name": "global", + "in": "query" + }, { "type": "string", "name": "roleUID", @@ -7073,6 +7090,328 @@ } } }, + "/serviceaccounts": { + "post": { + "description": "Required permissions (See note in the [introduction](https://grafana.com/docs/grafana/latest/developers/http_api/serviceaccount/#service-account-api) for an explanation):\naction: `serviceaccounts:write` scope: `serviceaccounts:*`\n\nRequires basic authentication and that the authenticated user is a Grafana Admin.", + "tags": ["service_accounts"], + "summary": "Create service account", + "operationId": "createServiceAccount", + "parameters": [ + { + "name": "Body", + "in": "body", + "schema": { + "$ref": "#/definitions/CreateServiceAccountForm" + } + } + ], + "responses": { + "201": { + "$ref": "#/responses/createServiceAccountResponse" + }, + "400": { + "$ref": "#/responses/badRequestError" + }, + "401": { + "$ref": 
"#/responses/unauthorisedError" + }, + "403": { + "$ref": "#/responses/forbiddenError" + }, + "500": { + "$ref": "#/responses/internalServerError" + } + } + } + }, + "/serviceaccounts/search": { + "get": { + "description": "Required permissions (See note in the [introduction](https://grafana.com/docs/grafana/latest/developers/http_api/serviceaccount/#service-account-api) for an explanation):\naction: `serviceaccounts:read` scope: `serviceaccounts:*`", + "tags": ["service_accounts"], + "summary": "Search service accounts with Paging", + "operationId": "searchOrgServiceAccountsWithPaging", + "parameters": [ + { + "type": "boolean", + "name": "Disabled", + "in": "query" + }, + { + "type": "boolean", + "name": "expiredTokens", + "in": "query" + }, + { + "type": "string", + "description": "It will return results where the query value is contained in one of the name.\nQuery values with spaces need to be URL encoded.", + "name": "query", + "in": "query" + }, + { + "type": "integer", + "format": "int64", + "description": "The default value is 1000.", + "name": "perpage", + "in": "query" + }, + { + "type": "integer", + "format": "int64", + "description": "The default value is 1.", + "name": "page", + "in": "query" + } + ], + "responses": { + "200": { + "$ref": "#/responses/searchOrgServiceAccountsWithPagingResponse" + }, + "401": { + "$ref": "#/responses/unauthorisedError" + }, + "403": { + "$ref": "#/responses/forbiddenError" + }, + "500": { + "$ref": "#/responses/internalServerError" + } + } + } + }, + "/serviceaccounts/{serviceAccountId}": { + "get": { + "description": "Required permissions (See note in the [introduction](https://grafana.com/docs/grafana/latest/developers/http_api/serviceaccount/#service-account-api) for an explanation):\naction: `serviceaccounts:read` scope: `serviceaccounts:id:1` (single service account)", + "tags": ["service_accounts"], + "summary": "Get single serviceaccount by Id", + "operationId": "retrieveServiceAccount", + "parameters": [ + { + 
"type": "integer", + "format": "int64", + "name": "serviceAccountId", + "in": "path", + "required": true + } + ], + "responses": { + "200": { + "$ref": "#/responses/retrieveServiceAccountResponse" + }, + "400": { + "$ref": "#/responses/badRequestError" + }, + "401": { + "$ref": "#/responses/unauthorisedError" + }, + "403": { + "$ref": "#/responses/forbiddenError" + }, + "404": { + "$ref": "#/responses/notFoundError" + }, + "500": { + "$ref": "#/responses/internalServerError" + } + } + }, + "delete": { + "description": "Required permissions (See note in the [introduction](https://grafana.com/docs/grafana/latest/developers/http_api/serviceaccount/#service-account-api) for an explanation):\naction: `serviceaccounts:delete` scope: `serviceaccounts:id:1` (single service account)", + "tags": ["service_accounts"], + "summary": "Delete service account", + "operationId": "deleteServiceAccount", + "parameters": [ + { + "type": "integer", + "format": "int64", + "name": "serviceAccountId", + "in": "path", + "required": true + } + ], + "responses": { + "200": { + "$ref": "#/responses/okResponse" + }, + "400": { + "$ref": "#/responses/badRequestError" + }, + "401": { + "$ref": "#/responses/unauthorisedError" + }, + "403": { + "$ref": "#/responses/forbiddenError" + }, + "500": { + "$ref": "#/responses/internalServerError" + } + } + }, + "patch": { + "description": "Required permissions (See note in the [introduction](https://grafana.com/docs/grafana/latest/developers/http_api/serviceaccount/#service-account-api) for an explanation):\naction: `serviceaccounts:write` scope: `serviceaccounts:id:1` (single service account)", + "tags": ["service_accounts"], + "summary": "Update service account", + "operationId": "updateServiceAccount", + "parameters": [ + { + "type": "integer", + "format": "int64", + "name": "serviceAccountId", + "in": "path", + "required": true + }, + { + "name": "Body", + "in": "body", + "schema": { + "$ref": "#/definitions/UpdateServiceAccountForm" + } + } + ], + 
"responses": { + "200": { + "$ref": "#/responses/updateServiceAccountResponse" + }, + "400": { + "$ref": "#/responses/badRequestError" + }, + "401": { + "$ref": "#/responses/unauthorisedError" + }, + "403": { + "$ref": "#/responses/forbiddenError" + }, + "404": { + "$ref": "#/responses/notFoundError" + }, + "500": { + "$ref": "#/responses/internalServerError" + } + } + } + }, + "/serviceaccounts/{serviceAccountId}/tokens": { + "get": { + "description": "Required permissions (See note in the [introduction](https://grafana.com/docs/grafana/latest/developers/http_api/serviceaccount/#service-account-api) for an explanation):\naction: `serviceaccounts:read` scope: `global:serviceaccounts:id:1` (single service account)\n\nRequires basic authentication and that the authenticated user is a Grafana Admin.", + "tags": ["service_accounts"], + "summary": "Get service account tokens", + "operationId": "listTokens", + "parameters": [ + { + "type": "integer", + "format": "int64", + "name": "serviceAccountId", + "in": "path", + "required": true + } + ], + "responses": { + "200": { + "$ref": "#/responses/listTokensResponse" + }, + "400": { + "$ref": "#/responses/badRequestError" + }, + "401": { + "$ref": "#/responses/unauthorisedError" + }, + "403": { + "$ref": "#/responses/forbiddenError" + }, + "500": { + "$ref": "#/responses/internalServerError" + } + } + }, + "post": { + "description": "Required permissions (See note in the [introduction](https://grafana.com/docs/grafana/latest/developers/http_api/serviceaccount/#service-account-api) for an explanation):\naction: `serviceaccounts:write` scope: `serviceaccounts:id:1` (single service account)", + "tags": ["service_accounts"], + "summary": "Create service account tokens", + "operationId": "createToken", + "parameters": [ + { + "type": "integer", + "format": "int64", + "name": "serviceAccountId", + "in": "path", + "required": true + }, + { + "name": "Body", + "in": "body", + "schema": { + "$ref": 
"#/definitions/AddServiceAccountTokenCommand" + } + } + ], + "responses": { + "200": { + "$ref": "#/responses/createTokenResponse" + }, + "400": { + "$ref": "#/responses/badRequestError" + }, + "401": { + "$ref": "#/responses/unauthorisedError" + }, + "403": { + "$ref": "#/responses/forbiddenError" + }, + "404": { + "$ref": "#/responses/notFoundError" + }, + "409": { + "$ref": "#/responses/conflictError" + }, + "500": { + "$ref": "#/responses/internalServerError" + } + } + } + }, + "/serviceaccounts/{serviceAccountId}/tokens/{tokenId}": { + "delete": { + "description": "Required permissions (See note in the [introduction](https://grafana.com/docs/grafana/latest/developers/http_api/serviceaccount/#service-account-api) for an explanation):\naction: `serviceaccounts:write` scope: `serviceaccounts:id:1` (single service account)\n\nRequires basic authentication and that the authenticated user is a Grafana Admin.", + "tags": ["service_accounts"], + "summary": "Delete service account tokens", + "operationId": "deleteToken", + "parameters": [ + { + "type": "integer", + "format": "int64", + "name": "tokenId", + "in": "path", + "required": true + }, + { + "type": "integer", + "format": "int64", + "name": "serviceAccountId", + "in": "path", + "required": true + } + ], + "responses": { + "200": { + "$ref": "#/responses/okResponse" + }, + "400": { + "$ref": "#/responses/badRequestError" + }, + "401": { + "$ref": "#/responses/unauthorisedError" + }, + "403": { + "$ref": "#/responses/forbiddenError" + }, + "404": { + "$ref": "#/responses/notFoundError" + }, + "500": { + "$ref": "#/responses/internalServerError" + } + } + } + }, "/snapshot/shared-options": { "get": { "tags": ["snapshots"], @@ -7374,16 +7713,15 @@ "operationId": "removeTeamGroupApi", "parameters": [ { - "type": "integer", - "format": "int64", - "name": "teamId", + "type": "string", + "name": "groupId", "in": "path", "required": true }, { "type": "integer", "format": "int64", - "name": "groupId", + "name": "teamId", 
"in": "path", "required": true } @@ -9169,6 +9507,18 @@ } } }, + "AddServiceAccountTokenCommand": { + "type": "object", + "properties": { + "name": { + "type": "string" + }, + "secondsToLive": { + "type": "integer", + "format": "int64" + } + } + }, "AddTeamMemberCommand": { "type": "object", "properties": { @@ -10319,7 +10669,7 @@ } } }, - "CreateRoleWithPermissionsCommand": { + "CreateRoleForm": { "type": "object", "properties": { "description": { @@ -10328,6 +10678,9 @@ "displayName": { "type": "string" }, + "global": { + "type": "boolean" + }, "group": { "type": "string" }, @@ -10352,6 +10705,24 @@ } } }, + "CreateServiceAccountForm": { + "type": "object", + "properties": { + "isDisabled": { + "type": "boolean", + "example": false + }, + "name": { + "type": "string", + "example": "grafana" + }, + "role": { + "type": "string", + "enum": ["Viewer", "Editor", "Admin"], + "example": "Admin" + } + } + }, "CreateTeamCommand": { "type": "object", "properties": { @@ -10991,6 +11362,83 @@ } } }, + "DataSourcePermissionRuleDTO": { + "type": "object", + "properties": { + "builtInRole": { + "type": "string" + }, + "created": { + "type": "string", + "format": "date-time" + }, + "datasourceId": { + "type": "integer", + "format": "int64" + }, + "id": { + "type": "integer", + "format": "int64" + }, + "isManaged": { + "type": "boolean" + }, + "permission": { + "$ref": "#/definitions/DsPermissionType" + }, + "permissionName": { + "type": "string" + }, + "team": { + "type": "string" + }, + "teamAvatarUrl": { + "type": "string" + }, + "teamEmail": { + "type": "string" + }, + "teamId": { + "type": "integer", + "format": "int64" + }, + "updated": { + "type": "string", + "format": "date-time" + }, + "userAvatarUrl": { + "type": "string" + }, + "userEmail": { + "type": "string" + }, + "userId": { + "type": "integer", + "format": "int64" + }, + "userLogin": { + "type": "string" + } + } + }, + "DataSourcePermissionsDTO": { + "type": "object", + "properties": { + "datasourceId": { + 
"type": "integer", + "format": "int64" + }, + "enabled": { + "type": "boolean" + }, + "permissions": { + "type": "array", + "items": { + "$ref": "#/definitions/DataSourcePermissionRuleDTO" + } + } + } + }, "DateTime": { "description": "DateTime is a time but it serializes to ISO8601 format with millis\nIt knows how to read 3 different variations of a RFC3339 date time.\nMost APIs we encounter want either millisecond or second precision times.\nThis just tries to make it worry-free.", "type": "string", @@ -12763,13 +13211,16 @@ "properties": { "id": { "type": "integer", - "format": "int64" + "format": "int64", + "example": 1 }, "key": { - "type": "string" + "type": "string", + "example": "glsa_yscW25imSKJIuav8zF37RZmnbiDvB05G_fcaaf58a" }, "name": { - "type": "string" + "type": "string", + "example": "grafana" } } }, @@ -14684,6 +15135,31 @@ } } }, + "SearchServiceAccountsResult": { + "description": "swagger: model", + "type": "object", + "properties": { + "page": { + "type": "integer", + "format": "int64" + }, + "perPage": { + "type": "integer", + "format": "int64" + }, + "serviceAccounts": { + "type": "array", + "items": { + "$ref": "#/definitions/ServiceAccountDTO" + } + }, + "totalCount": { + "description": "It can be used for pagination of the user list\nE.g. 
if totalCount is equal to 100 users and\nthe perpage parameter is set to 10 then there are 10 pages of users.", + "type": "integer", + "format": "int64" + } + } + }, "SearchTeamQueryResult": { "type": "object", "properties": { @@ -14738,6 +15214,119 @@ "title": "SecretURL is a URL that must not be revealed on marshaling.", "$ref": "#/definitions/URL" }, + "ServiceAccountDTO": { + "description": "swagger: model", + "type": "object", + "properties": { + "accessControl": { + "type": "object", + "additionalProperties": { + "type": "boolean" + }, + "example": { + "serviceaccounts:delete": true, + "serviceaccounts:read": true, + "serviceaccounts:write": true + } + }, + "avatarUrl": { + "type": "string", + "example": "/avatar/85ec38023d90823d3e5b43ef35646af9" + }, + "id": { + "type": "integer", + "format": "int64" + }, + "isDisabled": { + "type": "boolean", + "example": false + }, + "login": { + "type": "string", + "example": "sa-grafana" + }, + "name": { + "type": "string", + "example": "grafana" + }, + "orgId": { + "type": "integer", + "format": "int64", + "example": 1 + }, + "role": { + "type": "string", + "example": "Viewer" + }, + "tokens": { + "type": "integer", + "format": "int64", + "example": 0 + } + } + }, + "ServiceAccountProfileDTO": { + "type": "object", + "properties": { + "accessControl": { + "type": "object", + "additionalProperties": { + "type": "boolean" + } + }, + "avatarUrl": { + "type": "string", + "example": "/avatar/8ea890a677d6a223c591a1beea6ea9d2" + }, + "createdAt": { + "type": "string", + "format": "date-time", + "example": "2022-03-21T14:35:33Z" + }, + "id": { + "type": "integer", + "format": "int64", + "example": 2 + }, + "isDisabled": { + "type": "boolean", + "example": false + }, + "login": { + "type": "string", + "example": "sa-grafana" + }, + "name": { + "type": "string", + "example": "test" + }, + "orgId": { + "type": "integer", + "format": "int64", + "example": 1 + }, + "role": { + "type": "string", + "example": "Editor" + }, + "teams": 
{ + "type": "array", + "items": { + "type": "string" + }, + "example": [] + }, + "tokens": { + "type": "integer", + "format": "int64" + }, + "updatedAt": { + "type": "string", + "format": "date-time", + "example": "2022-03-21T14:35:33Z" + } + } + }, "SetUserRolesCommand": { "type": "object", "properties": { @@ -15485,6 +16074,44 @@ } } }, + "TokenDTO": { + "type": "object", + "properties": { + "created": { + "type": "string", + "format": "date-time", + "example": "2022-03-23T10:31:02Z" + }, + "expiration": { + "type": "string", + "format": "date-time", + "example": "2022-03-23T10:31:02Z" + }, + "hasExpired": { + "type": "boolean", + "example": false + }, + "id": { + "type": "integer", + "format": "int64", + "example": 1 + }, + "lastUsedAt": { + "type": "string", + "format": "date-time", + "example": "2022-03-23T10:31:02Z" + }, + "name": { + "type": "string", + "example": "grafana" + }, + "secondsUntilExpiration": { + "type": "number", + "format": "double", + "example": 0 + } + } + }, "TokenStatus": { "type": "integer", "format": "int64" @@ -15515,8 +16142,9 @@ "type": "string" }, "URL": { + "description": "The general form represented is:\n\n[scheme:][//[userinfo@]host][/]path[?query][#fragment]\n\nURLs that do not start with a slash after the scheme are interpreted as:\n\nscheme:opaque[?query][#fragment]\n\nNote that the Path field is stored in decoded form: /%47%6f%2f becomes /Go/.\nA consequence is that it is impossible to tell which slashes in the Path were\nslashes in the raw URL and which were %2f. This distinction is rarely important,\nbut when it is, the code should use RawPath, an optional field which only gets\nset if the default encoding is different from Path.\n\nURL's String method uses the EscapedPath method to obtain the path. 
See the\nEscapedPath method for more details.", "type": "object", - "title": "URL is a custom URL type that allows validation at configuration load time.", + "title": "A URL represents a parsed URL (technically, a URI reference).", "properties": { "ForceQuery": { "type": "boolean" @@ -15870,6 +16498,21 @@ } } }, + "UpdateServiceAccountForm": { + "type": "object", + "properties": { + "isDisabled": { + "type": "boolean" + }, + "name": { + "type": "string" + }, + "role": { + "type": "string", + "enum": ["Viewer", "Editor", "Admin"] + } + } + }, "UpdateTeamCommand": { "type": "object", "properties": { @@ -16650,6 +17293,7 @@ } }, "receiver": { + "description": "Receiver receiver", "type": "object", "required": ["name"], "properties": { @@ -16859,7 +17503,22 @@ "createReportResponse": { "description": "", "schema": { - "type": "object" + "type": "object", + "properties": { + "id": { + "type": "integer", + "format": "int64" + }, + "message": { + "type": "string" + } + } + } + }, + "createServiceAccountResponse": { + "description": "", + "schema": { + "$ref": "#/definitions/ServiceAccountDTO" } }, "createSnapshotResponse": { @@ -16904,6 +17563,12 @@ } } }, + "createTokenResponse": { + "description": "", + "schema": { + "$ref": "#/definitions/NewApiKeyResult" + } + }, "createUserResponse": { "description": "", "schema": { @@ -17273,7 +17938,7 @@ "getPermissionseResponse": { "description": "", "schema": { - "$ref": "#/definitions/AddPermissionDTO" + "$ref": "#/definitions/DataSourcePermissionsDTO" } }, "getPlaylistDashboardsResponse": { @@ -17491,6 +18156,12 @@ } } }, + "listTokensResponse": { + "description": "", + "schema": { + "$ref": "#/definitions/TokenDTO" + } + }, "lookupAlertNotificationChannelsResponse": { "description": "", "schema": { @@ -17641,6 +18312,12 @@ "$ref": "#/definitions/ActiveUserStats" } }, + "retrieveServiceAccountResponse": { + "description": "", + "schema": { + "$ref": "#/definitions/ServiceAccountDTO" + } + }, "searchOrgResponse": { 
"description": "", "schema": { @@ -17650,6 +18327,12 @@ } } }, + "searchOrgServiceAccountsWithPagingResponse": { + "description": "", + "schema": { + "$ref": "#/definitions/SearchServiceAccountsResult" + } + }, "searchPlaylistsResponse": { "description": "", "schema": { @@ -17727,6 +18410,27 @@ "$ref": "#/definitions/PlaylistDTO" } }, + "updateServiceAccountResponse": { + "description": "", + "schema": { + "type": "object", + "properties": { + "id": { + "type": "integer", + "format": "int64" + }, + "message": { + "type": "string" + }, + "name": { + "type": "string" + }, + "serviceaccount": { + "$ref": "#/definitions/ServiceAccountProfileDTO" + } + } + } + }, "userResponse": { "description": "", "schema": { @@ -17836,6 +18540,10 @@ { "description": "Grafana Alerting Prometheus-compatible endpoints", "name": "prometheus" + }, + { + "description": "If you are running Grafana Enterprise, for some endpoints you'll need to have specific permissions. Refer to [Role-based access control permissions](https://grafana.com/docs/grafana/latest/administration/roles-and-permissions/access-control/custom-role-actions-scopes/) for more information.", + "name": "service_accounts" } ] } diff --git a/public/api-spec.json b/public/api-spec.json index e9275cccdff..1a3a181b650 100644 --- a/public/api-spec.json +++ b/public/api-spec.json @@ -119,6 +119,13 @@ "tags": ["access_control", "enterprise"], "summary": "Get all roles.", "operationId": "getAllRoles", + "parameters": [ + { + "type": "boolean", + "name": "delegatable", + "in": "query" + } + ], "responses": { "200": { "$ref": "#/responses/getAllRolesResponse" @@ -142,7 +149,7 @@ "in": "body", "required": true, "schema": { - "$ref": "#/definitions/CreateRoleWithPermissionsCommand" + "$ref": "#/definitions/CreateRoleForm" } } ], @@ -233,6 +240,16 @@ "summary": "Delete a custom role.", "operationId": "deleteCustomRole", "parameters": [ + { + "type": "boolean", + "name": "force", + "in": "query" + }, + { + "type": "boolean", + "name": 
"global", + "in": "query" + }, { "type": "string", "name": "roleUID", @@ -7073,6 +7090,328 @@ } } }, + "/serviceaccounts": { + "post": { + "description": "Required permissions (See note in the [introduction](https://grafana.com/docs/grafana/latest/developers/http_api/serviceaccount/#service-account-api) for an explanation):\naction: `serviceaccounts:write` scope: `serviceaccounts:*`\n\nRequires basic authentication and that the authenticated user is a Grafana Admin.", + "tags": ["service_accounts"], + "summary": "Create service account", + "operationId": "createServiceAccount", + "parameters": [ + { + "name": "Body", + "in": "body", + "schema": { + "$ref": "#/definitions/CreateServiceAccountForm" + } + } + ], + "responses": { + "201": { + "$ref": "#/responses/createServiceAccountResponse" + }, + "400": { + "$ref": "#/responses/badRequestError" + }, + "401": { + "$ref": "#/responses/unauthorisedError" + }, + "403": { + "$ref": "#/responses/forbiddenError" + }, + "500": { + "$ref": "#/responses/internalServerError" + } + } + } + }, + "/serviceaccounts/search": { + "get": { + "description": "Required permissions (See note in the [introduction](https://grafana.com/docs/grafana/latest/developers/http_api/serviceaccount/#service-account-api) for an explanation):\naction: `serviceaccounts:read` scope: `serviceaccounts:*`", + "tags": ["service_accounts"], + "summary": "Search service accounts with Paging", + "operationId": "searchOrgServiceAccountsWithPaging", + "parameters": [ + { + "type": "boolean", + "name": "Disabled", + "in": "query" + }, + { + "type": "boolean", + "name": "expiredTokens", + "in": "query" + }, + { + "type": "string", + "description": "It will return results where the query value is contained in one of the name.\nQuery values with spaces need to be URL encoded.", + "name": "query", + "in": "query" + }, + { + "type": "integer", + "format": "int64", + "description": "The default value is 1000.", + "name": "perpage", + "in": "query" + }, + { + "type": 
"integer", + "format": "int64", + "description": "The default value is 1.", + "name": "page", + "in": "query" + } + ], + "responses": { + "200": { + "$ref": "#/responses/searchOrgServiceAccountsWithPagingResponse" + }, + "401": { + "$ref": "#/responses/unauthorisedError" + }, + "403": { + "$ref": "#/responses/forbiddenError" + }, + "500": { + "$ref": "#/responses/internalServerError" + } + } + } + }, + "/serviceaccounts/{serviceAccountId}": { + "get": { + "description": "Required permissions (See note in the [introduction](https://grafana.com/docs/grafana/latest/developers/http_api/serviceaccount/#service-account-api) for an explanation):\naction: `serviceaccounts:read` scope: `serviceaccounts:id:1` (single service account)", + "tags": ["service_accounts"], + "summary": "Get single serviceaccount by Id", + "operationId": "retrieveServiceAccount", + "parameters": [ + { + "type": "integer", + "format": "int64", + "name": "serviceAccountId", + "in": "path", + "required": true + } + ], + "responses": { + "200": { + "$ref": "#/responses/retrieveServiceAccountResponse" + }, + "400": { + "$ref": "#/responses/badRequestError" + }, + "401": { + "$ref": "#/responses/unauthorisedError" + }, + "403": { + "$ref": "#/responses/forbiddenError" + }, + "404": { + "$ref": "#/responses/notFoundError" + }, + "500": { + "$ref": "#/responses/internalServerError" + } + } + }, + "delete": { + "description": "Required permissions (See note in the [introduction](https://grafana.com/docs/grafana/latest/developers/http_api/serviceaccount/#service-account-api) for an explanation):\naction: `serviceaccounts:delete` scope: `serviceaccounts:id:1` (single service account)", + "tags": ["service_accounts"], + "summary": "Delete service account", + "operationId": "deleteServiceAccount", + "parameters": [ + { + "type": "integer", + "format": "int64", + "name": "serviceAccountId", + "in": "path", + "required": true + } + ], + "responses": { + "200": { + "$ref": "#/responses/okResponse" + }, + "400": { 
+ "$ref": "#/responses/badRequestError" + }, + "401": { + "$ref": "#/responses/unauthorisedError" + }, + "403": { + "$ref": "#/responses/forbiddenError" + }, + "500": { + "$ref": "#/responses/internalServerError" + } + } + }, + "patch": { + "description": "Required permissions (See note in the [introduction](https://grafana.com/docs/grafana/latest/developers/http_api/serviceaccount/#service-account-api) for an explanation):\naction: `serviceaccounts:write` scope: `serviceaccounts:id:1` (single service account)", + "tags": ["service_accounts"], + "summary": "Update service account", + "operationId": "updateServiceAccount", + "parameters": [ + { + "type": "integer", + "format": "int64", + "name": "serviceAccountId", + "in": "path", + "required": true + }, + { + "name": "Body", + "in": "body", + "schema": { + "$ref": "#/definitions/UpdateServiceAccountForm" + } + } + ], + "responses": { + "200": { + "$ref": "#/responses/updateServiceAccountResponse" + }, + "400": { + "$ref": "#/responses/badRequestError" + }, + "401": { + "$ref": "#/responses/unauthorisedError" + }, + "403": { + "$ref": "#/responses/forbiddenError" + }, + "404": { + "$ref": "#/responses/notFoundError" + }, + "500": { + "$ref": "#/responses/internalServerError" + } + } + } + }, + "/serviceaccounts/{serviceAccountId}/tokens": { + "get": { + "description": "Required permissions (See note in the [introduction](https://grafana.com/docs/grafana/latest/developers/http_api/serviceaccount/#service-account-api) for an explanation):\naction: `serviceaccounts:read` scope: `global:serviceaccounts:id:1` (single service account)\n\nRequires basic authentication and that the authenticated user is a Grafana Admin.", + "tags": ["service_accounts"], + "summary": "Get service account tokens", + "operationId": "listTokens", + "parameters": [ + { + "type": "integer", + "format": "int64", + "name": "serviceAccountId", + "in": "path", + "required": true + } + ], + "responses": { + "200": { + "$ref": 
"#/responses/listTokensResponse" + }, + "400": { + "$ref": "#/responses/badRequestError" + }, + "401": { + "$ref": "#/responses/unauthorisedError" + }, + "403": { + "$ref": "#/responses/forbiddenError" + }, + "500": { + "$ref": "#/responses/internalServerError" + } + } + }, + "post": { + "description": "Required permissions (See note in the [introduction](https://grafana.com/docs/grafana/latest/developers/http_api/serviceaccount/#service-account-api) for an explanation):\naction: `serviceaccounts:write` scope: `serviceaccounts:id:1` (single service account)", + "tags": ["service_accounts"], + "summary": "Create service account tokens", + "operationId": "createToken", + "parameters": [ + { + "type": "integer", + "format": "int64", + "name": "serviceAccountId", + "in": "path", + "required": true + }, + { + "name": "Body", + "in": "body", + "schema": { + "$ref": "#/definitions/AddServiceAccountTokenCommand" + } + } + ], + "responses": { + "200": { + "$ref": "#/responses/createTokenResponse" + }, + "400": { + "$ref": "#/responses/badRequestError" + }, + "401": { + "$ref": "#/responses/unauthorisedError" + }, + "403": { + "$ref": "#/responses/forbiddenError" + }, + "404": { + "$ref": "#/responses/notFoundError" + }, + "409": { + "$ref": "#/responses/conflictError" + }, + "500": { + "$ref": "#/responses/internalServerError" + } + } + } + }, + "/serviceaccounts/{serviceAccountId}/tokens/{tokenId}": { + "delete": { + "description": "Required permissions (See note in the [introduction](https://grafana.com/docs/grafana/latest/developers/http_api/serviceaccount/#service-account-api) for an explanation):\naction: `serviceaccounts:write` scope: `serviceaccounts:id:1` (single service account)\n\nRequires basic authentication and that the authenticated user is a Grafana Admin.", + "tags": ["service_accounts"], + "summary": "Delete service account tokens", + "operationId": "deleteToken", + "parameters": [ + { + "type": "integer", + "format": "int64", + "name": "tokenId", + "in": 
"path", + "required": true + }, + { + "type": "integer", + "format": "int64", + "name": "serviceAccountId", + "in": "path", + "required": true + } + ], + "responses": { + "200": { + "$ref": "#/responses/okResponse" + }, + "400": { + "$ref": "#/responses/badRequestError" + }, + "401": { + "$ref": "#/responses/unauthorisedError" + }, + "403": { + "$ref": "#/responses/forbiddenError" + }, + "404": { + "$ref": "#/responses/notFoundError" + }, + "500": { + "$ref": "#/responses/internalServerError" + } + } + } + }, "/snapshot/shared-options": { "get": { "tags": ["snapshots"], @@ -7374,16 +7713,15 @@ "operationId": "removeTeamGroupApi", "parameters": [ { - "type": "integer", - "format": "int64", - "name": "teamId", + "type": "string", + "name": "groupId", "in": "path", "required": true }, { "type": "integer", "format": "int64", - "name": "groupId", + "name": "teamId", "in": "path", "required": true } @@ -8585,6 +8923,18 @@ } } }, + "AddServiceAccountTokenCommand": { + "type": "object", + "properties": { + "name": { + "type": "string" + }, + "secondsToLive": { + "type": "integer", + "format": "int64" + } + } + }, "AddTeamMemberCommand": { "type": "object", "properties": { @@ -9427,7 +9777,7 @@ } } }, - "CreateRoleWithPermissionsCommand": { + "CreateRoleForm": { "type": "object", "properties": { "description": { @@ -9436,6 +9786,9 @@ "displayName": { "type": "string" }, + "global": { + "type": "boolean" + }, "group": { "type": "string" }, @@ -9460,6 +9813,24 @@ } } }, + "CreateServiceAccountForm": { + "type": "object", + "properties": { + "isDisabled": { + "type": "boolean", + "example": false + }, + "name": { + "type": "string", + "example": "grafana" + }, + "role": { + "type": "string", + "enum": ["Viewer", "Editor", "Admin"], + "example": "Admin" + } + } + }, "CreateTeamCommand": { "type": "object", "properties": { @@ -10099,6 +10470,83 @@ } } }, + "DataSourcePermissionRuleDTO": { + "type": "object", + "properties": { + "builtInRole": { + "type": "string" + }, + 
"created": { + "type": "string", + "format": "date-time" + }, + "datasourceId": { + "type": "integer", + "format": "int64" + }, + "id": { + "type": "integer", + "format": "int64" + }, + "isManaged": { + "type": "boolean" + }, + "permission": { + "$ref": "#/definitions/DsPermissionType" + }, + "permissionName": { + "type": "string" + }, + "team": { + "type": "string" + }, + "teamAvatarUrl": { + "type": "string" + }, + "teamEmail": { + "type": "string" + }, + "teamId": { + "type": "integer", + "format": "int64" + }, + "updated": { + "type": "string", + "format": "date-time" + }, + "userAvatarUrl": { + "type": "string" + }, + "userEmail": { + "type": "string" + }, + "userId": { + "type": "integer", + "format": "int64" + }, + "userLogin": { + "type": "string" + } + } + }, + "DataSourcePermissionsDTO": { + "type": "object", + "properties": { + "datasourceId": { + "type": "integer", + "format": "int64" + }, + "enabled": { + "type": "boolean" + }, + "permissions": { + "type": "array", + "items": { + "$ref": "#/definitions/DataSourcePermissionRuleDTO" + } + } + } + }, "DeleteTokenCommand": { "type": "object", "properties": { @@ -11024,13 +11472,16 @@ "properties": { "id": { "type": "integer", - "format": "int64" + "format": "int64", + "example": 1 }, "key": { - "type": "string" + "type": "string", + "example": "glsa_yscW25imSKJIuav8zF37RZmnbiDvB05G_fcaaf58a" }, "name": { - "type": "string" + "type": "string", + "example": "grafana" } } }, @@ -11963,6 +12414,31 @@ } } }, + "SearchServiceAccountsResult": { + "description": "swagger: model", + "type": "object", + "properties": { + "page": { + "type": "integer", + "format": "int64" + }, + "perPage": { + "type": "integer", + "format": "int64" + }, + "serviceAccounts": { + "type": "array", + "items": { + "$ref": "#/definitions/ServiceAccountDTO" + } + }, + "totalCount": { + "description": "It can be used for pagination of the user list\nE.g. 
if totalCount is equal to 100 users and\nthe perpage parameter is set to 10 then there are 10 pages of users.", + "type": "integer", + "format": "int64" + } + } + }, "SearchTeamQueryResult": { "type": "object", "properties": { @@ -12009,6 +12485,119 @@ } } }, + "ServiceAccountDTO": { + "description": "swagger: model", + "type": "object", + "properties": { + "accessControl": { + "type": "object", + "additionalProperties": { + "type": "boolean" + }, + "example": { + "serviceaccounts:delete": true, + "serviceaccounts:read": true, + "serviceaccounts:write": true + } + }, + "avatarUrl": { + "type": "string", + "example": "/avatar/85ec38023d90823d3e5b43ef35646af9" + }, + "id": { + "type": "integer", + "format": "int64" + }, + "isDisabled": { + "type": "boolean", + "example": false + }, + "login": { + "type": "string", + "example": "sa-grafana" + }, + "name": { + "type": "string", + "example": "grafana" + }, + "orgId": { + "type": "integer", + "format": "int64", + "example": 1 + }, + "role": { + "type": "string", + "example": "Viewer" + }, + "tokens": { + "type": "integer", + "format": "int64", + "example": 0 + } + } + }, + "ServiceAccountProfileDTO": { + "type": "object", + "properties": { + "accessControl": { + "type": "object", + "additionalProperties": { + "type": "boolean" + } + }, + "avatarUrl": { + "type": "string", + "example": "/avatar/8ea890a677d6a223c591a1beea6ea9d2" + }, + "createdAt": { + "type": "string", + "format": "date-time", + "example": "2022-03-21T14:35:33Z" + }, + "id": { + "type": "integer", + "format": "int64", + "example": 2 + }, + "isDisabled": { + "type": "boolean", + "example": false + }, + "login": { + "type": "string", + "example": "sa-grafana" + }, + "name": { + "type": "string", + "example": "test" + }, + "orgId": { + "type": "integer", + "format": "int64", + "example": 1 + }, + "role": { + "type": "string", + "example": "Editor" + }, + "teams": { + "type": "array", + "items": { + "type": "string" + }, + "example": [] + }, + "tokens": { + 
"type": "integer", + "format": "int64" + }, + "updatedAt": { + "type": "string", + "format": "date-time", + "example": "2022-03-21T14:35:33Z" + } + } + }, "SetUserRolesCommand": { "type": "object", "properties": { @@ -12408,6 +12997,44 @@ } } }, + "TokenDTO": { + "type": "object", + "properties": { + "created": { + "type": "string", + "format": "date-time", + "example": "2022-03-23T10:31:02Z" + }, + "expiration": { + "type": "string", + "format": "date-time", + "example": "2022-03-23T10:31:02Z" + }, + "hasExpired": { + "type": "boolean", + "example": false + }, + "id": { + "type": "integer", + "format": "int64", + "example": 1 + }, + "lastUsedAt": { + "type": "string", + "format": "date-time", + "example": "2022-03-23T10:31:02Z" + }, + "name": { + "type": "string", + "example": "grafana" + }, + "secondsUntilExpiration": { + "type": "number", + "format": "double", + "example": 0 + } + } + }, "TokenStatus": { "type": "integer", "format": "int64" @@ -12757,6 +13384,21 @@ } } }, + "UpdateServiceAccountForm": { + "type": "object", + "properties": { + "isDisabled": { + "type": "boolean" + }, + "name": { + "type": "string" + }, + "role": { + "type": "string", + "enum": ["Viewer", "Editor", "Admin"] + } + } + }, "UpdateTeamCommand": { "type": "object", "properties": { @@ -13164,7 +13806,22 @@ "createReportResponse": { "description": "", "schema": { - "type": "object" + "type": "object", + "properties": { + "id": { + "type": "integer", + "format": "int64" + }, + "message": { + "type": "string" + } + } + } + }, + "createServiceAccountResponse": { + "description": "", + "schema": { + "$ref": "#/definitions/ServiceAccountDTO" } }, "createSnapshotResponse": { @@ -13209,6 +13866,12 @@ } } }, + "createTokenResponse": { + "description": "", + "schema": { + "$ref": "#/definitions/NewApiKeyResult" + } + }, "createUserResponse": { "description": "", "schema": { @@ -13578,7 +14241,7 @@ "getPermissionseResponse": { "description": "", "schema": { - "$ref": 
"#/definitions/AddPermissionDTO" + "$ref": "#/definitions/DataSourcePermissionsDTO" } }, "getPlaylistDashboardsResponse": { @@ -13796,6 +14459,12 @@ } } }, + "listTokensResponse": { + "description": "", + "schema": { + "$ref": "#/definitions/TokenDTO" + } + }, "lookupAlertNotificationChannelsResponse": { "description": "", "schema": { @@ -13946,6 +14615,12 @@ "$ref": "#/definitions/ActiveUserStats" } }, + "retrieveServiceAccountResponse": { + "description": "", + "schema": { + "$ref": "#/definitions/ServiceAccountDTO" + } + }, "searchOrgResponse": { "description": "", "schema": { @@ -13955,6 +14630,12 @@ } } }, + "searchOrgServiceAccountsWithPagingResponse": { + "description": "", + "schema": { + "$ref": "#/definitions/SearchServiceAccountsResult" + } + }, "searchPlaylistsResponse": { "description": "", "schema": { @@ -14032,6 +14713,27 @@ "$ref": "#/definitions/PlaylistDTO" } }, + "updateServiceAccountResponse": { + "description": "", + "schema": { + "type": "object", + "properties": { + "id": { + "type": "integer", + "format": "int64" + }, + "message": { + "type": "string" + }, + "name": { + "type": "string" + }, + "serviceaccount": { + "$ref": "#/definitions/ServiceAccountProfileDTO" + } + } + } + }, "userResponse": { "description": "", "schema": { @@ -14141,6 +14843,10 @@ { "description": "Grafana Alerting Prometheus-compatible endpoints", "name": "prometheus" + }, + { + "description": "If you are running Grafana Enterprise, for some endpoints you'll need to have specific permissions. 
Refer to [Role-based access control permissions](https://grafana.com/docs/grafana/latest/administration/roles-and-permissions/access-control/custom-role-actions-scopes/) for more information.", + "name": "service_accounts" } ] } From e0d1af186781ae5d77dfb7ee7788cdd4aed886ec Mon Sep 17 00:00:00 2001 From: Dimitris Sotirakis Date: Tue, 19 Jul 2022 14:38:43 +0300 Subject: [PATCH 056/116] Remove volumes from build-e2e pipeline on main (#52386) --- .drone.yml | 8 +------- scripts/drone/pipelines/main.star | 1 - 2 files changed, 1 insertion(+), 8 deletions(-) diff --git a/.drone.yml b/.drone.yml index d83de1d157f..557076fe940 100644 --- a/.drone.yml +++ b/.drone.yml @@ -1305,12 +1305,6 @@ volumes: - host: path: /var/run/docker.sock name: docker -- name: postgres - temp: - medium: memory -- name: mysql - temp: - medium: memory --- depends_on: [] kind: pipeline @@ -4880,6 +4874,6 @@ kind: secret name: gcp_upload_artifacts_key --- kind: signature -hmac: 55383abbbc205824d35aa689a0e00f374e74520d77dc387e354b063e4ade0869 +hmac: 6ca96adcc90cb32d6ded5bcd804548f5c2a5dfdfa1791761f150e1447852357a ... 
diff --git a/scripts/drone/pipelines/main.star b/scripts/drone/pipelines/main.star index 1080a2f2fd0..27c8b678c52 100644 --- a/scripts/drone/pipelines/main.star +++ b/scripts/drone/pipelines/main.star @@ -219,7 +219,6 @@ def main_pipelines(edition): pipelines = [docs_pipelines(edition, ver_mode, trigger), main_test_frontend(), main_test_backend(), pipeline( name='main-build-e2e-publish', edition=edition, trigger=trigger, services=[], steps=init_steps + build_steps, - volumes=volumes, ), pipeline( name='main-integration-tests', edition=edition, trigger=trigger, services=services, steps=[download_grabpl_step(), identify_runner_step(), verify_gen_cue_step(edition="oss"), wire_install_step(), ] + integration_test_steps, From 49311e1cfbd84c2ca165e08e2ce5bd59cab4d0ab Mon Sep 17 00:00:00 2001 From: Dominik Prokop Date: Tue, 19 Jul 2022 14:07:27 +0200 Subject: [PATCH 057/116] Legend: Right align calculation values (#52400) --- .../grafana-ui/src/components/VizLegend/VizLegendTable.tsx | 4 ++-- .../src/components/VizLegend/VizLegendTableItem.tsx | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/packages/grafana-ui/src/components/VizLegend/VizLegendTable.tsx b/packages/grafana-ui/src/components/VizLegend/VizLegendTable.tsx index 06d5666076f..21e786e6cf5 100644 --- a/packages/grafana-ui/src/components/VizLegend/VizLegendTable.tsx +++ b/packages/grafana-ui/src/components/VizLegend/VizLegendTable.tsx @@ -108,9 +108,9 @@ const getStyles = (theme: GrafanaTheme2) => ({ color: ${theme.colors.primary.text}; font-weight: ${theme.typography.fontWeightMedium}; border-bottom: 1px solid ${theme.colors.border.weak}; - padding: ${theme.spacing(0.25, 2, 0.25, 1)}; + padding: ${theme.spacing(0.25, 1, 0.25, 1)}; font-size: ${theme.typography.bodySmall.fontSize}; - text-align: left; + text-align: right; white-space: nowrap; `, // This needs to be padding-right - icon size(xs==12) to avoid jumping diff --git 
a/packages/grafana-ui/src/components/VizLegend/VizLegendTableItem.tsx b/packages/grafana-ui/src/components/VizLegend/VizLegendTableItem.tsx index 2454ffaf38e..cad8a3d9674 100644 --- a/packages/grafana-ui/src/components/VizLegend/VizLegendTableItem.tsx +++ b/packages/grafana-ui/src/components/VizLegend/VizLegendTableItem.tsx @@ -123,7 +123,7 @@ const getStyles = (theme: GrafanaTheme2) => { align-items: center; `, value: css` - text-align: left; + text-align: right; `, yAxisLabel: css` color: ${theme.colors.text.secondary}; From c60487fdbf38f9964272b09bce526e4b763da824 Mon Sep 17 00:00:00 2001 From: Leo <108552997+lpskdl@users.noreply.github.com> Date: Tue, 19 Jul 2022 14:17:22 +0200 Subject: [PATCH 058/116] Plugins: Convert BarGaugePanel to RTL (#52423) * Plugins: Convert bargaugepanel tests to RTL * Chore: Improved test methods used in BarGaugePanel --- .betterer.results | 6 - .../panel/bargauge/BarGaugePanel.test.tsx | 186 ++++++++++-------- .../plugins/panel/bargauge/BarGaugePanel.tsx | 3 +- 3 files changed, 109 insertions(+), 86 deletions(-) diff --git a/.betterer.results b/.betterer.results index 914a3afa637..6b4f429c1f8 100644 --- a/.betterer.results +++ b/.betterer.results @@ -115,9 +115,6 @@ exports[`no enzyme tests`] = { ], "public/app/plugins/datasource/prometheus/configuration/AzureCredentialsForm.test.tsx:3424320489": [ [0, 19, 13, "RegExp match", "2409514259"] - ], - "public/app/plugins/panel/bargauge/BarGaugePanel.test.tsx:3368730691": [ - [0, 31, 13, "RegExp match", "2409514259"] ] }` }; @@ -8724,9 +8721,6 @@ exports[`better eslint`] = { "public/app/plugins/panel/bargauge/BarGaugeMigrations.test.ts:5381": [ [0, 0, 0, "Unexpected any. Specify a different type.", "0"] ], - "public/app/plugins/panel/bargauge/BarGaugePanel.test.tsx:5381": [ - [0, 0, 0, "Unexpected any. Specify a different type.", "0"] - ], "public/app/plugins/panel/bargauge/BarGaugePanel.tsx:5381": [ [0, 0, 0, "Unexpected any. 
Specify a different type.", "0"] ], diff --git a/public/app/plugins/panel/bargauge/BarGaugePanel.test.tsx b/public/app/plugins/panel/bargauge/BarGaugePanel.test.tsx index e5946a60ee3..5b5b64436b4 100644 --- a/public/app/plugins/panel/bargauge/BarGaugePanel.test.tsx +++ b/public/app/plugins/panel/bargauge/BarGaugePanel.test.tsx @@ -1,61 +1,130 @@ -import { mount, ReactWrapper } from 'enzyme'; +import { render, screen } from '@testing-library/react'; +import { uniqueId } from 'lodash'; import React from 'react'; -import { - dateMath, - dateTime, - FieldConfigSource, - LoadingState, - PanelData, - PanelProps, - TimeRange, - toDataFrame, - VizOrientation, -} from '@grafana/data'; +import { dateMath, dateTime, EventBus, LoadingState, TimeRange, toDataFrame, VizOrientation } from '@grafana/data'; import { selectors } from '@grafana/e2e-selectors'; import { BarGaugeDisplayMode } from '@grafana/ui'; -import { BarGaugePanel } from './BarGaugePanel'; -import { PanelOptions } from './models.gen'; +import { BarGaugePanel, BarGaugePanelProps } from './BarGaugePanel'; const valueSelector = selectors.components.Panels.Visualization.BarGauge.valueV2; describe('BarGaugePanel', () => { - describe('when empty result is rendered', () => { - const wrapper = createBarGaugePanelWithData({ - series: [], - timeRange: createTimeRange(), - state: LoadingState.Done, - }); + describe('when there is no data', () => { + it('show a "No Data" message', () => { + const panelData = buildPanelData(); - it('should render with title "No data"', () => { - const displayValue = wrapper.find(`div[data-testid="${valueSelector}"]`).text(); - expect(displayValue).toBe('No data'); + render(); + + expect(screen.getByText(/no data/i)).toBeInTheDocument(); }); }); describe('when there is data', () => { - const wrapper = createBarGaugePanelWithData({ - series: [ - toDataFrame({ - target: 'test', - datapoints: [ - [100, 1000], - [100, 200], + it('shows the panel', () => { + const firstBarPanel = 'firstBarPanel'; + 
const secondBarPanel = 'secondBarPanel'; + const panelData = buildPanelData({ + data: { + series: [ + toDataFrame({ + target: firstBarPanel, + datapoints: [ + [100, 1000], + [100, 200], + ], + }), ], - }), - ], - timeRange: createTimeRange(), - state: LoadingState.Done, - }); + timeRange: createTimeRange(), + state: LoadingState.Done, + }, + }); - it('should render with title "No data"', () => { - const displayValue = wrapper.find(`div[data-testid="${valueSelector}"]`).text(); - expect(displayValue).toBe('100'); + const { rerender } = render(); + expect(screen.queryByText(/100/)).toBeInTheDocument(); + expect(screen.queryByText(/firstbarpanel/i)).not.toBeInTheDocument(); + expect(screen.getByTestId(valueSelector)).toBeInTheDocument(); + + rerender( + + ); + + expect(screen.queryByText(/firstbarpanel/i)).toBeInTheDocument(); + expect(screen.queryByText(/secondbarpanel/i)).toBeInTheDocument(); + expect(screen.queryByText(/200/)).toBeInTheDocument(); + expect(screen.queryByText(/300/)).toBeInTheDocument(); + expect(screen.getAllByTestId(valueSelector).length).toEqual(2); }); }); }); +function buildPanelData(overrideValues?: Partial): BarGaugePanelProps { + const timeRange = createTimeRange(); + const defaultValues = { + id: Number(uniqueId()), + data: { + series: [], + state: LoadingState.Done, + timeRange, + }, + options: { + displayMode: BarGaugeDisplayMode.Lcd, + reduceOptions: { + calcs: ['mean'], + values: false, + }, + orientation: VizOrientation.Horizontal, + showUnfilled: true, + minVizHeight: 10, + minVizWidth: 0, + }, + transparent: false, + timeRange, + timeZone: 'utc', + title: 'hello', + fieldConfig: { + defaults: {}, + overrides: [], + }, + onFieldConfigChange: jest.fn(), + onOptionsChange: jest.fn(), + onChangeTimeRange: jest.fn(), + replaceVariables: jest.fn(), + renderCounter: 0, + width: 552, + height: 250, + eventBus: {} as EventBus, + }; + + return { + ...defaultValues, + ...overrideValues, + }; +} function createTimeRange(): TimeRange { return { 
from: dateMath.parse('now-6h') || dateTime(), @@ -63,44 +132,3 @@ function createTimeRange(): TimeRange { raw: { from: 'now-6h', to: 'now' }, }; } - -function createBarGaugePanelWithData(data: PanelData): ReactWrapper> { - const timeRange = createTimeRange(); - - const options: PanelOptions = { - displayMode: BarGaugeDisplayMode.Lcd, - reduceOptions: { - calcs: ['mean'], - values: false, - }, - orientation: VizOrientation.Horizontal, - showUnfilled: true, - minVizHeight: 10, - minVizWidth: 0, - }; - const fieldConfig: FieldConfigSource = { - defaults: {}, - overrides: [], - }; - - return mount( - {}} - onOptionsChange={() => {}} - onChangeTimeRange={() => {}} - replaceVariables={(s) => s} - renderCounter={0} - width={532} - transparent={false} - height={250} - eventBus={{} as any} - /> - ); -} diff --git a/public/app/plugins/panel/bargauge/BarGaugePanel.tsx b/public/app/plugins/panel/bargauge/BarGaugePanel.tsx index e044661b5a6..812db7b911a 100644 --- a/public/app/plugins/panel/bargauge/BarGaugePanel.tsx +++ b/public/app/plugins/panel/bargauge/BarGaugePanel.tsx @@ -18,7 +18,7 @@ import { config } from 'app/core/config'; import { PanelOptions } from './models.gen'; -export class BarGaugePanel extends PureComponent> { +export class BarGaugePanel extends PureComponent { renderComponent = ( valueProps: VizRepeaterRenderValueProps, menuProps: DataLinksContextMenuApi @@ -109,6 +109,7 @@ export class BarGaugePanel extends PureComponent> { ); } } +export type BarGaugePanelProps = PanelProps; export function clearNameForSingleSeries(count: number, field: FieldConfig, display: DisplayValue): DisplayValue { if (count === 1 && !field.displayName) { From 054fe54b03d4829c4b48d6ce62d93e1790d2c9a2 Mon Sep 17 00:00:00 2001 From: Yuriy Tseretyan Date: Tue, 19 Jul 2022 09:32:54 -0400 Subject: [PATCH 059/116] Alerting: Split Scheduler and AlertRouter tests (#52416) * move fake FakeExternalAlertmanager to sender package * move tests from scheduler to router * update alerts router to 
have all fields private * update scheduler tests to use sender mock --- pkg/services/ngalert/models/testing.go | 10 +- .../ngalert/schedule/schedule_unit_test.go | 434 +++--------------- pkg/services/ngalert/sender/router.go | 76 +-- pkg/services/ngalert/sender/router_test.go | 362 +++++++++++++++ pkg/services/ngalert/sender/testing.go | 88 ++++ .../ngalert/store/admin_configuration.go | 1 + .../store/admin_configuration_store_mock.go | 186 ++++++++ pkg/services/ngalert/store/testing.go | 82 ---- .../alerting/api_admin_configuration_test.go | 8 +- 9 files changed, 747 insertions(+), 500 deletions(-) create mode 100644 pkg/services/ngalert/sender/router_test.go create mode 100644 pkg/services/ngalert/sender/testing.go create mode 100644 pkg/services/ngalert/store/admin_configuration_store_mock.go diff --git a/pkg/services/ngalert/models/testing.go b/pkg/services/ngalert/models/testing.go index 641f1721d3f..75762d66143 100644 --- a/pkg/services/ngalert/models/testing.go +++ b/pkg/services/ngalert/models/testing.go @@ -185,7 +185,15 @@ func GenerateAlertRules(count int, f func() *AlertRule) []*AlertRule { return result } -// GenerateGroupKey generates many random alert rules. 
Does not guarantee that rules are unique (by UID) +// GenerateRuleKey generates a random alert rule key +func GenerateRuleKey(orgID int64) AlertRuleKey { + return AlertRuleKey{ + OrgID: orgID, + UID: util.GenerateShortUID(), + } +} + +// GenerateGroupKey generates a random group key func GenerateGroupKey(orgID int64) AlertRuleGroupKey { return AlertRuleGroupKey{ OrgID: orgID, diff --git a/pkg/services/ngalert/schedule/schedule_unit_test.go b/pkg/services/ngalert/schedule/schedule_unit_test.go index 7ad5669fa09..f54f6700371 100644 --- a/pkg/services/ngalert/schedule/schedule_unit_test.go +++ b/pkg/services/ngalert/schedule/schedule_unit_test.go @@ -16,6 +16,9 @@ import ( "github.com/grafana/grafana-plugin-sdk-go/data" "github.com/prometheus/client_golang/prometheus" "github.com/prometheus/client_golang/prometheus/testutil" + prometheusModel "github.com/prometheus/common/model" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/mock" "github.com/stretchr/testify/require" busmock "github.com/grafana/grafana/pkg/bus/mock" @@ -23,13 +26,11 @@ import ( "github.com/grafana/grafana/pkg/infra/log" "github.com/grafana/grafana/pkg/services/annotations" "github.com/grafana/grafana/pkg/services/dashboards" + "github.com/grafana/grafana/pkg/services/ngalert/api/tooling/definitions" "github.com/grafana/grafana/pkg/services/ngalert/eval" "github.com/grafana/grafana/pkg/services/ngalert/image" "github.com/grafana/grafana/pkg/services/ngalert/metrics" "github.com/grafana/grafana/pkg/services/ngalert/models" - "github.com/grafana/grafana/pkg/services/ngalert/notifier" - "github.com/grafana/grafana/pkg/services/ngalert/provisioning" - "github.com/grafana/grafana/pkg/services/ngalert/sender" "github.com/grafana/grafana/pkg/services/ngalert/state" "github.com/grafana/grafana/pkg/services/ngalert/store" "github.com/grafana/grafana/pkg/services/secrets/fakes" @@ -38,317 +39,20 @@ import ( "github.com/grafana/grafana/pkg/util" ) -func TestSendingToExternalAlertmanager(t 
*testing.T) { - fakeAM := store.NewFakeExternalAlertmanager(t) - defer fakeAM.Close() - fakeRuleStore := store.NewFakeRuleStore(t) - fakeInstanceStore := &store.FakeInstanceStore{} - fakeAdminConfigStore := store.NewFakeAdminConfigStore(t) - - // create alert rule with one second interval - alertRule := CreateTestAlertRule(t, fakeRuleStore, 1, 1, eval.Alerting) - - // First, let's create an admin configuration that holds an alertmanager. - adminConfig := &models.AdminConfiguration{OrgID: 1, Alertmanagers: []string{fakeAM.Server.URL}, SendAlertsTo: models.AllAlertmanagers} - cmd := store.UpdateAdminConfigurationCmd{AdminConfiguration: adminConfig} - require.NoError(t, fakeAdminConfigStore.UpdateAdminConfiguration(cmd)) - - sched, mockedClock, alertsRouter := setupScheduler(t, fakeRuleStore, fakeInstanceStore, fakeAdminConfigStore, nil) - - // Make sure we sync the configuration at least once before the evaluation happens to guarantee the sender is running - // when the first alert triggers. - require.NoError(t, alertsRouter.SyncAndApplyConfigFromDatabase()) - alertsRouter.AdminConfigMtx.Lock() - require.Equal(t, 1, len(alertsRouter.Senders)) - require.Equal(t, 1, len(alertsRouter.SendersCfgHash)) - alertsRouter.AdminConfigMtx.Unlock() - - // Then, ensure we've discovered the Alertmanager. - require.Eventually(t, func() bool { - return len(alertsRouter.AlertmanagersFor(1)) == 1 && len(alertsRouter.DroppedAlertmanagersFor(1)) == 0 - }, 10*time.Second, 200*time.Millisecond) - - ctx, cancel := context.WithCancel(context.Background()) - t.Cleanup(func() { - cancel() - }) - go func() { - err := sched.Run(ctx) - require.NoError(t, err) - }() - - // With everything up and running, let's advance the time to make sure we get at least one alert iteration. - mockedClock.Add(2 * time.Second) - - // Eventually, our Alertmanager should have received at least one alert. 
- require.Eventually(t, func() bool { - return fakeAM.AlertsCount() >= 1 && fakeAM.AlertNamesCompare([]string{alertRule.Title}) - }, 10*time.Second, 200*time.Millisecond) - - // Now, let's remove the Alertmanager from the admin configuration. - adminConfig.Alertmanagers = []string{} - cmd = store.UpdateAdminConfigurationCmd{AdminConfiguration: adminConfig} - require.NoError(t, fakeAdminConfigStore.UpdateAdminConfiguration(cmd)) - - // Again, make sure we sync and verify the senders. - require.NoError(t, alertsRouter.SyncAndApplyConfigFromDatabase()) - alertsRouter.AdminConfigMtx.Lock() - require.Equal(t, 0, len(alertsRouter.Senders)) - require.Equal(t, 0, len(alertsRouter.SendersCfgHash)) - alertsRouter.AdminConfigMtx.Unlock() - - // Then, ensure we've dropped the Alertmanager. - require.Eventually(t, func() bool { - return len(alertsRouter.AlertmanagersFor(1)) == 0 && len(alertsRouter.DroppedAlertmanagersFor(1)) == 0 - }, 10*time.Second, 200*time.Millisecond) -} - -func TestSendingToExternalAlertmanager_WithMultipleOrgs(t *testing.T) { - fakeAM := store.NewFakeExternalAlertmanager(t) - defer fakeAM.Close() - fakeRuleStore := store.NewFakeRuleStore(t) - fakeInstanceStore := &store.FakeInstanceStore{} - fakeAdminConfigStore := store.NewFakeAdminConfigStore(t) - - // First, let's create an admin configuration that holds an alertmanager. - adminConfig := &models.AdminConfiguration{OrgID: 1, Alertmanagers: []string{fakeAM.Server.URL}} - cmd := store.UpdateAdminConfigurationCmd{AdminConfiguration: adminConfig} - require.NoError(t, fakeAdminConfigStore.UpdateAdminConfiguration(cmd)) - - sched, mockedClock, alertsRouter := setupScheduler(t, fakeRuleStore, fakeInstanceStore, fakeAdminConfigStore, nil) - - // Make sure we sync the configuration at least once before the evaluation happens to guarantee the sender is running - // when the first alert triggers. 
- require.NoError(t, alertsRouter.SyncAndApplyConfigFromDatabase()) - alertsRouter.AdminConfigMtx.Lock() - require.Equal(t, 1, len(alertsRouter.Senders)) - require.Equal(t, 1, len(alertsRouter.SendersCfgHash)) - alertsRouter.AdminConfigMtx.Unlock() - - // Then, ensure we've discovered the Alertmanager. - require.Eventuallyf(t, func() bool { - return len(alertsRouter.AlertmanagersFor(1)) == 1 && len(alertsRouter.DroppedAlertmanagersFor(1)) == 0 - }, 10*time.Second, 200*time.Millisecond, "Alertmanager for org 1 was never discovered") - - ctx, cancel := context.WithCancel(context.Background()) - t.Cleanup(func() { - cancel() - }) - go func() { - err := sched.Run(ctx) - require.NoError(t, err) - }() - - // 1. Now, let's assume a new org comes along. - adminConfig2 := &models.AdminConfiguration{OrgID: 2, Alertmanagers: []string{fakeAM.Server.URL}} - cmd = store.UpdateAdminConfigurationCmd{AdminConfiguration: adminConfig2} - require.NoError(t, fakeAdminConfigStore.UpdateAdminConfiguration(cmd)) - - // If we sync again, new senders must have spawned. - require.NoError(t, alertsRouter.SyncAndApplyConfigFromDatabase()) - alertsRouter.AdminConfigMtx.Lock() - require.Equal(t, 2, len(alertsRouter.Senders)) - require.Equal(t, 2, len(alertsRouter.SendersCfgHash)) - alertsRouter.AdminConfigMtx.Unlock() - - // Then, ensure we've discovered the Alertmanager for the new organization. - require.Eventuallyf(t, func() bool { - return len(alertsRouter.AlertmanagersFor(2)) == 1 && len(alertsRouter.DroppedAlertmanagersFor(2)) == 0 - }, 10*time.Second, 200*time.Millisecond, "Alertmanager for org 2 was never discovered") - - // With everything up and running, let's advance the time to make sure we get at least one alert iteration. - mockedClock.Add(10 * time.Second) - - // TODO(gotjosh): Disabling this assertion as for some reason even after advancing the clock the alert is not being delivered. 
- // the check previous to this assertion would ensure that the sender is up and running before sending the notification. - // However, sometimes this does not happen. - - // Create two alert rules with one second interval. - // alertRuleOrgOne := CreateTestAlertRule(t, FakeRuleStore, 1, 1) - // alertRuleOrgTwo := CreateTestAlertRule(t, FakeRuleStore, 1, 2) - // Eventually, our Alertmanager should have received at least two alerts. - // var count int - // require.Eventuallyf(t, func() bool { - // count := fakeAM.AlertsCount() - // return count == 2 && fakeAM.AlertNamesCompare([]string{alertRuleOrgOne.Title, alertRuleOrgTwo.Title}) - // }, 20*time.Second, 200*time.Millisecond, "Alertmanager never received an '%s' from org 1 or '%s' from org 2, the alert count was: %d", alertRuleOrgOne.Title, alertRuleOrgTwo.Title, count) - - // 2. Next, let's modify the configuration of an organization by adding an extra alertmanager. - fakeAM2 := store.NewFakeExternalAlertmanager(t) - adminConfig2 = &models.AdminConfiguration{OrgID: 2, Alertmanagers: []string{fakeAM.Server.URL, fakeAM2.Server.URL}} - cmd = store.UpdateAdminConfigurationCmd{AdminConfiguration: adminConfig2} - require.NoError(t, fakeAdminConfigStore.UpdateAdminConfiguration(cmd)) - - // Before we sync, let's grab the existing hash of this particular org. - alertsRouter.AdminConfigMtx.Lock() - currentHash := alertsRouter.SendersCfgHash[2] - alertsRouter.AdminConfigMtx.Unlock() - - // Now, sync again. - require.NoError(t, alertsRouter.SyncAndApplyConfigFromDatabase()) - - // The hash for org two should not be the same and we should still have two senders. - alertsRouter.AdminConfigMtx.Lock() - require.NotEqual(t, alertsRouter.SendersCfgHash[2], currentHash) - require.Equal(t, 2, len(alertsRouter.Senders)) - require.Equal(t, 2, len(alertsRouter.SendersCfgHash)) - alertsRouter.AdminConfigMtx.Unlock() - - // Wait for the discovery of the new Alertmanager for orgID = 2. 
- require.Eventuallyf(t, func() bool { - return len(alertsRouter.AlertmanagersFor(2)) == 2 && len(alertsRouter.DroppedAlertmanagersFor(2)) == 0 - }, 10*time.Second, 200*time.Millisecond, "Alertmanager for org 2 was never re-discovered after fix") - - // 3. Now, let's provide a configuration that fails for OrgID = 1. - adminConfig2 = &models.AdminConfiguration{OrgID: 1, Alertmanagers: []string{"123://invalid.org"}} - cmd = store.UpdateAdminConfigurationCmd{AdminConfiguration: adminConfig2} - require.NoError(t, fakeAdminConfigStore.UpdateAdminConfiguration(cmd)) - - // Before we sync, let's get the current config hash. - alertsRouter.AdminConfigMtx.Lock() - currentHash = alertsRouter.SendersCfgHash[1] - alertsRouter.AdminConfigMtx.Unlock() - - // Now, sync again. - require.NoError(t, alertsRouter.SyncAndApplyConfigFromDatabase()) - - // The old configuration should still be running. - alertsRouter.AdminConfigMtx.Lock() - require.Equal(t, alertsRouter.SendersCfgHash[1], currentHash) - alertsRouter.AdminConfigMtx.Unlock() - require.Equal(t, 1, len(alertsRouter.AlertmanagersFor(1))) - - // If we fix it - it should be applied. - adminConfig2 = &models.AdminConfiguration{OrgID: 1, Alertmanagers: []string{"notarealalertmanager:3030"}} - cmd = store.UpdateAdminConfigurationCmd{AdminConfiguration: adminConfig2} - require.NoError(t, fakeAdminConfigStore.UpdateAdminConfiguration(cmd)) - require.NoError(t, alertsRouter.SyncAndApplyConfigFromDatabase()) - alertsRouter.AdminConfigMtx.Lock() - require.NotEqual(t, alertsRouter.SendersCfgHash[1], currentHash) - alertsRouter.AdminConfigMtx.Unlock() - - // Finally, remove everything. 
- require.NoError(t, fakeAdminConfigStore.DeleteAdminConfiguration(1)) - require.NoError(t, fakeAdminConfigStore.DeleteAdminConfiguration(2)) - require.NoError(t, alertsRouter.SyncAndApplyConfigFromDatabase()) - alertsRouter.AdminConfigMtx.Lock() - require.Equal(t, 0, len(alertsRouter.Senders)) - require.Equal(t, 0, len(alertsRouter.SendersCfgHash)) - alertsRouter.AdminConfigMtx.Unlock() - - require.Eventuallyf(t, func() bool { - NoAlertmanagerOrgOne := len(alertsRouter.AlertmanagersFor(1)) == 0 && len(alertsRouter.DroppedAlertmanagersFor(1)) == 0 - NoAlertmanagerOrgTwo := len(alertsRouter.AlertmanagersFor(2)) == 0 && len(alertsRouter.DroppedAlertmanagersFor(2)) == 0 - - return NoAlertmanagerOrgOne && NoAlertmanagerOrgTwo - }, 10*time.Second, 200*time.Millisecond, "Alertmanager for org 1 and 2 were never removed") -} - -func TestChangingAlertmanagersChoice(t *testing.T) { - fakeAM := store.NewFakeExternalAlertmanager(t) - defer fakeAM.Close() - fakeRuleStore := store.NewFakeRuleStore(t) - fakeInstanceStore := &store.FakeInstanceStore{} - fakeAdminConfigStore := store.NewFakeAdminConfigStore(t) - - // create alert rule with one second interval and an Alertmanagers choice. - alertRule := CreateTestAlertRule(t, fakeRuleStore, 1, 1, eval.Alerting) - - // First, let's create an admin configuration that holds an alertmanager - // and sends alerts to both internal and external alertmanagers (default). - adminConfig := &models.AdminConfiguration{OrgID: 1, Alertmanagers: []string{fakeAM.Server.URL}} - cmd := store.UpdateAdminConfigurationCmd{AdminConfiguration: adminConfig} - require.NoError(t, fakeAdminConfigStore.UpdateAdminConfiguration(cmd)) - - sched, mockedClock, alertsRouter := setupScheduler(t, fakeRuleStore, fakeInstanceStore, fakeAdminConfigStore, nil) - - // Make sure we sync the configuration at least once before the evaluation happens to guarantee the sender is running - // when the first alert triggers. 
- require.NoError(t, alertsRouter.SyncAndApplyConfigFromDatabase()) - alertsRouter.AdminConfigMtx.Lock() - require.Equal(t, 1, len(alertsRouter.Senders)) - require.Equal(t, 1, len(alertsRouter.SendersCfgHash)) - alertsRouter.AdminConfigMtx.Unlock() - - // Then, ensure we've discovered the Alertmanager and the Alertmanagers choice is correct. - require.Eventually(t, func() bool { - return len(alertsRouter.AlertmanagersFor(1)) == 1 && - len(alertsRouter.DroppedAlertmanagersFor(1)) == 0 && - alertsRouter.SendAlertsTo[1] == adminConfig.SendAlertsTo - }, 10*time.Second, 200*time.Millisecond) - - ctx, cancel := context.WithCancel(context.Background()) - t.Cleanup(func() { - cancel() - }) - go func() { - err := sched.Run(ctx) - require.NoError(t, err) - }() - - // With everything up and running, let's advance the time to make sure we get at least one alert iteration. - mockedClock.Add(2 * time.Second) - - // Eventually, our Alertmanager should have received alerts. - require.Eventuallyf(t, func() bool { - return fakeAM.AlertsCount() >= 1 && - fakeAM.AlertNamesCompare([]string{alertRule.Title}) - }, 10*time.Second, 200*time.Millisecond, "expected at least one alert to be received and the title of the first one to be '%s'. but got [%d]: [%v]", alertRule.Title, fakeAM.AlertsCount(), fakeAM.Alerts()) - - // Now, let's change the Alertmanagers choice to send only to the external Alertmanager. - adminConfig.SendAlertsTo = models.ExternalAlertmanagers - cmd = store.UpdateAdminConfigurationCmd{AdminConfiguration: adminConfig} - require.NoError(t, fakeAdminConfigStore.UpdateAdminConfiguration(cmd)) - - // Again, make sure we sync and verify the senders. 
- require.NoError(t, alertsRouter.SyncAndApplyConfigFromDatabase()) - alertsRouter.AdminConfigMtx.Lock() - require.Equal(t, 1, len(alertsRouter.Senders)) - require.Equal(t, 1, len(alertsRouter.SendersCfgHash)) - alertsRouter.AdminConfigMtx.Unlock() - - // Then, ensure we still have the Alertmanager but the Alertmanagers choice has changed. - require.Eventually(t, func() bool { - return len(alertsRouter.AlertmanagersFor(1)) == 1 && - len(alertsRouter.DroppedAlertmanagersFor(1)) == 0 && - alertsRouter.SendAlertsTo[1] == adminConfig.SendAlertsTo - }, 10*time.Second, 200*time.Millisecond) - - // Finally, let's change the Alertmanagers choice to send only to the internal Alertmanager. - adminConfig.SendAlertsTo = models.InternalAlertmanager - cmd = store.UpdateAdminConfigurationCmd{AdminConfiguration: adminConfig} - require.NoError(t, fakeAdminConfigStore.UpdateAdminConfiguration(cmd)) - - // Again, make sure we sync and verify the senders. - // Senders should be running even though alerts are being handled externally. - require.NoError(t, alertsRouter.SyncAndApplyConfigFromDatabase()) - alertsRouter.AdminConfigMtx.Lock() - require.Equal(t, 1, len(alertsRouter.Senders)) - require.Equal(t, 1, len(alertsRouter.SendersCfgHash)) - alertsRouter.AdminConfigMtx.Unlock() - - // Then, ensure the Alertmanager is still listed and the Alertmanagers choice has changed. 
- require.Eventually(t, func() bool { - return len(alertsRouter.AlertmanagersFor(1)) == 1 && - len(alertsRouter.DroppedAlertmanagersFor(1)) == 0 && - alertsRouter.SendAlertsTo[1] == adminConfig.SendAlertsTo - }, 10*time.Second, 200*time.Millisecond) -} - func TestSchedule_ruleRoutine(t *testing.T) { createSchedule := func( evalAppliedChan chan time.Time, - ) (*schedule, *store.FakeRuleStore, *store.FakeInstanceStore, *store.FakeAdminConfigStore, prometheus.Gatherer, *sender.AlertsRouter) { + senderMock *AlertsSenderMock, + ) (*schedule, *store.FakeRuleStore, *store.FakeInstanceStore, prometheus.Gatherer) { ruleStore := store.NewFakeRuleStore(t) instanceStore := &store.FakeInstanceStore{} - adminConfigStore := store.NewFakeAdminConfigStore(t) registry := prometheus.NewPedanticRegistry() - sch, _, alertsRouter := setupScheduler(t, ruleStore, instanceStore, adminConfigStore, registry) + sch, _ := setupScheduler(t, ruleStore, instanceStore, registry, senderMock) sch.evalAppliedFunc = func(key models.AlertRuleKey, t time.Time) { evalAppliedChan <- t } - return sch, ruleStore, instanceStore, adminConfigStore, registry, alertsRouter + return sch, ruleStore, instanceStore, registry } // normal states do not include NoData and Error because currently it is not possible to perform any sensible test @@ -364,7 +68,7 @@ func TestSchedule_ruleRoutine(t *testing.T) { t.Run(fmt.Sprintf("when rule evaluation happens (evaluation state %s)", evalState), func(t *testing.T) { evalChan := make(chan *evaluation) evalAppliedChan := make(chan time.Time) - sch, ruleStore, instanceStore, _, reg, _ := createSchedule(evalAppliedChan) + sch, ruleStore, instanceStore, reg := createSchedule(evalAppliedChan, nil) rule := CreateTestAlertRule(t, ruleStore, 10, rand.Int63(), evalState) @@ -483,7 +187,7 @@ func TestSchedule_ruleRoutine(t *testing.T) { t.Run("should exit", func(t *testing.T) { t.Run("when context is cancelled", func(t *testing.T) { stoppedChan := make(chan error) - sch, _, _, _, _, _ 
:= createSchedule(make(chan time.Time)) + sch, _, _, _ := createSchedule(make(chan time.Time), nil) ctx, cancel := context.WithCancel(context.Background()) go func() { @@ -502,7 +206,7 @@ func TestSchedule_ruleRoutine(t *testing.T) { evalAppliedChan := make(chan time.Time) ctx := context.Background() - sch, ruleStore, _, _, _, _ := createSchedule(evalAppliedChan) + sch, ruleStore, _, _ := createSchedule(evalAppliedChan, nil) rule := CreateTestAlertRule(t, ruleStore, 10, rand.Int63(), randomNormalState()) @@ -554,7 +258,7 @@ func TestSchedule_ruleRoutine(t *testing.T) { evalChan := make(chan *evaluation) evalAppliedChan := make(chan time.Time) - sch, ruleStore, _, _, _, _ := createSchedule(evalAppliedChan) + sch, ruleStore, _, _ := createSchedule(evalAppliedChan, nil) rule := CreateTestAlertRule(t, ruleStore, 10, rand.Int63(), randomNormalState()) @@ -606,7 +310,7 @@ func TestSchedule_ruleRoutine(t *testing.T) { evalAppliedChan := make(chan time.Time) updateChan := make(chan ruleVersion) - sch, ruleStore, _, _, _, _ := createSchedule(evalAppliedChan) + sch, ruleStore, _, _ := createSchedule(evalAppliedChan, nil) rule := CreateTestAlertRule(t, ruleStore, 10, rand.Int63(), eval.Alerting) // we want the alert to fire @@ -649,7 +353,7 @@ func TestSchedule_ruleRoutine(t *testing.T) { evalAppliedChan := make(chan time.Time) updateChan := make(chan ruleVersion) - sch, ruleStore, _, _, _, _ := createSchedule(evalAppliedChan) + sch, ruleStore, _, _ := createSchedule(evalAppliedChan, nil) sch.maxAttempts = rand.Int63n(4) + 1 rule := CreateTestAlertRule(t, ruleStore, 10, rand.Int63(), randomNormalState()) @@ -681,32 +385,20 @@ func TestSchedule_ruleRoutine(t *testing.T) { t.Run("when rule version is updated", func(t *testing.T) { t.Run("should clear the state and expire firing alerts", func(t *testing.T) { - fakeAM := store.NewFakeExternalAlertmanager(t) - defer fakeAM.Close() - orgID := rand.Int63() - s, err := sender.New() - require.NoError(t, err) - adminConfig := 
&models.AdminConfiguration{OrgID: orgID, Alertmanagers: []string{fakeAM.Server.URL}} - err = s.ApplyConfig(adminConfig) - require.NoError(t, err) - s.Run() - defer s.Stop() - - require.Eventuallyf(t, func() bool { - return len(s.Alertmanagers()) == 1 - }, 20*time.Second, 200*time.Millisecond, "external Alertmanager was not discovered.") evalChan := make(chan *evaluation) evalAppliedChan := make(chan time.Time) updateChan := make(chan ruleVersion) - ctx := context.Background() - sch, ruleStore, _, _, _, alertsRouter := createSchedule(evalAppliedChan) - alertsRouter.Senders[orgID] = s + sender := AlertsSenderMock{} - var rulePtr = CreateTestAlertRule(t, ruleStore, 10, orgID, eval.Alerting) // we want the alert to fire - var rule = *rulePtr + ctx := context.Background() + sch, ruleStore, _, _ := createSchedule(evalAppliedChan, &sender) + + var rule = CreateTestAlertRule(t, ruleStore, 10, orgID, eval.Alerting) // we want the alert to fire + + sender.EXPECT().Send(rule.GetKey(), mock.Anything) // define some state states := make([]*state.State, 0, len(allStates)) @@ -754,26 +446,23 @@ func TestSchedule_ruleRoutine(t *testing.T) { updateChan <- ruleVersion(rule.Version) wg.Wait() - newRule := rule + newRule := models.CopyRule(rule) newRule.Version++ - ruleStore.PutRule(ctx, &newRule) + ruleStore.PutRule(ctx, newRule) wg.Add(1) updateChan <- ruleVersion(newRule.Version) wg.Wait() require.Eventually(t, func() bool { - return len(sch.stateManager.GetStatesForRuleUID(rule.OrgID, rule.UID)) == 0 + return len(sender.Calls) > 0 }, 5*time.Second, 100*time.Millisecond) - var count int - require.Eventuallyf(t, func() bool { - count = fakeAM.AlertsCount() - return count == expectedToBeSent - }, 20*time.Second, 200*time.Millisecond, "Alertmanager was expected to receive %d alerts, but received only %d", expectedToBeSent, count) + require.Empty(t, sch.stateManager.GetStatesForRuleUID(rule.OrgID, rule.UID)) - for _, alert := range fakeAM.Alerts() { - require.Equalf(t, 
sch.clock.Now().UTC(), time.Time(alert.EndsAt).UTC(), "Alert received by Alertmanager should be expired as of now") - } + sender.AssertExpectations(t) + args, ok := sender.Calls[0].Arguments[1].(definitions.PostableAlerts) + require.Truef(t, ok, fmt.Sprintf("expected argument of function was supposed to be 'definitions.PostableAlerts' but got %T", sender.Calls[0].Arguments[1])) + require.Len(t, args.PostableAlerts, expectedToBeSent) }) }) @@ -789,34 +478,21 @@ func TestSchedule_ruleRoutine(t *testing.T) { }) t.Run("when there are alerts that should be firing", func(t *testing.T) { - t.Run("it should send to local alertmanager if configured for organization", func(t *testing.T) { - // TODO figure out how to simulate multiorg alertmanager - t.Skip() - }) - t.Run("it should send to external alertmanager if configured for organization", func(t *testing.T) { - fakeAM := store.NewFakeExternalAlertmanager(t) - defer fakeAM.Close() - + t.Run("it should call sender", func(t *testing.T) { orgID := rand.Int63() - s, err := sender.New() - require.NoError(t, err) - adminConfig := &models.AdminConfiguration{OrgID: orgID, Alertmanagers: []string{fakeAM.Server.URL}} - err = s.ApplyConfig(adminConfig) - require.NoError(t, err) - s.Run() - defer s.Stop() - - require.Eventuallyf(t, func() bool { - return len(s.Alertmanagers()) == 1 - }, 20*time.Second, 200*time.Millisecond, "external Alertmanager was not discovered.") evalChan := make(chan *evaluation) evalAppliedChan := make(chan time.Time) - sch, ruleStore, _, _, _, alertsRouter := createSchedule(evalAppliedChan) - alertsRouter.Senders[orgID] = s + sender := AlertsSenderMock{} + + sch, ruleStore, _, _ := createSchedule(evalAppliedChan, &sender) + // eval.Alerting makes state manager to create notifications for alertmanagers rule := CreateTestAlertRule(t, ruleStore, 10, orgID, eval.Alerting) + folder, _ := ruleStore.GetNamespaceByUID(context.Background(), rule.NamespaceUID, orgID, nil) + + sender.EXPECT().Send(rule.GetKey(), 
mock.Anything).Return() go func() { ctx, cancel := context.WithCancel(context.Background()) @@ -828,13 +504,22 @@ func TestSchedule_ruleRoutine(t *testing.T) { scheduledAt: time.Now(), version: rule.Version, } + waitForTimeChannel(t, evalAppliedChan) - var count int - require.Eventuallyf(t, func() bool { - count = fakeAM.AlertsCount() - return count == 1 && fakeAM.AlertNamesCompare([]string{rule.Title}) - }, 20*time.Second, 200*time.Millisecond, "Alertmanager never received an '%s', received alerts count: %d", rule.Title, count) + sender.AssertExpectations(t) + args, ok := sender.Calls[0].Arguments[1].(definitions.PostableAlerts) + require.Truef(t, ok, fmt.Sprintf("expected argument of function was supposed to be 'definitions.PostableAlerts' but got %T", sender.Calls[0].Arguments[1])) + + require.Len(t, args.PostableAlerts, 1) + + t.Run("should add extra labels", func(t *testing.T) { + alert := args.PostableAlerts[0] + assert.Equal(t, rule.UID, alert.Labels[models.RuleUIDLabel]) + assert.Equal(t, rule.NamespaceUID, alert.Labels[models.NamespaceUIDLabel]) + assert.Equal(t, rule.Title, alert.Labels[prometheusModel.AlertNameLabel]) + assert.Equal(t, folder.Title, alert.Labels[models.FolderTitleLabel]) + }) }) }) @@ -925,12 +610,11 @@ func setupSchedulerWithFakeStores(t *testing.T) *schedule { t.Helper() ruleStore := store.NewFakeRuleStore(t) instanceStore := &store.FakeInstanceStore{} - adminConfigStore := store.NewFakeAdminConfigStore(t) - sch, _, _ := setupScheduler(t, ruleStore, instanceStore, adminConfigStore, nil) + sch, _ := setupScheduler(t, ruleStore, instanceStore, nil, nil) return sch } -func setupScheduler(t *testing.T, rs store.RuleStore, is store.InstanceStore, acs store.AdminConfigurationStore, registry *prometheus.Registry) (*schedule, *clock.Mock, *sender.AlertsRouter) { +func setupScheduler(t *testing.T, rs store.RuleStore, is store.InstanceStore, registry *prometheus.Registry, senderMock *AlertsSenderMock) (*schedule, *clock.Mock) { t.Helper() 
fakeAnnoRepo := store.NewFakeAnnotationsRepo() @@ -942,16 +626,16 @@ func setupScheduler(t *testing.T, rs store.RuleStore, is store.InstanceStore, ac } m := metrics.NewNGAlert(registry) secretsService := secretsManager.SetupTestService(t, fakes.NewFakeSecretsStore()) - decryptFn := secretsService.GetDecryptedValue - moa, err := notifier.NewMultiOrgAlertmanager(&setting.Cfg{}, ¬ifier.FakeConfigStore{}, ¬ifier.FakeOrgStore{}, ¬ifier.FakeKVStore{}, provisioning.NewFakeProvisioningStore(), decryptFn, m.GetMultiOrgAlertmanagerMetrics(), nil, log.New("testlogger"), secretsService) - require.NoError(t, err) appUrl := &url.URL{ Scheme: "http", Host: "localhost", } - alertsRouter := sender.NewAlertsRouter(moa, acs, mockedClock, appUrl, map[int64]struct{}{}, 10*time.Minute) // do not poll in unit tests. + if senderMock == nil { + senderMock = &AlertsSenderMock{} + senderMock.EXPECT().Send(mock.Anything, mock.Anything).Return() + } cfg := setting.UnifiedAlertingSettings{ BaseInterval: time.Second, @@ -967,10 +651,10 @@ func setupScheduler(t *testing.T, rs store.RuleStore, is store.InstanceStore, ac InstanceStore: is, Logger: logger, Metrics: m.GetSchedulerMetrics(), - AlertSender: alertsRouter, + AlertSender: senderMock, } st := state.NewManager(schedCfg.Logger, m.GetStateMetrics(), nil, rs, is, &dashboards.FakeDashboardService{}, &image.NoopImageService{}, clock.NewMock()) - return NewScheduler(schedCfg, appUrl, st, busmock.New()), mockedClock, alertsRouter + return NewScheduler(schedCfg, appUrl, st, busmock.New()), mockedClock } // createTestAlertRule creates a dummy alert definition to be used by the tests. 
diff --git a/pkg/services/ngalert/sender/router.go b/pkg/services/ngalert/sender/router.go index decd20d1a6d..2ea0f1fe04e 100644 --- a/pkg/services/ngalert/sender/router.go +++ b/pkg/services/ngalert/sender/router.go @@ -27,13 +27,13 @@ type AlertsRouter struct { clock clock.Clock adminConfigStore store.AdminConfigurationStore - // Senders help us send alerts to external Alertmanagers. - AdminConfigMtx sync.RWMutex - SendAlertsTo map[int64]models.AlertmanagersChoice - Senders map[int64]*Sender - SendersCfgHash map[int64]string + // senders help us send alerts to external Alertmanagers. + adminConfigMtx sync.RWMutex + sendAlertsTo map[int64]models.AlertmanagersChoice + senders map[int64]*Sender + sendersCfgHash map[int64]string - MultiOrgNotifier *notifier.MultiOrgAlertmanager + multiOrgNotifier *notifier.MultiOrgAlertmanager appURL *url.URL disabledOrgs map[int64]struct{} @@ -46,12 +46,12 @@ func NewAlertsRouter(multiOrgNotifier *notifier.MultiOrgAlertmanager, store stor clock: clk, adminConfigStore: store, - AdminConfigMtx: sync.RWMutex{}, - Senders: map[int64]*Sender{}, - SendersCfgHash: map[int64]string{}, - SendAlertsTo: map[int64]models.AlertmanagersChoice{}, + adminConfigMtx: sync.RWMutex{}, + senders: map[int64]*Sender{}, + sendersCfgHash: map[int64]string{}, + sendAlertsTo: map[int64]models.AlertmanagersChoice{}, - MultiOrgNotifier: multiOrgNotifier, + multiOrgNotifier: multiOrgNotifier, appURL: appURL, disabledOrgs: disabledOrgs, @@ -72,7 +72,7 @@ func (d *AlertsRouter) SyncAndApplyConfigFromDatabase() error { d.logger.Debug("found admin configurations", "count", len(cfgs)) orgsFound := make(map[int64]struct{}, len(cfgs)) - d.AdminConfigMtx.Lock() + d.adminConfigMtx.Lock() for _, cfg := range cfgs { _, isDisabledOrg := d.disabledOrgs[cfg.OrgID] if isDisabledOrg { @@ -81,11 +81,11 @@ func (d *AlertsRouter) SyncAndApplyConfigFromDatabase() error { } // Update the Alertmanagers choice for the organization. 
- d.SendAlertsTo[cfg.OrgID] = cfg.SendAlertsTo + d.sendAlertsTo[cfg.OrgID] = cfg.SendAlertsTo orgsFound[cfg.OrgID] = struct{}{} // keep track of the which senders we need to keep. - existing, ok := d.Senders[cfg.OrgID] + existing, ok := d.senders[cfg.OrgID] // We have no running sender and no Alertmanager(s) configured, no-op. if !ok && len(cfg.Alertmanagers) == 0 { @@ -107,7 +107,7 @@ func (d *AlertsRouter) SyncAndApplyConfigFromDatabase() error { // We have a running sender, check if we need to apply a new config. if ok { - if d.SendersCfgHash[cfg.OrgID] == cfg.AsSHA256() { + if d.sendersCfgHash[cfg.OrgID] == cfg.AsSHA256() { d.logger.Debug("sender configuration is the same as the one running, no-op", "org", cfg.OrgID, "alertmanagers", cfg.Alertmanagers) continue } @@ -118,7 +118,7 @@ func (d *AlertsRouter) SyncAndApplyConfigFromDatabase() error { d.logger.Error("failed to apply configuration", "err", err, "org", cfg.OrgID) continue } - d.SendersCfgHash[cfg.OrgID] = cfg.AsSHA256() + d.sendersCfgHash[cfg.OrgID] = cfg.AsSHA256() continue } @@ -130,7 +130,7 @@ func (d *AlertsRouter) SyncAndApplyConfigFromDatabase() error { continue } - d.Senders[cfg.OrgID] = s + d.senders[cfg.OrgID] = s s.Run() err = s.ApplyConfig(cfg) @@ -139,19 +139,19 @@ func (d *AlertsRouter) SyncAndApplyConfigFromDatabase() error { continue } - d.SendersCfgHash[cfg.OrgID] = cfg.AsSHA256() + d.sendersCfgHash[cfg.OrgID] = cfg.AsSHA256() } sendersToStop := map[int64]*Sender{} - for orgID, s := range d.Senders { + for orgID, s := range d.senders { if _, exists := orgsFound[orgID]; !exists { sendersToStop[orgID] = s - delete(d.Senders, orgID) - delete(d.SendersCfgHash, orgID) + delete(d.senders, orgID) + delete(d.sendersCfgHash, orgID) } } - d.AdminConfigMtx.Unlock() + d.adminConfigMtx.Unlock() // We can now stop these senders w/o having to hold a lock. 
for orgID, s := range sendersToStop { @@ -174,11 +174,11 @@ func (d *AlertsRouter) Send(key models.AlertRuleKey, alerts definitions.Postable // Send alerts to local notifier if they need to be handled internally // or if no external AMs have been discovered yet. var localNotifierExist, externalNotifierExist bool - if d.SendAlertsTo[key.OrgID] == models.ExternalAlertmanagers && len(d.AlertmanagersFor(key.OrgID)) > 0 { + if d.sendAlertsTo[key.OrgID] == models.ExternalAlertmanagers && len(d.AlertmanagersFor(key.OrgID)) > 0 { logger.Debug("no alerts to put in the notifier") } else { logger.Debug("sending alerts to local notifier", "count", len(alerts.PostableAlerts), "alerts", alerts.PostableAlerts) - n, err := d.MultiOrgNotifier.AlertmanagerFor(key.OrgID) + n, err := d.multiOrgNotifier.AlertmanagerFor(key.OrgID) if err == nil { localNotifierExist = true if err := n.PutAlerts(alerts); err != nil { @@ -195,10 +195,10 @@ func (d *AlertsRouter) Send(key models.AlertRuleKey, alerts definitions.Postable // Send alerts to external Alertmanager(s) if we have a sender for this organization // and alerts are not being handled just internally. - d.AdminConfigMtx.RLock() - defer d.AdminConfigMtx.RUnlock() - s, ok := d.Senders[key.OrgID] - if ok && d.SendAlertsTo[key.OrgID] != models.InternalAlertmanager { + d.adminConfigMtx.RLock() + defer d.adminConfigMtx.RUnlock() + s, ok := d.senders[key.OrgID] + if ok && d.sendAlertsTo[key.OrgID] != models.InternalAlertmanager { logger.Debug("sending alerts to external notifier", "count", len(alerts.PostableAlerts), "alerts", alerts.PostableAlerts) s.SendAlerts(alerts) externalNotifierExist = true @@ -211,9 +211,9 @@ func (d *AlertsRouter) Send(key models.AlertRuleKey, alerts definitions.Postable // AlertmanagersFor returns all the discovered Alertmanager(s) for a particular organization. 
func (d *AlertsRouter) AlertmanagersFor(orgID int64) []*url.URL { - d.AdminConfigMtx.RLock() - defer d.AdminConfigMtx.RUnlock() - s, ok := d.Senders[orgID] + d.adminConfigMtx.RLock() + defer d.adminConfigMtx.RUnlock() + s, ok := d.senders[orgID] if !ok { return []*url.URL{} } @@ -222,9 +222,9 @@ func (d *AlertsRouter) AlertmanagersFor(orgID int64) []*url.URL { // DroppedAlertmanagersFor returns all the dropped Alertmanager(s) for a particular organization. func (d *AlertsRouter) DroppedAlertmanagersFor(orgID int64) []*url.URL { - d.AdminConfigMtx.RLock() - defer d.AdminConfigMtx.RUnlock() - s, ok := d.Senders[orgID] + d.adminConfigMtx.RLock() + defer d.adminConfigMtx.RUnlock() + s, ok := d.senders[orgID] if !ok { return []*url.URL{} } @@ -242,12 +242,12 @@ func (d *AlertsRouter) Run(ctx context.Context) error { } case <-ctx.Done(): // Stop sending alerts to all external Alertmanager(s). - d.AdminConfigMtx.Lock() - for orgID, s := range d.Senders { - delete(d.Senders, orgID) // delete before we stop to make sure we don't accept any more alerts. + d.adminConfigMtx.Lock() + for orgID, s := range d.senders { + delete(d.senders, orgID) // delete before we stop to make sure we don't accept any more alerts. 
s.Stop() } - d.AdminConfigMtx.Unlock() + d.adminConfigMtx.Unlock() return nil } diff --git a/pkg/services/ngalert/sender/router_test.go b/pkg/services/ngalert/sender/router_test.go new file mode 100644 index 00000000000..325b78b100b --- /dev/null +++ b/pkg/services/ngalert/sender/router_test.go @@ -0,0 +1,362 @@ +package sender + +import ( + "context" + "fmt" + "math/rand" + "net/url" + "testing" + "time" + + "github.com/benbjohnson/clock" + "github.com/go-openapi/strfmt" + models2 "github.com/prometheus/alertmanager/api/v2/models" + "github.com/prometheus/client_golang/prometheus" + "github.com/stretchr/testify/require" + + "github.com/grafana/grafana/pkg/infra/log" + "github.com/grafana/grafana/pkg/services/ngalert/api/tooling/definitions" + "github.com/grafana/grafana/pkg/services/ngalert/metrics" + "github.com/grafana/grafana/pkg/services/ngalert/models" + "github.com/grafana/grafana/pkg/services/ngalert/notifier" + "github.com/grafana/grafana/pkg/services/ngalert/provisioning" + "github.com/grafana/grafana/pkg/services/ngalert/store" + "github.com/grafana/grafana/pkg/services/secrets/fakes" + secretsManager "github.com/grafana/grafana/pkg/services/secrets/manager" + "github.com/grafana/grafana/pkg/setting" + "github.com/grafana/grafana/pkg/util" +) + +func TestSendingToExternalAlertmanager(t *testing.T) { + ruleKey := models.GenerateRuleKey(1) + + fakeAM := NewFakeExternalAlertmanager(t) + defer fakeAM.Close() + + fakeAdminConfigStore := &store.AdminConfigurationStoreMock{} + mockedGetAdminConfigurations := fakeAdminConfigStore.EXPECT().GetAdminConfigurations() + + mockedClock := clock.NewMock() + + moa := createMultiOrgAlertmanager(t, []int64{1}) + + appUrl := &url.URL{ + Scheme: "http", + Host: "localhost", + } + + alertsRouter := NewAlertsRouter(moa, fakeAdminConfigStore, mockedClock, appUrl, map[int64]struct{}{}, 10*time.Minute) + + mockedGetAdminConfigurations.Return([]*models.AdminConfiguration{ + {OrgID: ruleKey.OrgID, Alertmanagers: 
[]string{fakeAM.Server.URL}, SendAlertsTo: models.AllAlertmanagers}, + }, nil) + // Make sure we sync the configuration at least once before the evaluation happens to guarantee the sender is running + // when the first alert triggers. + require.NoError(t, alertsRouter.SyncAndApplyConfigFromDatabase()) + require.Equal(t, 1, len(alertsRouter.senders)) + require.Equal(t, 1, len(alertsRouter.sendersCfgHash)) + + // Then, ensure we've discovered the Alertmanager. + assertAlertmanagersStatusForOrg(t, alertsRouter, ruleKey.OrgID, 1, 0) + + var expected []*models2.PostableAlert + alerts := definitions.PostableAlerts{} + for i := 0; i < rand.Intn(5)+1; i++ { + alert := generatePostableAlert(t, mockedClock) + expected = append(expected, &alert) + alerts.PostableAlerts = append(alerts.PostableAlerts, alert) + } + + alertsRouter.Send(ruleKey, alerts) + + // Eventually, our Alertmanager should have received at least one alert. + assertAlertsDelivered(t, fakeAM, expected) + + // Now, let's remove the Alertmanager from the admin configuration. + mockedGetAdminConfigurations.Return(nil, nil) + // Again, make sure we sync and verify the senders. + require.NoError(t, alertsRouter.SyncAndApplyConfigFromDatabase()) + require.Equal(t, 0, len(alertsRouter.senders)) + require.Equal(t, 0, len(alertsRouter.sendersCfgHash)) + + // Then, ensure we've dropped the Alertmanager. 
+ assertAlertmanagersStatusForOrg(t, alertsRouter, ruleKey.OrgID, 0, 0) +} + +func TestSendingToExternalAlertmanager_WithMultipleOrgs(t *testing.T) { + ruleKey1 := models.GenerateRuleKey(1) + ruleKey2 := models.GenerateRuleKey(2) + + fakeAM := NewFakeExternalAlertmanager(t) + defer fakeAM.Close() + + fakeAdminConfigStore := &store.AdminConfigurationStoreMock{} + mockedGetAdminConfigurations := fakeAdminConfigStore.EXPECT().GetAdminConfigurations() + + mockedClock := clock.NewMock() + + moa := createMultiOrgAlertmanager(t, []int64{1, 2}) + + appUrl := &url.URL{ + Scheme: "http", + Host: "localhost", + } + + alertsRouter := NewAlertsRouter(moa, fakeAdminConfigStore, mockedClock, appUrl, map[int64]struct{}{}, 10*time.Minute) + + mockedGetAdminConfigurations.Return([]*models.AdminConfiguration{ + {OrgID: ruleKey1.OrgID, Alertmanagers: []string{fakeAM.Server.URL}, SendAlertsTo: models.AllAlertmanagers}, + }, nil) + + // Make sure we sync the configuration at least once before the evaluation happens to guarantee the sender is running + // when the first alert triggers. + require.NoError(t, alertsRouter.SyncAndApplyConfigFromDatabase()) + require.Equal(t, 1, len(alertsRouter.senders)) + require.Equal(t, 1, len(alertsRouter.sendersCfgHash)) + + // Then, ensure we've discovered the Alertmanager. + assertAlertmanagersStatusForOrg(t, alertsRouter, ruleKey1.OrgID, 1, 0) + + // 1. Now, let's assume a new org comes along. + mockedGetAdminConfigurations.Return([]*models.AdminConfiguration{ + {OrgID: ruleKey1.OrgID, Alertmanagers: []string{fakeAM.Server.URL}, SendAlertsTo: models.AllAlertmanagers}, + {OrgID: ruleKey2.OrgID, Alertmanagers: []string{fakeAM.Server.URL}}, + }, nil) + + // If we sync again, new senders must have spawned. + require.NoError(t, alertsRouter.SyncAndApplyConfigFromDatabase()) + require.Equal(t, 2, len(alertsRouter.senders)) + require.Equal(t, 2, len(alertsRouter.sendersCfgHash)) + + // Then, ensure we've discovered the Alertmanager for the new organization. 
+ assertAlertmanagersStatusForOrg(t, alertsRouter, ruleKey1.OrgID, 1, 0) + assertAlertmanagersStatusForOrg(t, alertsRouter, ruleKey2.OrgID, 1, 0) + + var expected []*models2.PostableAlert + alerts1 := definitions.PostableAlerts{} + for i := 0; i < rand.Intn(5)+1; i++ { + alert := generatePostableAlert(t, mockedClock) + expected = append(expected, &alert) + alerts1.PostableAlerts = append(alerts1.PostableAlerts, alert) + } + alerts2 := definitions.PostableAlerts{} + for i := 0; i < rand.Intn(5)+1; i++ { + alert := generatePostableAlert(t, mockedClock) + expected = append(expected, &alert) + alerts2.PostableAlerts = append(alerts2.PostableAlerts, alert) + } + + alertsRouter.Send(ruleKey1, alerts1) + alertsRouter.Send(ruleKey2, alerts2) + + assertAlertsDelivered(t, fakeAM, expected) + + // 2. Next, let's modify the configuration of an organization by adding an extra alertmanager. + fakeAM2 := NewFakeExternalAlertmanager(t) + + mockedGetAdminConfigurations.Return([]*models.AdminConfiguration{ + {OrgID: ruleKey1.OrgID, Alertmanagers: []string{fakeAM.Server.URL}, SendAlertsTo: models.AllAlertmanagers}, + {OrgID: ruleKey2.OrgID, Alertmanagers: []string{fakeAM.Server.URL, fakeAM2.Server.URL}}, + }, nil) + + // Before we sync, let's grab the existing hash of this particular org. + currentHash := alertsRouter.sendersCfgHash[ruleKey2.OrgID] + + // Now, sync again. + require.NoError(t, alertsRouter.SyncAndApplyConfigFromDatabase()) + + // The hash for org two should not be the same and we should still have two senders. + require.NotEqual(t, alertsRouter.sendersCfgHash[ruleKey2.OrgID], currentHash) + require.Equal(t, 2, len(alertsRouter.senders)) + require.Equal(t, 2, len(alertsRouter.sendersCfgHash)) + + assertAlertmanagersStatusForOrg(t, alertsRouter, ruleKey2.OrgID, 2, 0) + + // 3. Now, let's provide a configuration that fails for OrgID = 1. 
+ mockedGetAdminConfigurations.Return([]*models.AdminConfiguration{ + {OrgID: ruleKey1.OrgID, Alertmanagers: []string{"123://invalid.org"}, SendAlertsTo: models.AllAlertmanagers}, + {OrgID: ruleKey2.OrgID, Alertmanagers: []string{fakeAM.Server.URL, fakeAM2.Server.URL}}, + }, nil) + + // Before we sync, let's get the current config hash. + currentHash = alertsRouter.sendersCfgHash[ruleKey1.OrgID] + + // Now, sync again. + require.NoError(t, alertsRouter.SyncAndApplyConfigFromDatabase()) + + // The old configuration should still be running. + require.Equal(t, alertsRouter.sendersCfgHash[ruleKey1.OrgID], currentHash) + require.Equal(t, 1, len(alertsRouter.AlertmanagersFor(ruleKey1.OrgID))) + + // If we fix it - it should be applied. + mockedGetAdminConfigurations.Return([]*models.AdminConfiguration{ + {OrgID: ruleKey1.OrgID, Alertmanagers: []string{"notarealalertmanager:3030"}, SendAlertsTo: models.AllAlertmanagers}, + {OrgID: ruleKey2.OrgID, Alertmanagers: []string{fakeAM.Server.URL, fakeAM2.Server.URL}}, + }, nil) + + require.NoError(t, alertsRouter.SyncAndApplyConfigFromDatabase()) + require.NotEqual(t, alertsRouter.sendersCfgHash[ruleKey1.OrgID], currentHash) + + // Finally, remove everything. 
+ mockedGetAdminConfigurations.Return([]*models.AdminConfiguration{}, nil) + + require.NoError(t, alertsRouter.SyncAndApplyConfigFromDatabase()) + + require.Equal(t, 0, len(alertsRouter.senders)) + require.Equal(t, 0, len(alertsRouter.sendersCfgHash)) + + assertAlertmanagersStatusForOrg(t, alertsRouter, ruleKey1.OrgID, 0, 0) + assertAlertmanagersStatusForOrg(t, alertsRouter, ruleKey2.OrgID, 0, 0) +} + +func TestChangingAlertmanagersChoice(t *testing.T) { + ruleKey := models.GenerateRuleKey(1) + + fakeAM := NewFakeExternalAlertmanager(t) + defer fakeAM.Close() + + fakeAdminConfigStore := &store.AdminConfigurationStoreMock{} + mockedGetAdminConfigurations := fakeAdminConfigStore.EXPECT().GetAdminConfigurations() + + mockedClock := clock.NewMock() + mockedClock.Set(time.Now()) + + moa := createMultiOrgAlertmanager(t, []int64{1}) + + appUrl := &url.URL{ + Scheme: "http", + Host: "localhost", + } + + alertsRouter := NewAlertsRouter(moa, fakeAdminConfigStore, mockedClock, appUrl, map[int64]struct{}{}, 10*time.Minute) + + mockedGetAdminConfigurations.Return([]*models.AdminConfiguration{ + {OrgID: ruleKey.OrgID, Alertmanagers: []string{fakeAM.Server.URL}, SendAlertsTo: models.AllAlertmanagers}, + }, nil) + // Make sure we sync the configuration at least once before the evaluation happens to guarantee the sender is running + // when the first alert triggers. + require.NoError(t, alertsRouter.SyncAndApplyConfigFromDatabase()) + require.Equal(t, 1, len(alertsRouter.senders)) + require.Equal(t, 1, len(alertsRouter.sendersCfgHash)) + require.Equal(t, models.AllAlertmanagers, alertsRouter.sendAlertsTo[ruleKey.OrgID]) + + // Then, ensure we've discovered the Alertmanager. 
+ assertAlertmanagersStatusForOrg(t, alertsRouter, ruleKey.OrgID, 1, 0) + + var expected []*models2.PostableAlert + alerts := definitions.PostableAlerts{} + for i := 0; i < rand.Intn(5)+1; i++ { + alert := generatePostableAlert(t, mockedClock) + expected = append(expected, &alert) + alerts.PostableAlerts = append(alerts.PostableAlerts, alert) + } + alertsRouter.Send(ruleKey, alerts) + + // Eventually, our Alertmanager should have received at least one alert. + assertAlertsDelivered(t, fakeAM, expected) + + // Now, let's change the Alertmanagers choice to send only to the external Alertmanager. + mockedGetAdminConfigurations.Return([]*models.AdminConfiguration{ + {OrgID: ruleKey.OrgID, Alertmanagers: []string{fakeAM.Server.URL}, SendAlertsTo: models.ExternalAlertmanagers}, + }, nil) + // Again, make sure we sync and verify the senders. + require.NoError(t, alertsRouter.SyncAndApplyConfigFromDatabase()) + require.Equal(t, 1, len(alertsRouter.senders)) + require.Equal(t, 1, len(alertsRouter.sendersCfgHash)) + + assertAlertmanagersStatusForOrg(t, alertsRouter, ruleKey.OrgID, 1, 0) + require.Equal(t, models.ExternalAlertmanagers, alertsRouter.sendAlertsTo[ruleKey.OrgID]) + + // Finally, let's change the Alertmanagers choice to send only to the internal Alertmanager. + mockedGetAdminConfigurations.Return([]*models.AdminConfiguration{ + {OrgID: ruleKey.OrgID, Alertmanagers: []string{fakeAM.Server.URL}, SendAlertsTo: models.InternalAlertmanager}, + }, nil) + + // Again, make sure we sync and verify the senders. + // senders should be running even though alerts are being handled externally. + require.NoError(t, alertsRouter.SyncAndApplyConfigFromDatabase()) + require.Equal(t, 1, len(alertsRouter.senders)) + require.Equal(t, 1, len(alertsRouter.sendersCfgHash)) + + // Then, ensure the Alertmanager is still listed and the Alertmanagers choice has changed. 
+ assertAlertmanagersStatusForOrg(t, alertsRouter, ruleKey.OrgID, 1, 0) + require.Equal(t, models.InternalAlertmanager, alertsRouter.sendAlertsTo[ruleKey.OrgID]) + + alertsRouter.Send(ruleKey, alerts) + + am, err := moa.AlertmanagerFor(ruleKey.OrgID) + require.NoError(t, err) + actualAlerts, err := am.GetAlerts(true, true, true, nil, "") + require.NoError(t, err) + require.Len(t, actualAlerts, len(expected)) +} + +func assertAlertmanagersStatusForOrg(t *testing.T, alertsRouter *AlertsRouter, orgID int64, active, dropped int) { + t.Helper() + require.Eventuallyf(t, func() bool { + return len(alertsRouter.AlertmanagersFor(orgID)) == active && len(alertsRouter.DroppedAlertmanagersFor(orgID)) == dropped + }, 10*time.Second, 200*time.Millisecond, + fmt.Sprintf("expected %d active Alertmanagers and %d dropped ones but got %d active and %d dropped", active, dropped, len(alertsRouter.AlertmanagersFor(orgID)), len(alertsRouter.DroppedAlertmanagersFor(orgID)))) +} + +func assertAlertsDelivered(t *testing.T, fakeAM *FakeExternalAlertmanager, expectedAlerts []*models2.PostableAlert) { + t.Helper() + require.Eventuallyf(t, func() bool { + return fakeAM.AlertsCount() == len(expectedAlerts) + }, 10*time.Second, 200*time.Millisecond, fmt.Sprintf("expected %d alerts to be delivered to remote Alertmanager but only %d was delivered", len(expectedAlerts), fakeAM.AlertsCount())) + require.Len(t, fakeAM.Alerts(), len(expectedAlerts)) +} + +func generatePostableAlert(t *testing.T, clk clock.Clock) models2.PostableAlert { + t.Helper() + u := url.URL{ + Scheme: "http", + Host: "localhost", + RawPath: "/" + util.GenerateShortUID(), + } + return models2.PostableAlert{ + Annotations: models2.LabelSet(models.GenerateAlertLabels(5, "ann-")), + EndsAt: strfmt.DateTime(clk.Now().Add(1 * time.Minute)), + StartsAt: strfmt.DateTime(clk.Now()), + Alert: models2.Alert{ + GeneratorURL: strfmt.URI(u.String()), + Labels: models2.LabelSet(models.GenerateAlertLabels(5, "lbl-")), + }, + } +} + +func 
createMultiOrgAlertmanager(t *testing.T, orgs []int64) *notifier.MultiOrgAlertmanager { + t.Helper() + + tmpDir := t.TempDir() + orgStore := notifier.NewFakeOrgStore(t, orgs) + + cfg := &setting.Cfg{ + DataPath: tmpDir, + UnifiedAlerting: setting.UnifiedAlertingSettings{ + AlertmanagerConfigPollInterval: 3 * time.Minute, + DefaultConfiguration: setting.GetAlertmanagerDefaultConfiguration(), + DisabledOrgs: map[int64]struct{}{}, + }, // do not poll in tests. + } + + cfgStore := notifier.NewFakeConfigStore(t, make(map[int64]*models.AlertConfiguration)) + kvStore := notifier.NewFakeKVStore(t) + registry := prometheus.NewPedanticRegistry() + m := metrics.NewNGAlert(registry) + secretsService := secretsManager.SetupTestService(t, fakes.NewFakeSecretsStore()) + decryptFn := secretsService.GetDecryptedValue + moa, err := notifier.NewMultiOrgAlertmanager(cfg, &cfgStore, &orgStore, kvStore, provisioning.NewFakeProvisioningStore(), decryptFn, m.GetMultiOrgAlertmanagerMetrics(), nil, log.New("testlogger"), secretsService) + require.NoError(t, err) + require.NoError(t, moa.LoadAndSyncAlertmanagersForOrgs(context.Background())) + require.Eventually(t, func() bool { + for _, org := range orgs { + _, err := moa.AlertmanagerFor(org) + if err != nil { + return false + } + } + return true + }, 10*time.Second, 100*time.Millisecond) + return moa +} diff --git a/pkg/services/ngalert/sender/testing.go b/pkg/services/ngalert/sender/testing.go new file mode 100644 index 00000000000..ffec49634bc --- /dev/null +++ b/pkg/services/ngalert/sender/testing.go @@ -0,0 +1,88 @@ +package sender + +import ( + "encoding/json" + "io/ioutil" + "net/http" + "net/http/httptest" + "sync" + "testing" + + amv2 "github.com/prometheus/alertmanager/api/v2/models" + "github.com/prometheus/common/model" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +type FakeExternalAlertmanager struct { + t *testing.T + mtx sync.Mutex + alerts amv2.PostableAlerts + Server *httptest.Server 
+} + +func NewFakeExternalAlertmanager(t *testing.T) *FakeExternalAlertmanager { + t.Helper() + + am := &FakeExternalAlertmanager{ + t: t, + alerts: amv2.PostableAlerts{}, + } + am.Server = httptest.NewServer(http.HandlerFunc(am.Handler())) + + return am +} + +func (am *FakeExternalAlertmanager) URL() string { + return am.Server.URL +} + +func (am *FakeExternalAlertmanager) AlertNamesCompare(expected []string) bool { + n := []string{} + alerts := am.Alerts() + + if len(expected) != len(alerts) { + return false + } + + for _, a := range am.Alerts() { + for k, v := range a.Alert.Labels { + if k == model.AlertNameLabel { + n = append(n, v) + } + } + } + + return assert.ObjectsAreEqual(expected, n) +} + +func (am *FakeExternalAlertmanager) AlertsCount() int { + am.mtx.Lock() + defer am.mtx.Unlock() + + return len(am.alerts) +} + +func (am *FakeExternalAlertmanager) Alerts() amv2.PostableAlerts { + am.mtx.Lock() + defer am.mtx.Unlock() + return am.alerts +} + +func (am *FakeExternalAlertmanager) Handler() func(w http.ResponseWriter, r *http.Request) { + return func(w http.ResponseWriter, r *http.Request) { + b, err := ioutil.ReadAll(r.Body) + require.NoError(am.t, err) + + a := amv2.PostableAlerts{} + require.NoError(am.t, json.Unmarshal(b, &a)) + + am.mtx.Lock() + am.alerts = append(am.alerts, a...) 
+ am.mtx.Unlock() + } +} + +func (am *FakeExternalAlertmanager) Close() { + am.Server.Close() +} diff --git a/pkg/services/ngalert/store/admin_configuration.go b/pkg/services/ngalert/store/admin_configuration.go index b7579adfca1..b30fd31ab83 100644 --- a/pkg/services/ngalert/store/admin_configuration.go +++ b/pkg/services/ngalert/store/admin_configuration.go @@ -17,6 +17,7 @@ type UpdateAdminConfigurationCmd struct { AdminConfiguration *ngmodels.AdminConfiguration } +//go:generate mockery --name AdminConfigurationStore --structname AdminConfigurationStoreMock --inpackage --filename admin_configuration_store_mock.go --with-expecter type AdminConfigurationStore interface { GetAdminConfiguration(orgID int64) (*ngmodels.AdminConfiguration, error) GetAdminConfigurations() ([]*ngmodels.AdminConfiguration, error) diff --git a/pkg/services/ngalert/store/admin_configuration_store_mock.go b/pkg/services/ngalert/store/admin_configuration_store_mock.go new file mode 100644 index 00000000000..5ec071ee40d --- /dev/null +++ b/pkg/services/ngalert/store/admin_configuration_store_mock.go @@ -0,0 +1,186 @@ +// Code generated by mockery v2.10.0. DO NOT EDIT. 
+ +package store + +import ( + models "github.com/grafana/grafana/pkg/services/ngalert/models" + mock "github.com/stretchr/testify/mock" +) + +// AdminConfigurationStoreMock is an autogenerated mock type for the AdminConfigurationStore type +type AdminConfigurationStoreMock struct { + mock.Mock +} + +type AdminConfigurationStoreMock_Expecter struct { + mock *mock.Mock +} + +func (_m *AdminConfigurationStoreMock) EXPECT() *AdminConfigurationStoreMock_Expecter { + return &AdminConfigurationStoreMock_Expecter{mock: &_m.Mock} +} + +// DeleteAdminConfiguration provides a mock function with given fields: orgID +func (_m *AdminConfigurationStoreMock) DeleteAdminConfiguration(orgID int64) error { + ret := _m.Called(orgID) + + var r0 error + if rf, ok := ret.Get(0).(func(int64) error); ok { + r0 = rf(orgID) + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// AdminConfigurationStoreMock_DeleteAdminConfiguration_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'DeleteAdminConfiguration' +type AdminConfigurationStoreMock_DeleteAdminConfiguration_Call struct { + *mock.Call +} + +// DeleteAdminConfiguration is a helper method to define mock.On call +// - orgID int64 +func (_e *AdminConfigurationStoreMock_Expecter) DeleteAdminConfiguration(orgID interface{}) *AdminConfigurationStoreMock_DeleteAdminConfiguration_Call { + return &AdminConfigurationStoreMock_DeleteAdminConfiguration_Call{Call: _e.mock.On("DeleteAdminConfiguration", orgID)} +} + +func (_c *AdminConfigurationStoreMock_DeleteAdminConfiguration_Call) Run(run func(orgID int64)) *AdminConfigurationStoreMock_DeleteAdminConfiguration_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(int64)) + }) + return _c +} + +func (_c *AdminConfigurationStoreMock_DeleteAdminConfiguration_Call) Return(_a0 error) *AdminConfigurationStoreMock_DeleteAdminConfiguration_Call { + _c.Call.Return(_a0) + return _c +} + +// GetAdminConfiguration provides a mock function with given 
fields: orgID +func (_m *AdminConfigurationStoreMock) GetAdminConfiguration(orgID int64) (*models.AdminConfiguration, error) { + ret := _m.Called(orgID) + + var r0 *models.AdminConfiguration + if rf, ok := ret.Get(0).(func(int64) *models.AdminConfiguration); ok { + r0 = rf(orgID) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*models.AdminConfiguration) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(int64) error); ok { + r1 = rf(orgID) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// AdminConfigurationStoreMock_GetAdminConfiguration_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'GetAdminConfiguration' +type AdminConfigurationStoreMock_GetAdminConfiguration_Call struct { + *mock.Call +} + +// GetAdminConfiguration is a helper method to define mock.On call +// - orgID int64 +func (_e *AdminConfigurationStoreMock_Expecter) GetAdminConfiguration(orgID interface{}) *AdminConfigurationStoreMock_GetAdminConfiguration_Call { + return &AdminConfigurationStoreMock_GetAdminConfiguration_Call{Call: _e.mock.On("GetAdminConfiguration", orgID)} +} + +func (_c *AdminConfigurationStoreMock_GetAdminConfiguration_Call) Run(run func(orgID int64)) *AdminConfigurationStoreMock_GetAdminConfiguration_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(int64)) + }) + return _c +} + +func (_c *AdminConfigurationStoreMock_GetAdminConfiguration_Call) Return(_a0 *models.AdminConfiguration, _a1 error) *AdminConfigurationStoreMock_GetAdminConfiguration_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +// GetAdminConfigurations provides a mock function with given fields: +func (_m *AdminConfigurationStoreMock) GetAdminConfigurations() ([]*models.AdminConfiguration, error) { + ret := _m.Called() + + var r0 []*models.AdminConfiguration + if rf, ok := ret.Get(0).(func() []*models.AdminConfiguration); ok { + r0 = rf() + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]*models.AdminConfiguration) + } + } + 
+ var r1 error + if rf, ok := ret.Get(1).(func() error); ok { + r1 = rf() + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// AdminConfigurationStoreMock_GetAdminConfigurations_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'GetAdminConfigurations' +type AdminConfigurationStoreMock_GetAdminConfigurations_Call struct { + *mock.Call +} + +// GetAdminConfigurations is a helper method to define mock.On call +func (_e *AdminConfigurationStoreMock_Expecter) GetAdminConfigurations() *AdminConfigurationStoreMock_GetAdminConfigurations_Call { + return &AdminConfigurationStoreMock_GetAdminConfigurations_Call{Call: _e.mock.On("GetAdminConfigurations")} +} + +func (_c *AdminConfigurationStoreMock_GetAdminConfigurations_Call) Run(run func()) *AdminConfigurationStoreMock_GetAdminConfigurations_Call { + _c.Call.Run(func(args mock.Arguments) { + run() + }) + return _c +} + +func (_c *AdminConfigurationStoreMock_GetAdminConfigurations_Call) Return(_a0 []*models.AdminConfiguration, _a1 error) *AdminConfigurationStoreMock_GetAdminConfigurations_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +// UpdateAdminConfiguration provides a mock function with given fields: _a0 +func (_m *AdminConfigurationStoreMock) UpdateAdminConfiguration(_a0 UpdateAdminConfigurationCmd) error { + ret := _m.Called(_a0) + + var r0 error + if rf, ok := ret.Get(0).(func(UpdateAdminConfigurationCmd) error); ok { + r0 = rf(_a0) + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// AdminConfigurationStoreMock_UpdateAdminConfiguration_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'UpdateAdminConfiguration' +type AdminConfigurationStoreMock_UpdateAdminConfiguration_Call struct { + *mock.Call +} + +// UpdateAdminConfiguration is a helper method to define mock.On call +// - _a0 UpdateAdminConfigurationCmd +func (_e *AdminConfigurationStoreMock_Expecter) UpdateAdminConfiguration(_a0 interface{}) 
*AdminConfigurationStoreMock_UpdateAdminConfiguration_Call { + return &AdminConfigurationStoreMock_UpdateAdminConfiguration_Call{Call: _e.mock.On("UpdateAdminConfiguration", _a0)} +} + +func (_c *AdminConfigurationStoreMock_UpdateAdminConfiguration_Call) Run(run func(_a0 UpdateAdminConfigurationCmd)) *AdminConfigurationStoreMock_UpdateAdminConfiguration_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(UpdateAdminConfigurationCmd)) + }) + return _c +} + +func (_c *AdminConfigurationStoreMock_UpdateAdminConfiguration_Call) Return(_a0 error) *AdminConfigurationStoreMock_UpdateAdminConfiguration_Call { + _c.Call.Return(_a0) + return _c +} diff --git a/pkg/services/ngalert/store/testing.go b/pkg/services/ngalert/store/testing.go index dade4b71746..c6cc5670060 100644 --- a/pkg/services/ngalert/store/testing.go +++ b/pkg/services/ngalert/store/testing.go @@ -2,12 +2,8 @@ package store import ( "context" - "encoding/json" "fmt" - "io/ioutil" "math/rand" - "net/http" - "net/http/httptest" "sync" "testing" @@ -16,11 +12,6 @@ import ( models2 "github.com/grafana/grafana/pkg/models" "github.com/grafana/grafana/pkg/services/ngalert/models" - - amv2 "github.com/prometheus/alertmanager/api/v2/models" - "github.com/prometheus/common/model" - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" ) func NewFakeRuleStore(t *testing.T) *FakeRuleStore { @@ -437,79 +428,6 @@ func (f *FakeAdminConfigStore) UpdateAdminConfiguration(cmd UpdateAdminConfigura return nil } -type FakeExternalAlertmanager struct { - t *testing.T - mtx sync.Mutex - alerts amv2.PostableAlerts - Server *httptest.Server -} - -func NewFakeExternalAlertmanager(t *testing.T) *FakeExternalAlertmanager { - t.Helper() - - am := &FakeExternalAlertmanager{ - t: t, - alerts: amv2.PostableAlerts{}, - } - am.Server = httptest.NewServer(http.HandlerFunc(am.Handler())) - - return am -} - -func (am *FakeExternalAlertmanager) URL() string { - return am.Server.URL -} - -func (am 
*FakeExternalAlertmanager) AlertNamesCompare(expected []string) bool { - n := []string{} - alerts := am.Alerts() - - if len(expected) != len(alerts) { - return false - } - - for _, a := range am.Alerts() { - for k, v := range a.Alert.Labels { - if k == model.AlertNameLabel { - n = append(n, v) - } - } - } - - return assert.ObjectsAreEqual(expected, n) -} - -func (am *FakeExternalAlertmanager) AlertsCount() int { - am.mtx.Lock() - defer am.mtx.Unlock() - - return len(am.alerts) -} - -func (am *FakeExternalAlertmanager) Alerts() amv2.PostableAlerts { - am.mtx.Lock() - defer am.mtx.Unlock() - return am.alerts -} - -func (am *FakeExternalAlertmanager) Handler() func(w http.ResponseWriter, r *http.Request) { - return func(w http.ResponseWriter, r *http.Request) { - b, err := ioutil.ReadAll(r.Body) - require.NoError(am.t, err) - - a := amv2.PostableAlerts{} - require.NoError(am.t, json.Unmarshal(b, &a)) - - am.mtx.Lock() - am.alerts = append(am.alerts, a...) - am.mtx.Unlock() - } -} - -func (am *FakeExternalAlertmanager) Close() { - am.Server.Close() -} - type FakeAnnotationsRepo struct { mtx sync.Mutex Items []*annotations.Item diff --git a/pkg/tests/api/alerting/api_admin_configuration_test.go b/pkg/tests/api/alerting/api_admin_configuration_test.go index 60a95c8ba56..3bde07f9996 100644 --- a/pkg/tests/api/alerting/api_admin_configuration_test.go +++ b/pkg/tests/api/alerting/api_admin_configuration_test.go @@ -15,7 +15,7 @@ import ( "github.com/grafana/grafana/pkg/models" apimodels "github.com/grafana/grafana/pkg/services/ngalert/api/tooling/definitions" ngmodels "github.com/grafana/grafana/pkg/services/ngalert/models" - "github.com/grafana/grafana/pkg/services/ngalert/store" + "github.com/grafana/grafana/pkg/services/ngalert/sender" "github.com/grafana/grafana/pkg/services/user" "github.com/grafana/grafana/pkg/tests/testinfra" ) @@ -54,9 +54,9 @@ func TestAdminConfiguration_SendingToExternalAlertmanagers(t *testing.T) { }) // Create a couple of "fake" Alertmanagers - 
fakeAM1 := store.NewFakeExternalAlertmanager(t) - fakeAM2 := store.NewFakeExternalAlertmanager(t) - fakeAM3 := store.NewFakeExternalAlertmanager(t) + fakeAM1 := sender.NewFakeExternalAlertmanager(t) + fakeAM2 := sender.NewFakeExternalAlertmanager(t) + fakeAM3 := sender.NewFakeExternalAlertmanager(t) // Now, let's test the configuration API. { From 39025bb4cd7d7349c5ac59d106e87652cb05df7f Mon Sep 17 00:00:00 2001 From: Ieva Date: Tue, 19 Jul 2022 14:34:49 +0100 Subject: [PATCH 060/116] add logs to debug failing migration (#52447) --- .../accesscontrol/dashboard_permissions.go | 27 ++++++++++--------- .../accesscontrol/permission_migrator.go | 12 +++++---- 2 files changed, 22 insertions(+), 17 deletions(-) diff --git a/pkg/services/sqlstore/migrations/accesscontrol/dashboard_permissions.go b/pkg/services/sqlstore/migrations/accesscontrol/dashboard_permissions.go index b9232e0b7bb..d734e3cd295 100644 --- a/pkg/services/sqlstore/migrations/accesscontrol/dashboard_permissions.go +++ b/pkg/services/sqlstore/migrations/accesscontrol/dashboard_permissions.go @@ -77,12 +77,12 @@ func (m dashboardPermissionsMigrator) Exec(sess *xorm.Session, migrator *migrato var dashboards []dashboard if err := m.sess.SQL("SELECT id, is_folder, folder_id, org_id FROM dashboard").Find(&dashboards); err != nil { - return err + return fmt.Errorf("failed to list dashboards: %w", err) } var acl []models.DashboardACL if err := m.sess.Find(&acl); err != nil { - return err + return fmt.Errorf("failed to list dashboard ACL: %w", err) } aclMap := make(map[int64][]models.DashboardACL, len(acl)) @@ -90,14 +90,14 @@ func (m dashboardPermissionsMigrator) Exec(sess *xorm.Session, migrator *migrato aclMap[p.DashboardID] = append(aclMap[p.DashboardID], p) } - if err := m.migratePermissions(dashboards, aclMap); err != nil { - return err + if err := m.migratePermissions(dashboards, aclMap, migrator); err != nil { + return fmt.Errorf("failed to migrate permissions: %w", err) } return nil } -func (m 
dashboardPermissionsMigrator) migratePermissions(dashboards []dashboard, aclMap map[int64][]models.DashboardACL) error { +func (m dashboardPermissionsMigrator) migratePermissions(dashboards []dashboard, aclMap map[int64][]models.DashboardACL, migrator *migrator.Migrator) error { permissionMap := map[int64]map[string][]*ac.Permission{} for _, d := range dashboards { if d.ID == -1 { @@ -133,7 +133,7 @@ func (m dashboardPermissionsMigrator) migratePermissions(dashboards []dashboard, for name := range roles { role, err := m.findRole(orgID, name) if err != nil { - return err + return fmt.Errorf("failed to find role %s: %w", name, err) } if role.ID == 0 { rolesToCreate = append(rolesToCreate, &ac.Role{OrgID: orgID, Name: name}) @@ -143,9 +143,10 @@ func (m dashboardPermissionsMigrator) migratePermissions(dashboards []dashboard, } } + migrator.Logger.Debug(fmt.Sprintf("bulk-creating roles %v", rolesToCreate)) createdRoles, err := m.bulkCreateRoles(rolesToCreate) if err != nil { - return err + return fmt.Errorf("failed to bulk-create roles: %w", err) } for i := range createdRoles { @@ -153,17 +154,18 @@ func (m dashboardPermissionsMigrator) migratePermissions(dashboards []dashboard, } if err := m.bulkAssignRoles(createdRoles); err != nil { - return err + return fmt.Errorf("failed to bulk-assign roles: %w", err) } - return m.setPermissions(allRoles, permissionMap) + return m.setPermissions(allRoles, permissionMap, migrator) } -func (m dashboardPermissionsMigrator) setPermissions(allRoles []*ac.Role, permissionMap map[int64]map[string][]*ac.Permission) error { +func (m dashboardPermissionsMigrator) setPermissions(allRoles []*ac.Role, permissionMap map[int64]map[string][]*ac.Permission, migrator *migrator.Migrator) error { now := time.Now() for _, role := range allRoles { + migrator.Logger.Debug(fmt.Sprintf("setting permissions for role %s with ID %d in org %d", role.Name, role.ID, role.OrgID)) if _, err := m.sess.Exec("DELETE FROM permission WHERE role_id = ? 
AND (action LIKE ? OR action LIKE ?)", role.ID, "dashboards%", "folders%"); err != nil { - return err + return fmt.Errorf("failed to clear dashboard and folder permissions for role: %w", err) } var permissions []ac.Permission mappedPermissions := permissionMap[role.OrgID][role.Name] @@ -178,8 +180,9 @@ func (m dashboardPermissionsMigrator) setPermissions(allRoles []*ac.Role, permis } err := batch(len(permissions), batchSize, func(start, end int) error { + migrator.Logger.Debug(fmt.Sprintf("inserting permissions %v", permissions[start:end])) if _, err := m.sess.InsertMulti(permissions[start:end]); err != nil { - return err + return fmt.Errorf("failed to create permissions for role: %w", err) } return nil }) diff --git a/pkg/services/sqlstore/migrations/accesscontrol/permission_migrator.go b/pkg/services/sqlstore/migrations/accesscontrol/permission_migrator.go index 7f17ecc13cd..5e5fbcc8354 100644 --- a/pkg/services/sqlstore/migrations/accesscontrol/permission_migrator.go +++ b/pkg/services/sqlstore/migrations/accesscontrol/permission_migrator.go @@ -110,7 +110,7 @@ func (m *permissionMigrator) bulkAssignRoles(allRoles []*accesscontrol.Role) err return err }) if err != nil { - return err + return fmt.Errorf("failed to create user role assignments: %w", err) } err = batch(len(teamRoleAssignments), batchSize, func(start, end int) error { @@ -118,12 +118,14 @@ func (m *permissionMigrator) bulkAssignRoles(allRoles []*accesscontrol.Role) err return err }) if err != nil { - return err + return fmt.Errorf("failed to create team role assignments: %w", err) } return batch(len(builtInRoleAssignments), batchSize, func(start, end int) error { - _, err := m.sess.Table("builtin_role").InsertMulti(builtInRoleAssignments[start:end]) - return err + if _, err := m.sess.Table("builtin_role").InsertMulti(builtInRoleAssignments[start:end]); err != nil { + return fmt.Errorf("failed to create builtin role assignments: %w", err) + } + return nil }) } @@ -148,7 +150,7 @@ func (m 
*permissionMigrator) createRoles(roles []*accesscontrol.Role) ([]*access valueString := strings.Join(valueStrings, ",") sql := fmt.Sprintf("INSERT INTO role (org_id, uid, name, version, created, updated) VALUES %s RETURNING id, org_id, name", valueString) if errCreate := m.sess.SQL(sql, args...).Find(&createdRoles); errCreate != nil { - return nil, errCreate + return nil, fmt.Errorf("failed to create roles: %w", errCreate) } return createdRoles, nil From c061b66d5fa0fc5d0393701f225460371bfa1cce Mon Sep 17 00:00:00 2001 From: idafurjes <36131195+idafurjes@users.noreply.github.com> Date: Tue, 19 Jul 2022 16:01:05 +0200 Subject: [PATCH 061/116] Chore: Split delete user method (#52216) * Remove user from preferences, stars, orguser, team member * Fix lint * Add Delete user from org and dashboard acl * Delete user from user auth * Add DeleteUser to quota * Add test files and adjust user auth store * Rename package in wire for user auth * Import Quota Service interface in other services * do the same in tests * fix lint tests * Fix tests * Add some tests * Rename InsertUser and DeleteUser to InsertOrgUser and DeleteOrgUser * Rename DeleteUser to DeleteByUser in quota * changing a method name in few additional places * Fix in other places * Fix lint * Fix tests * Chore: Split Delete User method * Add fakes for userauth * Add mock for access control Delete User permossion, use interface * Use interface for ream guardian * Add simple fake for dashboard acl * Add go routines, clean up, use interfaces * fix lint * Update pkg/services/user/userimpl/user_test.go Co-authored-by: Sofia Papagiannaki <1632407+papagian@users.noreply.github.com> * Update pkg/services/user/userimpl/user_test.go Co-authored-by: Sofia Papagiannaki <1632407+papagian@users.noreply.github.com> * Update pkg/services/user/userimpl/user_test.go Co-authored-by: Sofia Papagiannaki <1632407+papagian@users.noreply.github.com> * Add wrapper for not service account error * fix indentation * Use fmt for error 
wrapper Co-authored-by: Sofia Papagiannaki <1632407+papagian@users.noreply.github.com> --- pkg/api/http_server.go | 4 +- pkg/server/wire.go | 3 +- pkg/services/accesscontrol/accesscontrol.go | 3 + pkg/services/accesscontrol/mock/mock.go | 11 ++ .../ossaccesscontrol/ossaccesscontrol.go | 4 + pkg/services/dashboards/dashboard.go | 1 + .../dashboards/dashboard_service_mock.go | 5 + .../service/dashboard_service_test.go | 14 ++ pkg/services/dashboards/store_mock.go | 12 +- pkg/services/org/orgimpl/store_test.go | 5 + pkg/services/teamguardian/manager/service.go | 2 +- .../teamguardian/manager/service_mock.go | 10 +- pkg/services/user/model.go | 8 +- pkg/services/user/user.go | 1 + pkg/services/user/userimpl/store.go | 45 ++++++- pkg/services/user/userimpl/user.go | 121 +++++++++++++++++- pkg/services/user/userimpl/user_test.go | 74 ++++++++++- pkg/services/user/usertest/fake.go | 4 + pkg/services/userauth/userauth.go | 4 +- pkg/services/userauth/userauthtest/fake.go | 19 +++ 20 files changed, 315 insertions(+), 35 deletions(-) create mode 100644 pkg/services/userauth/userauthtest/fake.go diff --git a/pkg/api/http_server.go b/pkg/api/http_server.go index 532a0b201f1..c0d466fe2ab 100644 --- a/pkg/api/http_server.go +++ b/pkg/api/http_server.go @@ -208,8 +208,8 @@ func ProvideHTTPServer(opts ServerOptions, cfg *setting.Cfg, routeRegister routi avatarCacheServer *avatar.AvatarCacheServer, preferenceService pref.Service, entityEventsService store.EntityEventsService, teamsPermissionsService accesscontrol.TeamPermissionsService, folderPermissionsService accesscontrol.FolderPermissionsService, dashboardPermissionsService accesscontrol.DashboardPermissionsService, dashboardVersionService dashver.Service, - starService star.Service, playlistService playlist.Service, csrfService csrf.Service, coremodelRegistry *registry.Generic, coremodelStaticRegistry *registry.Static, - kvStore kvstore.KVStore, secretsMigrator secrets.Migrator, remoteSecretsCheck 
secretsKV.UseRemoteSecretsPluginCheck, + starService star.Service, csrfService csrf.Service, coremodelRegistry *registry.Generic, coremodelStaticRegistry *registry.Static, + playlistService playlist.Service, kvStore kvstore.KVStore, secretsMigrator secrets.Migrator, remoteSecretsCheck secretsKV.UseRemoteSecretsPluginCheck, publicDashboardsApi *publicdashboardsApi.Api, userService user.Service) (*HTTPServer, error) { web.Env = cfg.Env m := web.New() diff --git a/pkg/server/wire.go b/pkg/server/wire.go index 70ef423e848..4c781ad3c14 100644 --- a/pkg/server/wire.go +++ b/pkg/server/wire.go @@ -110,6 +110,7 @@ import ( "github.com/grafana/grafana/pkg/services/thumbs" "github.com/grafana/grafana/pkg/services/updatechecker" "github.com/grafana/grafana/pkg/services/user/userimpl" + "github.com/grafana/grafana/pkg/services/userauth/userauthimpl" "github.com/grafana/grafana/pkg/setting" "github.com/grafana/grafana/pkg/tsdb/azuremonitor" "github.com/grafana/grafana/pkg/tsdb/cloudmonitoring" @@ -251,7 +252,6 @@ var wireBasicSet = wire.NewSet( teamguardianDatabase.ProvideTeamGuardianStore, wire.Bind(new(teamguardian.Store), new(*teamguardianDatabase.TeamGuardianStoreImpl)), teamguardianManager.ProvideService, - wire.Bind(new(teamguardian.TeamGuardian), new(*teamguardianManager.Service)), featuremgmt.ProvideManagerService, featuremgmt.ProvideToggles, dashboardservice.ProvideDashboardService, @@ -299,6 +299,7 @@ var wireBasicSet = wire.NewSet( datasourceservice.ProvideDataSourceMigrationService, secretsMigrations.ProvideSecretMigrationService, wire.Bind(new(secretsMigrations.SecretMigrationService), new(*secretsMigrations.SecretMigrationServiceImpl)), + userauthimpl.ProvideService, ) var wireSet = wire.NewSet( diff --git a/pkg/services/accesscontrol/accesscontrol.go b/pkg/services/accesscontrol/accesscontrol.go index c4c30ed4c69..45dcafc01d6 100644 --- a/pkg/services/accesscontrol/accesscontrol.go +++ b/pkg/services/accesscontrol/accesscontrol.go @@ -33,6 +33,8 @@ type 
AccessControl interface { // RegisterScopeAttributeResolver allows the caller to register a scope resolver for a // specific scope prefix (ex: datasources:name:) RegisterScopeAttributeResolver(scopePrefix string, resolver ScopeAttributeResolver) + + DeleteUserPermissions(ctx context.Context, userID int64) error } type RoleRegistry interface { @@ -43,6 +45,7 @@ type RoleRegistry interface { type PermissionsStore interface { // GetUserPermissions returns user permissions with only action and scope fields set. GetUserPermissions(ctx context.Context, query GetUserPermissionsQuery) ([]Permission, error) + DeleteUserPermissions(ctx context.Context, userID int64) error } type TeamPermissionsService interface { diff --git a/pkg/services/accesscontrol/mock/mock.go b/pkg/services/accesscontrol/mock/mock.go index 49085a81083..82ab38692b8 100644 --- a/pkg/services/accesscontrol/mock/mock.go +++ b/pkg/services/accesscontrol/mock/mock.go @@ -21,6 +21,7 @@ type Calls struct { GetUserBuiltInRoles []interface{} RegisterFixedRoles []interface{} RegisterAttributeScopeResolver []interface{} + DeleteUserPermissions []interface{} } type Mock struct { @@ -42,6 +43,7 @@ type Mock struct { GetUserBuiltInRolesFunc func(user *models.SignedInUser) []string RegisterFixedRolesFunc func() error RegisterScopeAttributeResolverFunc func(string, accesscontrol.ScopeAttributeResolver) + DeleteUserPermissionsFunc func(context.Context, int64) error scopeResolvers accesscontrol.ScopeResolvers } @@ -180,3 +182,12 @@ func (m *Mock) RegisterScopeAttributeResolver(scopePrefix string, resolver acces m.RegisterScopeAttributeResolverFunc(scopePrefix, resolver) } } + +func (m *Mock) DeleteUserPermissions(ctx context.Context, userID int64) error { + m.Calls.DeleteUserPermissions = append(m.Calls.DeleteUserPermissions, []interface{}{ctx, userID}) + // Use override if provided + if m.DeleteUserPermissionsFunc != nil { + return m.DeleteUserPermissionsFunc(ctx, userID) + } + return nil +} diff --git 
a/pkg/services/accesscontrol/ossaccesscontrol/ossaccesscontrol.go b/pkg/services/accesscontrol/ossaccesscontrol/ossaccesscontrol.go index 7ef48a53104..410fc2994e2 100644 --- a/pkg/services/accesscontrol/ossaccesscontrol/ossaccesscontrol.go +++ b/pkg/services/accesscontrol/ossaccesscontrol/ossaccesscontrol.go @@ -198,3 +198,7 @@ func (ac *OSSAccessControlService) DeclareFixedRoles(registrations ...accesscont func (ac *OSSAccessControlService) RegisterScopeAttributeResolver(scopePrefix string, resolver accesscontrol.ScopeAttributeResolver) { ac.scopeResolvers.AddScopeAttributeResolver(scopePrefix, resolver) } + +func (ac *OSSAccessControlService) DeleteUserPermissions(ctx context.Context, userID int64) error { + return ac.store.DeleteUserPermissions(ctx, userID) +} diff --git a/pkg/services/dashboards/dashboard.go b/pkg/services/dashboards/dashboard.go index 2e5bb29725a..2ab4f480f41 100644 --- a/pkg/services/dashboards/dashboard.go +++ b/pkg/services/dashboards/dashboard.go @@ -24,6 +24,7 @@ type DashboardService interface { SaveDashboard(ctx context.Context, dto *SaveDashboardDTO, allowUiUpdate bool) (*models.Dashboard, error) SearchDashboards(ctx context.Context, query *models.FindPersistedDashboardsQuery) error UpdateDashboardACL(ctx context.Context, uid int64, items []*models.DashboardACL) error + DeleteACLByUser(ctx context.Context, userID int64) error } // PluginService is a service for operating on plugin dashboards. 
diff --git a/pkg/services/dashboards/dashboard_service_mock.go b/pkg/services/dashboards/dashboard_service_mock.go index e9ab55529b2..0b9e3486468 100644 --- a/pkg/services/dashboards/dashboard_service_mock.go +++ b/pkg/services/dashboards/dashboard_service_mock.go @@ -14,6 +14,7 @@ import ( // FakeDashboardService is an autogenerated mock type for the DashboardService type type FakeDashboardService struct { mock.Mock + ExpectedError error } // BuildSaveDashboardCommand provides a mock function with given fields: ctx, dto, shouldValidateAlerts, validateProvisionedDashboard @@ -262,6 +263,10 @@ func (_m *FakeDashboardService) UpdateDashboardACL(ctx context.Context, uid int6 return r0 } +func (_m *FakeDashboardService) DeleteACLByUser(ctx context.Context, userID int64) error { + return _m.ExpectedError +} + // NewFakeDashboardService creates a new instance of FakeDashboardService. It also registers the testing.TB interface on the mock and a cleanup function to assert the mocks expectations. 
func NewFakeDashboardService(t testing.TB) *FakeDashboardService { mock := &FakeDashboardService{} diff --git a/pkg/services/dashboards/service/dashboard_service_test.go b/pkg/services/dashboards/service/dashboard_service_test.go index e0593cabad1..0b730c5f53f 100644 --- a/pkg/services/dashboards/service/dashboard_service_test.go +++ b/pkg/services/dashboards/service/dashboard_service_test.go @@ -223,4 +223,18 @@ func TestDashboardService(t *testing.T) { // }) }) }) + + t.Run("Delete user by acl", func(t *testing.T) { + fakeStore := dashboards.FakeDashboardStore{} + defer fakeStore.AssertExpectations(t) + + service := &DashboardServiceImpl{ + cfg: setting.NewCfg(), + log: log.New("test.logger"), + dashboardStore: &fakeStore, + dashAlertExtractor: &dummyDashAlertExtractor{}, + } + err := service.DeleteACLByUser(context.Background(), 1) + require.NoError(t, err) + }) } diff --git a/pkg/services/dashboards/store_mock.go b/pkg/services/dashboards/store_mock.go index a84db251092..63af437bd35 100644 --- a/pkg/services/dashboards/store_mock.go +++ b/pkg/services/dashboards/store_mock.go @@ -14,6 +14,7 @@ import ( // FakeDashboardStore is an autogenerated mock type for the Store type type FakeDashboardStore struct { mock.Mock + ExpectedError error } // DeleteDashboard provides a mock function with given fields: ctx, cmd @@ -436,16 +437,7 @@ func (_m *FakeDashboardStore) ValidateDashboardBeforeSave(dashboard *models.Dash } func (_m *FakeDashboardStore) DeleteACLByUser(ctx context.Context, userID int64) error { - ret := _m.Called(ctx, userID) - - var r0 error - if rf, ok := ret.Get(0).(func(context.Context, int64) error); ok { - r0 = rf(ctx, userID) - } else { - r0 = ret.Error(0) - } - - return r0 + return _m.ExpectedError } // NewFakeDashboardStore creates a new instance of FakeDashboardStore. It also registers the testing.TB interface on the mock and a cleanup function to assert the mocks expectations. 
diff --git a/pkg/services/org/orgimpl/store_test.go b/pkg/services/org/orgimpl/store_test.go index 3dd505ab833..04b2c1ffe6e 100644 --- a/pkg/services/org/orgimpl/store_test.go +++ b/pkg/services/org/orgimpl/store_test.go @@ -48,6 +48,11 @@ func TestIntegrationOrgDataAccess(t *testing.T) { _, err = orgStore.Get(context.Background(), orgID) require.NoError(t, err) }) + + t.Run("delete by user", func(t *testing.T) { + err := orgStore.DeleteUserFromAll(context.Background(), 1) + require.NoError(t, err) + }) } func TestIntegrationOrgUserDataAccess(t *testing.T) { diff --git a/pkg/services/teamguardian/manager/service.go b/pkg/services/teamguardian/manager/service.go index 7349f2732a1..9e2ccbf354e 100644 --- a/pkg/services/teamguardian/manager/service.go +++ b/pkg/services/teamguardian/manager/service.go @@ -11,7 +11,7 @@ type Service struct { store teamguardian.Store } -func ProvideService(store teamguardian.Store) *Service { +func ProvideService(store teamguardian.Store) teamguardian.TeamGuardian { return &Service{store: store} } diff --git a/pkg/services/teamguardian/manager/service_mock.go b/pkg/services/teamguardian/manager/service_mock.go index dbe5a1107ab..dbd37fbb006 100644 --- a/pkg/services/teamguardian/manager/service_mock.go +++ b/pkg/services/teamguardian/manager/service_mock.go @@ -9,6 +9,11 @@ import ( type TeamGuardianMock struct { mock.Mock + ExpectedError error +} + +func NewTeamGuardianMock() *TeamGuardianMock { + return &TeamGuardianMock{} } func (t *TeamGuardianMock) CanAdmin(ctx context.Context, orgId int64, teamId int64, user *models.SignedInUser) error { @@ -16,7 +21,6 @@ func (t *TeamGuardianMock) CanAdmin(ctx context.Context, orgId int64, teamId int return args.Error(0) } -func (t *TeamGuardianMock) DeleteByUser(context.Context, int64) error { - args := t.Called(context.Background(), 0) - return args.Error(0) +func (t *TeamGuardianMock) DeleteByUser(ctx context.Context, userID int64) error { + return t.ExpectedError } diff --git 
a/pkg/services/user/model.go b/pkg/services/user/model.go index beb22502ec7..e7425d69b9d 100644 --- a/pkg/services/user/model.go +++ b/pkg/services/user/model.go @@ -1,6 +1,8 @@ package user -import "time" +import ( + "time" +) type HelpFlags1 uint64 @@ -53,3 +55,7 @@ func (u *User) NameOrFallback() string { } return u.Email } + +type DeleteUserCommand struct { + UserID int64 +} diff --git a/pkg/services/user/user.go b/pkg/services/user/user.go index dc04e6c3e4d..d720d4f5c10 100644 --- a/pkg/services/user/user.go +++ b/pkg/services/user/user.go @@ -6,4 +6,5 @@ import ( type Service interface { Create(context.Context, *CreateUserCommand) (*User, error) + Delete(context.Context, *DeleteUserCommand) error } diff --git a/pkg/services/user/userimpl/store.go b/pkg/services/user/userimpl/store.go index 33afdf891b3..9dabdf39cb6 100644 --- a/pkg/services/user/userimpl/store.go +++ b/pkg/services/user/userimpl/store.go @@ -2,21 +2,26 @@ package userimpl import ( "context" + "fmt" "github.com/grafana/grafana/pkg/events" "github.com/grafana/grafana/pkg/models" "github.com/grafana/grafana/pkg/services/sqlstore" "github.com/grafana/grafana/pkg/services/sqlstore/db" + "github.com/grafana/grafana/pkg/services/sqlstore/migrator" "github.com/grafana/grafana/pkg/services/user" ) type store interface { Insert(context.Context, *user.User) (int64, error) Get(context.Context, *user.User) (*user.User, error) + GetNotServiceAccount(context.Context, int64) (*user.User, error) + Delete(context.Context, int64) error } type sqlStore struct { - db db.DB + db db.DB + dialect migrator.Dialect } func (ss *sqlStore) Insert(ctx context.Context, cmd *user.User) (int64, error) { @@ -43,10 +48,9 @@ func (ss *sqlStore) Insert(ctx context.Context, cmd *user.User) (int64, error) { return userID, nil } -func (ss *sqlStore) Get(ctx context.Context, cmd *user.User) (*user.User, error) { - var usr *user.User +func (ss *sqlStore) Get(ctx context.Context, usr *user.User) (*user.User, error) { err := 
ss.db.WithDbSession(ctx, func(sess *sqlstore.DBSession) error { - exists, err := sess.Where("email=? OR login=?", cmd.Email, cmd.Login).Get(&user.User{}) + exists, err := sess.Where("email=? OR login=?", usr.Email, usr.Login).Get(usr) if !exists { return models.ErrUserNotFound } @@ -60,3 +64,36 @@ func (ss *sqlStore) Get(ctx context.Context, cmd *user.User) (*user.User, error) } return usr, nil } + +func (ss *sqlStore) Delete(ctx context.Context, userID int64) error { + err := ss.db.WithDbSession(ctx, func(sess *sqlstore.DBSession) error { + var rawSQL = "DELETE FROM " + ss.dialect.Quote("user") + " WHERE id = ?" + _, err := sess.Exec(rawSQL, userID) + return err + }) + if err != nil { + return err + } + return nil +} + +func (ss *sqlStore) GetNotServiceAccount(ctx context.Context, userID int64) (*user.User, error) { + user := user.User{ID: userID} + err := ss.db.WithDbSession(ctx, func(sess *sqlstore.DBSession) error { + has, err := sess.Where(ss.notServiceAccountFilter()).Get(&user) + if err != nil { + return err + } + if !has { + return models.ErrUserNotFound + } + return nil + }) + return &user, err +} + +func (ss *sqlStore) notServiceAccountFilter() string { + return fmt.Sprintf("%s.is_service_account = %s", + ss.dialect.Quote("user"), + ss.dialect.BooleanStr(false)) +} diff --git a/pkg/services/user/userimpl/user.go b/pkg/services/user/userimpl/user.go index 62f925db4df..cac3f4e83e1 100644 --- a/pkg/services/user/userimpl/user.go +++ b/pkg/services/user/userimpl/user.go @@ -3,27 +3,62 @@ package userimpl import ( "context" "errors" + "fmt" "time" "github.com/grafana/grafana/pkg/models" + "github.com/grafana/grafana/pkg/services/accesscontrol" + "github.com/grafana/grafana/pkg/services/dashboards" "github.com/grafana/grafana/pkg/services/org" + pref "github.com/grafana/grafana/pkg/services/preference" + "github.com/grafana/grafana/pkg/services/quota" "github.com/grafana/grafana/pkg/services/sqlstore/db" + "github.com/grafana/grafana/pkg/services/star" + 
"github.com/grafana/grafana/pkg/services/teamguardian" "github.com/grafana/grafana/pkg/services/user" + "github.com/grafana/grafana/pkg/services/userauth" "github.com/grafana/grafana/pkg/setting" "github.com/grafana/grafana/pkg/util" + + "golang.org/x/sync/errgroup" ) type Service struct { - store store - orgService org.Service + store store + orgService org.Service + starService star.Service + dashboardService dashboards.DashboardService + preferenceService pref.Service + teamMemberService teamguardian.TeamGuardian + userAuthService userauth.Service + quotaService quota.Service + accessControlStore accesscontrol.AccessControl } -func ProvideService(db db.DB, orgService org.Service) user.Service { +func ProvideService( + db db.DB, + orgService org.Service, + starService star.Service, + dashboardService dashboards.DashboardService, + preferenceService pref.Service, + teamMemberService teamguardian.TeamGuardian, + userAuthService userauth.Service, + quotaService quota.Service, + accessControlStore accesscontrol.AccessControl, +) user.Service { return &Service{ store: &sqlStore{ - db: db, + db: db, + dialect: db.GetDialect(), }, - orgService: orgService, + orgService: orgService, + starService: starService, + dashboardService: dashboardService, + preferenceService: preferenceService, + teamMemberService: teamMemberService, + userAuthService: userAuthService, + quotaService: quotaService, + accessControlStore: accessControlStore, } } @@ -88,7 +123,7 @@ func (s *Service) Create(ctx context.Context, cmd *user.CreateUserCommand) (*use usr.Password = encodedPassword } - _, err = s.store.Insert(ctx, usr) + userID, err := s.store.Insert(ctx, usr) if err != nil { return nil, err } @@ -112,10 +147,82 @@ func (s *Service) Create(ctx context.Context, cmd *user.CreateUserCommand) (*use } _, err = s.orgService.InsertOrgUser(ctx, &orgUser) if err != nil { - // HERE ADD DELETE USER + err := s.store.Delete(ctx, userID) return usr, err } } return usr, nil } + +func (s *Service) 
Delete(ctx context.Context, cmd *user.DeleteUserCommand) error { + _, err := s.store.GetNotServiceAccount(ctx, cmd.UserID) + if err != nil { + return fmt.Errorf("failed to get user with not service account: %w", err) + } + // delete from all the stores + if err := s.store.Delete(ctx, cmd.UserID); err != nil { + return err + } + + g, ctx := errgroup.WithContext(ctx) + g.Go(func() error { + if err := s.starService.DeleteByUser(ctx, cmd.UserID); err != nil { + return err + } + return nil + }) + g.Go(func() error { + if err := s.orgService.DeleteUserFromAll(ctx, cmd.UserID); err != nil { + return err + } + return nil + }) + g.Go(func() error { + if err := s.dashboardService.DeleteACLByUser(ctx, cmd.UserID); err != nil { + return err + } + return nil + }) + g.Go(func() error { + if err := s.preferenceService.DeleteByUser(ctx, cmd.UserID); err != nil { + return err + } + return nil + }) + g.Go(func() error { + if err := s.teamMemberService.DeleteByUser(ctx, cmd.UserID); err != nil { + return err + } + return nil + }) + g.Go(func() error { + if err := s.userAuthService.Delete(ctx, cmd.UserID); err != nil { + return err + } + return nil + }) + g.Go(func() error { + if err := s.userAuthService.DeleteToken(ctx, cmd.UserID); err != nil { + return err + } + return nil + }) + g.Go(func() error { + if err := s.quotaService.DeleteByUser(ctx, cmd.UserID); err != nil { + return err + } + return nil + }) + g.Go(func() error { + if err := s.accessControlStore.DeleteUserPermissions(ctx, cmd.UserID); err != nil { + return err + } + return nil + }) + if err := g.Wait(); err != nil { + return err + } + + return nil +} diff --git a/pkg/services/user/userimpl/user_test.go b/pkg/services/user/userimpl/user_test.go index 4c75a8e6b0e..7e982e5bd38 100644 --- a/pkg/services/user/userimpl/user_test.go +++ b/pkg/services/user/userimpl/user_test.go @@ -2,30 +2,88 @@ package userimpl import ( "context" + "errors" "testing" + "github.com/grafana/grafana/pkg/models" + 
"github.com/grafana/grafana/pkg/services/accesscontrol/mock" + "github.com/grafana/grafana/pkg/services/dashboards" "github.com/grafana/grafana/pkg/services/org/orgtest" + "github.com/grafana/grafana/pkg/services/preference/preftest" + "github.com/grafana/grafana/pkg/services/quota/quotatest" + "github.com/grafana/grafana/pkg/services/star/startest" + "github.com/grafana/grafana/pkg/services/teamguardian/manager" "github.com/grafana/grafana/pkg/services/user" + "github.com/grafana/grafana/pkg/services/userauth/userauthtest" "github.com/stretchr/testify/require" ) func TestUserService(t *testing.T) { userStore := newUserStoreFake() orgService := orgtest.NewOrgServiceFake() + starService := startest.NewStarServiceFake() + dashboardService := dashboards.NewFakeDashboardService(t) + preferenceService := preftest.NewPreferenceServiceFake() + teamMemberService := manager.NewTeamGuardianMock() + userAuthService := userauthtest.NewFakeUserAuthService() + quotaService := quotatest.NewQuotaServiceFake() + accessControlStore := mock.New() userService := Service{ - store: userStore, - orgService: orgService, + store: userStore, + orgService: orgService, + starService: starService, + dashboardService: dashboardService, + preferenceService: preferenceService, + teamMemberService: teamMemberService, + userAuthService: userAuthService, + quotaService: quotaService, + accessControlStore: accessControlStore, } t.Run("create user", func(t *testing.T) { _, err := userService.Create(context.Background(), &user.CreateUserCommand{}) require.NoError(t, err) }) + + t.Run("delete user store returns error", func(t *testing.T) { + userStore.ExpectedDeleteUserError = models.ErrUserNotFound + t.Cleanup(func() { + userStore.ExpectedDeleteUserError = nil + }) + err := userService.Delete(context.Background(), &user.DeleteUserCommand{UserID: 1}) + require.Error(t, err, models.ErrUserNotFound) + }) + + t.Run("delete user returns from team", func(t *testing.T) { + teamMemberService.ExpectedError = 
errors.New("some error") + t.Cleanup(func() { + teamMemberService.ExpectedError = nil + }) + err := userService.Delete(context.Background(), &user.DeleteUserCommand{UserID: 1}) + require.Error(t, err) + }) + + t.Run("delete user returns from team and pref", func(t *testing.T) { + teamMemberService.ExpectedError = errors.New("some error") + preferenceService.ExpectedError = errors.New("some error 2") + t.Cleanup(func() { + teamMemberService.ExpectedError = nil + preferenceService.ExpectedError = nil + }) + err := userService.Delete(context.Background(), &user.DeleteUserCommand{UserID: 1}) + require.Error(t, err) + }) + + t.Run("delete user successfully", func(t *testing.T) { + err := userService.Delete(context.Background(), &user.DeleteUserCommand{UserID: 1}) + require.NoError(t, err) + }) } type FakeUserStore struct { - ExpectedUser *user.User - ExpectedError error + ExpectedUser *user.User + ExpectedError error + ExpectedDeleteUserError error } func newUserStoreFake() *FakeUserStore { @@ -39,3 +97,11 @@ func (f *FakeUserStore) Get(ctx context.Context, query *user.User) (*user.User, func (f *FakeUserStore) Insert(ctx context.Context, query *user.User) (int64, error) { return 0, f.ExpectedError } + +func (f *FakeUserStore) Delete(ctx context.Context, userID int64) error { + return f.ExpectedDeleteUserError +} + +func (f *FakeUserStore) GetNotServiceAccount(ctx context.Context, userID int64) (*user.User, error) { + return f.ExpectedUser, f.ExpectedError +} diff --git a/pkg/services/user/usertest/fake.go b/pkg/services/user/usertest/fake.go index 5863b592fab..c33090ca968 100644 --- a/pkg/services/user/usertest/fake.go +++ b/pkg/services/user/usertest/fake.go @@ -18,3 +18,7 @@ func NewUserServiceFake() *FakeUserService { func (f *FakeUserService) Create(ctx context.Context, cmd *user.CreateUserCommand) (*user.User, error) { return f.ExpectedUser, f.ExpectedError } + +func (f *FakeUserService) Delete(ctx context.Context, cmd *user.DeleteUserCommand) error { + return 
f.ExpectedError +} diff --git a/pkg/services/userauth/userauth.go b/pkg/services/userauth/userauth.go index 8751e953bc9..e0cb1f5c7ee 100644 --- a/pkg/services/userauth/userauth.go +++ b/pkg/services/userauth/userauth.go @@ -3,6 +3,6 @@ package userauth import "context" type Service interface { - Delete(ctx context.Context, userID int64) error - DeleteToken(ctx context.Context, userID int64) error + Delete(context.Context, int64) error + DeleteToken(context.Context, int64) error } diff --git a/pkg/services/userauth/userauthtest/fake.go b/pkg/services/userauth/userauthtest/fake.go new file mode 100644 index 00000000000..d7a3b0bb7d7 --- /dev/null +++ b/pkg/services/userauth/userauthtest/fake.go @@ -0,0 +1,19 @@ +package userauthtest + +import "context" + +type FakeUserAuthService struct { + ExpectedError error +} + +func NewFakeUserAuthService() *FakeUserAuthService { + return &FakeUserAuthService{} +} + +func (f *FakeUserAuthService) Delete(ctx context.Context, userID int64) error { + return f.ExpectedError +} + +func (f *FakeUserAuthService) DeleteToken(ctx context.Context, userID int64) error { + return f.ExpectedError +} From 2b7fe7251cbf3a318e8ba37925f16fb5b2c65607 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Agn=C3=A8s=20Toulet?= <35176601+AgnesToulet@users.noreply.github.com> Date: Tue, 19 Jul 2022 16:23:44 +0200 Subject: [PATCH 062/116] Docs: Update query caching support (#50706) * Docs: Update query caching support * Update query-caching.md --- docs/sources/enterprise/query-caching.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/sources/enterprise/query-caching.md b/docs/sources/enterprise/query-caching.md index 6cd0fbeb8db..ae0461edb6d 100644 --- a/docs/sources/enterprise/query-caching.md +++ b/docs/sources/enterprise/query-caching.md @@ -15,7 +15,7 @@ weight: 300 When query caching is enabled, Grafana temporarily stores the results of data source queries. 
When you or another user submit the exact same query again, the results will come back from the cache instead of from the data source (like Splunk or ServiceNow) itself. -Query caching works for all backend data sources, and queries sent through the data source proxy. You can enable the cache globally and configure the cache duration (also called Time to Live, or TTL). +Query caching works for all backend data sources. You can enable the cache globally and configure the cache duration (also called Time to Live, or TTL). > **Note:** Available in [Grafana Enterprise]({{< relref "../enterprise/" >}}) and [Grafana Cloud Pro and Advanced]({{< ref "/grafana-cloud" >}}). From 0300b1c6740333f1b1e09ce064daae78a5ebd6ee Mon Sep 17 00:00:00 2001 From: Artur Wierzbicki Date: Tue, 19 Jul 2022 18:47:11 +0400 Subject: [PATCH 063/116] Storage: branding access (#52412) * Storage: add special users for system branding access * Storage: explicit global org id * Storage: initialize org storages with global org id * Storage: add comments * Storage: simplify - use orgId: 1 for systembranding --- pkg/services/store/service.go | 47 +++++++++++++++++++++++++++++------ 1 file changed, 39 insertions(+), 8 deletions(-) diff --git a/pkg/services/store/service.go b/pkg/services/store/service.go index 480fee1b6eb..6278719602e 100644 --- a/pkg/services/store/service.go +++ b/pkg/services/store/service.go @@ -30,7 +30,13 @@ const RootResources = "resources" const RootDevenv = "devenv" const RootSystem = "system" -const SystemBrandingStorage = "system/branding" +const brandingStorage = "branding" +const SystemBrandingStorage = "system/" + brandingStorage + +var ( + SystemBrandingReader = &models.SignedInUser{OrgId: 1} + SystemBrandingAdmin = &models.SignedInUser{OrgId: 1} +) const MAX_UPLOAD_SIZE = 1 * 1024 * 1024 // 3MB @@ -129,7 +135,30 @@ func ProvideService(sql *sqlstore.SQLStore, features featuremgmt.FeatureToggles, } authService := newStaticStorageAuthService(func(ctx context.Context, user 
*models.SignedInUser, storageName string) map[string]filestorage.PathFilter { - if user == nil || !user.IsGrafanaAdmin { + if user == nil { + return nil + } + + if storageName == RootSystem { + if user == SystemBrandingReader { + return map[string]filestorage.PathFilter{ + ActionFilesRead: createSystemBrandingPathFilter(), + ActionFilesWrite: denyAllPathFilter, + ActionFilesDelete: denyAllPathFilter, + } + } + + if user == SystemBrandingAdmin { + systemBrandingFilter := createSystemBrandingPathFilter() + return map[string]filestorage.PathFilter{ + ActionFilesRead: systemBrandingFilter, + ActionFilesWrite: systemBrandingFilter, + ActionFilesDelete: systemBrandingFilter, + } + } + } + + if !user.IsGrafanaAdmin { return nil } @@ -152,12 +181,6 @@ func ProvideService(sql *sqlstore.SQLStore, features featuremgmt.FeatureToggles, ActionFilesWrite: allowAllPathFilter, ActionFilesDelete: allowAllPathFilter, } - case RootSystem: - return map[string]filestorage.PathFilter{ - ActionFilesRead: allowAllPathFilter, - ActionFilesWrite: allowAllPathFilter, - ActionFilesDelete: allowAllPathFilter, - } default: return nil } @@ -166,6 +189,14 @@ func ProvideService(sql *sqlstore.SQLStore, features featuremgmt.FeatureToggles, return newStandardStorageService(sql, globalRoots, initializeOrgStorages, authService) } +func createSystemBrandingPathFilter() filestorage.PathFilter { + return filestorage.NewPathFilter( + []string{filestorage.Delimiter + brandingStorage + filestorage.Delimiter}, // access to all folders and files inside `/branding/` + []string{filestorage.Delimiter + brandingStorage}, // access to the `/branding` folder itself, but not to any other sibling folder + nil, + nil) +} + func newStandardStorageService(sql *sqlstore.SQLStore, globalRoots []storageRuntime, initializeOrgStorages func(orgId int64) []storageRuntime, authService storageAuthService) *standardStorageService { rootsByOrgId := make(map[int64][]storageRuntime) rootsByOrgId[ac.GlobalOrgID] = globalRoots From 
e0a58300aca8b384a13bbc7e63251435b3f64f9c Mon Sep 17 00:00:00 2001 From: Ezequiel Victorero Date: Tue, 19 Jul 2022 11:50:37 -0300 Subject: [PATCH 064/116] Public Dashboards: count public dashboards view requests (#52419) --- pkg/api/api.go | 2 +- pkg/infra/metrics/metrics.go | 10 ++++++++++ pkg/services/publicdashboards/api/middleware.go | 11 ++++++++++- 3 files changed, 21 insertions(+), 2 deletions(-) diff --git a/pkg/api/api.go b/pkg/api/api.go index 8eac11dac35..58789b610ba 100644 --- a/pkg/api/api.go +++ b/pkg/api/api.go @@ -113,7 +113,7 @@ func (hs *HTTPServer) registerRoutes() { } if hs.Features.IsEnabled(featuremgmt.FlagPublicDashboards) { - r.Get("/public-dashboards/:accessToken", publicdashboardsapi.SetPublicDashboardFlag(), hs.Index) + r.Get("/public-dashboards/:accessToken", publicdashboardsapi.SetPublicDashboardFlag(), publicdashboardsapi.CountPublicDashboardRequest(), hs.Index) } r.Get("/explore", authorize(func(c *models.ReqContext) { diff --git a/pkg/infra/metrics/metrics.go b/pkg/infra/metrics/metrics.go index 788e9702c59..c3dfce10fcc 100644 --- a/pkg/infra/metrics/metrics.go +++ b/pkg/infra/metrics/metrics.go @@ -101,6 +101,9 @@ var ( // MAccessEvaluationCount is a metric gauge for total number of evaluation requests MAccessEvaluationCount prometheus.Counter + + // MPublicDashboardRequestCount is a metric counter for public dashboards requests + MPublicDashboardRequestCount prometheus.Counter ) // Timers @@ -410,6 +413,12 @@ func init() { Namespace: ExporterName, }) + MPublicDashboardRequestCount = metricutil.NewCounterStartingAtZero(prometheus.CounterOpts{ + Name: "public_dashboard_request_count", + Help: "counter for public dashboards requests", + Namespace: ExporterName, + }) + MStatTotalDashboards = prometheus.NewGauge(prometheus.GaugeOpts{ Name: "stat_totals_dashboard", Help: "total amount of dashboards", @@ -654,5 +663,6 @@ func initMetricVars() { StatsTotalLibraryVariables, StatsTotalDataKeys, MStatTotalPublicDashboards, + 
MPublicDashboardRequestCount, ) } diff --git a/pkg/services/publicdashboards/api/middleware.go b/pkg/services/publicdashboards/api/middleware.go index f7ba59d587a..e31a461d2b3 100644 --- a/pkg/services/publicdashboards/api/middleware.go +++ b/pkg/services/publicdashboards/api/middleware.go @@ -1,9 +1,18 @@ package api -import "github.com/grafana/grafana/pkg/models" +import ( + "github.com/grafana/grafana/pkg/infra/metrics" + "github.com/grafana/grafana/pkg/models" +) func SetPublicDashboardFlag() func(c *models.ReqContext) { return func(c *models.ReqContext) { c.IsPublicDashboardView = true } } + +func CountPublicDashboardRequest() func(c *models.ReqContext) { + return func(c *models.ReqContext) { + metrics.MPublicDashboardRequestCount.Inc() + } +} From 529289268b22e87daed034841efc49e2bb588fcf Mon Sep 17 00:00:00 2001 From: Dimitris Sotirakis Date: Tue, 19 Jul 2022 18:04:16 +0300 Subject: [PATCH 065/116] Docker: Install `git` in `grafana/grafana-ci-deploy` image (#52466) * Install git in grafana/grafana-ci-deploy image * Bump grafana/grafana-ci-deploy version --- .drone.yml | 50 +++++++++++++++--------------- scripts/build/ci-deploy/Dockerfile | 2 +- scripts/drone/steps/lib.star | 2 +- 3 files changed, 27 insertions(+), 27 deletions(-) diff --git a/.drone.yml b/.drone.yml index 557076fe940..fb05c58b1c9 100644 --- a/.drone.yml +++ b/.drone.yml @@ -1146,7 +1146,7 @@ steps: from_secret: gcp_key PRERELEASE_BUCKET: from_secret: prerelease_bucket - image: grafana/grafana-ci-deploy:1.3.1 + image: grafana/grafana-ci-deploy:1.3.2 name: store-storybook when: repo: @@ -1277,7 +1277,7 @@ steps: from_secret: gcp_key PRERELEASE_BUCKET: from_secret: prerelease_bucket - image: grafana/grafana-ci-deploy:1.3.1 + image: grafana/grafana-ci-deploy:1.3.2 name: upload-packages when: repo: @@ -1291,7 +1291,7 @@ steps: from_secret: gcp_key PRERELEASE_BUCKET: from_secret: prerelease_bucket - image: grafana/grafana-ci-deploy:1.3.1 + image: grafana/grafana-ci-deploy:1.3.2 name: 
upload-cdn-assets when: repo: @@ -1546,7 +1546,7 @@ steps: from_secret: gpg_pub_key GRAFANA_COM_API_KEY: from_secret: grafana_api_key - image: grafana/grafana-ci-deploy:1.3.1 + image: grafana/grafana-ci-deploy:1.3.2 name: store-packages-oss trigger: branch: main @@ -1822,7 +1822,7 @@ steps: from_secret: gcp_key PRERELEASE_BUCKET: from_secret: prerelease_bucket - image: grafana/grafana-ci-deploy:1.3.1 + image: grafana/grafana-ci-deploy:1.3.2 name: upload-cdn-assets - commands: - ./bin/grabpl upload-packages --edition oss @@ -1836,7 +1836,7 @@ steps: from_secret: gcp_key PRERELEASE_BUCKET: from_secret: prerelease_bucket - image: grafana/grafana-ci-deploy:1.3.1 + image: grafana/grafana-ci-deploy:1.3.2 name: upload-packages - commands: - ./bin/grabpl store-storybook --deployment latest --src-bucket grafana-prerelease @@ -1854,7 +1854,7 @@ steps: from_secret: gcp_key PRERELEASE_BUCKET: from_secret: prerelease_bucket - image: grafana/grafana-ci-deploy:1.3.1 + image: grafana/grafana-ci-deploy:1.3.2 name: store-storybook - commands: - ./bin/grabpl artifacts npm store --tag ${DRONE_TAG} @@ -1865,7 +1865,7 @@ steps: from_secret: gcp_key PRERELEASE_BUCKET: from_secret: prerelease_bucket - image: grafana/grafana-ci-deploy:1.3.1 + image: grafana/grafana-ci-deploy:1.3.2 name: store-npm-packages trigger: event: @@ -2436,7 +2436,7 @@ steps: from_secret: gcp_key PRERELEASE_BUCKET: from_secret: prerelease_bucket - image: grafana/grafana-ci-deploy:1.3.1 + image: grafana/grafana-ci-deploy:1.3.2 name: upload-cdn-assets - commands: - ./bin/grabpl upload-packages --edition enterprise @@ -2447,7 +2447,7 @@ steps: from_secret: gcp_key PRERELEASE_BUCKET: from_secret: prerelease_bucket - image: grafana/grafana-ci-deploy:1.3.1 + image: grafana/grafana-ci-deploy:1.3.2 name: upload-packages - commands: - ./bin/grabpl artifacts npm store --tag ${DRONE_TAG} @@ -2458,7 +2458,7 @@ steps: from_secret: gcp_key PRERELEASE_BUCKET: from_secret: prerelease_bucket - image: grafana/grafana-ci-deploy:1.3.1 
+ image: grafana/grafana-ci-deploy:1.3.2 name: store-npm-packages - commands: - ./bin/grabpl package --jobs 8 --edition enterprise2 --sign ${DRONE_TAG} @@ -2488,7 +2488,7 @@ steps: from_secret: gcp_key PRERELEASE_BUCKET: from_secret: prerelease_bucket - image: grafana/grafana-ci-deploy:1.3.1 + image: grafana/grafana-ci-deploy:1.3.2 name: upload-cdn-assets-enterprise2 - commands: - ./bin/grabpl upload-packages --edition enterprise2 @@ -2499,7 +2499,7 @@ steps: from_secret: gcp_key PRERELEASE_BUCKET: from_secret: prerelease_bucket - image: grafana/grafana-ci-deploy:1.3.1 + image: grafana/grafana-ci-deploy:1.3.2 name: upload-packages-enterprise2 trigger: event: @@ -3231,7 +3231,7 @@ steps: from_secret: gcp_key PRERELEASE_BUCKET: from_secret: prerelease_bucket - image: grafana/grafana-ci-deploy:1.3.1 + image: grafana/grafana-ci-deploy:1.3.2 name: publish-artifacts trigger: event: @@ -3269,7 +3269,7 @@ steps: from_secret: gcp_key PRERELEASE_BUCKET: from_secret: prerelease_bucket - image: grafana/grafana-ci-deploy:1.3.1 + image: grafana/grafana-ci-deploy:1.3.2 name: publish-artifacts trigger: event: @@ -3313,7 +3313,7 @@ steps: from_secret: gcp_key PRERELEASE_BUCKET: from_secret: prerelease_bucket - image: grafana/grafana-ci-deploy:1.3.1 + image: grafana/grafana-ci-deploy:1.3.2 name: retrieve-npm-packages - commands: - ./bin/grabpl artifacts npm release --tag v${TAG} @@ -3376,7 +3376,7 @@ steps: from_secret: gpg_pub_key GRAFANA_COM_API_KEY: from_secret: grafana_api_key - image: grafana/grafana-ci-deploy:1.3.1 + image: grafana/grafana-ci-deploy:1.3.2 name: store-packages-oss trigger: event: @@ -3430,7 +3430,7 @@ steps: from_secret: gpg_pub_key GRAFANA_COM_API_KEY: from_secret: grafana_api_key - image: grafana/grafana-ci-deploy:1.3.1 + image: grafana/grafana-ci-deploy:1.3.2 name: store-packages-enterprise trigger: event: @@ -3706,7 +3706,7 @@ steps: from_secret: gcp_key PRERELEASE_BUCKET: from_secret: prerelease_bucket - image: grafana/grafana-ci-deploy:1.3.1 + image: 
grafana/grafana-ci-deploy:1.3.2 name: upload-cdn-assets when: repo: @@ -3723,7 +3723,7 @@ steps: from_secret: gcp_key PRERELEASE_BUCKET: from_secret: prerelease_bucket - image: grafana/grafana-ci-deploy:1.3.1 + image: grafana/grafana-ci-deploy:1.3.2 name: upload-packages when: repo: @@ -4278,7 +4278,7 @@ steps: from_secret: gcp_key PRERELEASE_BUCKET: from_secret: prerelease_bucket - image: grafana/grafana-ci-deploy:1.3.1 + image: grafana/grafana-ci-deploy:1.3.2 name: upload-cdn-assets when: repo: @@ -4292,7 +4292,7 @@ steps: from_secret: gcp_key PRERELEASE_BUCKET: from_secret: prerelease_bucket - image: grafana/grafana-ci-deploy:1.3.1 + image: grafana/grafana-ci-deploy:1.3.2 name: upload-packages when: repo: @@ -4326,7 +4326,7 @@ steps: from_secret: gcp_key PRERELEASE_BUCKET: from_secret: prerelease_bucket - image: grafana/grafana-ci-deploy:1.3.1 + image: grafana/grafana-ci-deploy:1.3.2 name: upload-cdn-assets-enterprise2 - commands: - ./bin/grabpl upload-packages --edition enterprise2 @@ -4337,7 +4337,7 @@ steps: from_secret: gcp_key PRERELEASE_BUCKET: from_secret: prerelease_bucket - image: grafana/grafana-ci-deploy:1.3.1 + image: grafana/grafana-ci-deploy:1.3.2 name: upload-packages-enterprise2 trigger: ref: @@ -4874,6 +4874,6 @@ kind: secret name: gcp_upload_artifacts_key --- kind: signature -hmac: 6ca96adcc90cb32d6ded5bcd804548f5c2a5dfdfa1791761f150e1447852357a +hmac: 21307c19aa1bb40d2da542435c76666ba935db47ddd936ca24cfdf79ffb11443 ... 
diff --git a/scripts/build/ci-deploy/Dockerfile b/scripts/build/ci-deploy/Dockerfile index 1e6561cbdad..b60aa5fd16d 100644 --- a/scripts/build/ci-deploy/Dockerfile +++ b/scripts/build/ci-deploy/Dockerfile @@ -27,7 +27,7 @@ ARG DEBIAN_FRONTEND=noninteractive \ GOOGLE_SDK_CHECKSUM=374f960c9f384f88b6fc190b268ceac5dcad777301390107af63782bfb5ecbc7 # Need procps for pkill utility, which is used by the build pipeline tool to restart the GPG agent -RUN apt update && apt install -yq curl python3-pip procps && pip3 install -U awscli crcmod && \ +RUN apt update && apt install -yq git curl python3-pip procps && pip3 install -U awscli crcmod && \ curl -fLO https://dl.google.com/dl/cloudsdk/channels/rapid/downloads/google-cloud-sdk-${GOOGLE_SDK_VERSION}-linux-x86_64.tar.gz && \ echo "${GOOGLE_SDK_CHECKSUM} google-cloud-sdk-${GOOGLE_SDK_VERSION}-linux-x86_64.tar.gz" | sha256sum --check --status && \ tar xzf google-cloud-sdk-${GOOGLE_SDK_VERSION}-linux-x86_64.tar.gz -C /opt && \ diff --git a/scripts/drone/steps/lib.star b/scripts/drone/steps/lib.star index 252d559b4b1..7bfe3d2ccc1 100644 --- a/scripts/drone/steps/lib.star +++ b/scripts/drone/steps/lib.star @@ -2,7 +2,7 @@ load('scripts/drone/vault.star', 'from_secret', 'github_token', 'pull_secret', ' grabpl_version = 'v2.9.52' build_image = 'grafana/build-container:1.5.7' -publish_image = 'grafana/grafana-ci-deploy:1.3.1' +publish_image = 'grafana/grafana-ci-deploy:1.3.2' deploy_docker_image = 'us.gcr.io/kubernetes-dev/drone/plugins/deploy-image' alpine_image = 'alpine:3.15' curl_image = 'byrnedo/alpine-curl:0.1.8' From 3bdec4d138ad89c3c2e6a918cc321297a6174007 Mon Sep 17 00:00:00 2001 From: Ryan McKinley Date: Tue, 19 Jul 2022 08:13:26 -0700 Subject: [PATCH 066/116] Storage: remove orgId from sql config (#52426) --- pkg/services/store/config.go | 1 - pkg/services/store/service.go | 52 +++++++++++++++++------------- pkg/services/store/service_test.go | 32 +++++++++++------- pkg/services/store/storage_disk.go | 17 +++++----- 
pkg/services/store/storage_sql.go | 13 ++++---- pkg/services/store/types.go | 5 --- 6 files changed, 65 insertions(+), 55 deletions(-) diff --git a/pkg/services/store/config.go b/pkg/services/store/config.go index 3864397674d..206cfb7112a 100644 --- a/pkg/services/store/config.go +++ b/pkg/services/store/config.go @@ -34,7 +34,6 @@ type StorageGitConfig struct { type StorageSQLConfig struct { // SQLStorage will prefix all paths with orgId for isolation between orgs - orgId int64 } type StorageS3Config struct { diff --git a/pkg/services/store/service.go b/pkg/services/store/service.go index 6278719602e..dbbf505d1b8 100644 --- a/pkg/services/store/service.go +++ b/pkg/services/store/service.go @@ -85,29 +85,37 @@ type standardStorageService struct { func ProvideService(sql *sqlstore.SQLStore, features featuremgmt.FeatureToggles, cfg *setting.Cfg) StorageService { globalRoots := []storageRuntime{ - newDiskStorage(RootPublicStatic, "Public static files", &StorageLocalDiskConfig{ - Path: cfg.StaticRootPath, - Roots: []string{ - "/testdata/", - "/img/", - "/gazetteer/", - "/maps/", + newDiskStorage(RootStorageConfig{ + Prefix: RootPublicStatic, + Name: "Public static files", + Description: "Access files from the static public files", + Disk: &StorageLocalDiskConfig{ + Path: cfg.StaticRootPath, + Roots: []string{ + "/testdata/", + "/img/", + "/gazetteer/", + "/maps/", + }, }, - }).setReadOnly(true).setBuiltin(true). 
- setDescription("Access files from the static public files"), + }).setReadOnly(true).setBuiltin(true), } // Development dashboards if setting.Env != setting.Prod { devenv := filepath.Join(cfg.StaticRootPath, "..", "devenv") if _, err := os.Stat(devenv); !os.IsNotExist(err) { - // path/to/whatever exists - s := newDiskStorage(RootDevenv, "Development Environment", &StorageLocalDiskConfig{ - Path: devenv, - Roots: []string{ - "/dev-dashboards/", - }, - }).setReadOnly(false).setDescription("Explore files within the developer environment directly") + s := newDiskStorage(RootStorageConfig{ + Prefix: RootDevenv, + Name: "Development Environment", + Description: "Explore files within the developer environment directly", + Disk: &StorageLocalDiskConfig{ + Path: devenv, + Roots: []string{ + "/dev-dashboards/", + }, + }}).setReadOnly(false) + globalRoots = append(globalRoots, s) } } @@ -119,17 +127,17 @@ func ProvideService(sql *sqlstore.SQLStore, features featuremgmt.FeatureToggles, storages = append(storages, newSQLStorage(RootResources, "Resources", - &StorageSQLConfig{orgId: orgId}, sql). - setBuiltin(true). - setDescription("Upload custom resource files")) + "Upload custom resource files", + &StorageSQLConfig{}, sql, orgId). + setBuiltin(true)) // System settings storages = append(storages, newSQLStorage(RootSystem, "System", - &StorageSQLConfig{orgId: orgId}, - sql, - ).setBuiltin(true).setDescription("Grafana system storage")) + "Grafana system storage", + &StorageSQLConfig{}, sql, orgId). 
+ setBuiltin(true)) return storages } diff --git a/pkg/services/store/service_test.go b/pkg/services/store/service_test.go index 597d6a6bf93..916ae84e430 100644 --- a/pkg/services/store/service_test.go +++ b/pkg/services/store/service_test.go @@ -32,17 +32,20 @@ var ( } }) publicRoot, _ = filepath.Abs("../../../public") - publicStaticFilesStorage = newDiskStorage("public", "Public static files", &StorageLocalDiskConfig{ - Path: publicRoot, - Roots: []string{ - "/testdata/", - "/img/icons/", - "/img/bg/", - "/gazetteer/", - "/maps/", - "/upload/", - }, - }).setReadOnly(true).setBuiltin(true) + publicStaticFilesStorage = newDiskStorage(RootStorageConfig{ + Prefix: "public", + Name: "Public static files", + Disk: &StorageLocalDiskConfig{ + Path: publicRoot, + Roots: []string{ + "/testdata/", + "/img/icons/", + "/img/bg/", + "/gazetteer/", + "/maps/", + "/upload/", + }, + }}).setReadOnly(true).setBuiltin(true) ) func TestListFiles(t *testing.T) { @@ -82,7 +85,12 @@ func setupUploadStore(t *testing.T, authService storageAuthService) (StorageServ t.Helper() storageName := "resources" mockStorage := &filestorage.MockFileStorage{} - sqlStorage := newSQLStorage(storageName, "Testing upload", &StorageSQLConfig{orgId: 1}, sqlstore.InitTestDB(t)) + sqlStorage := newSQLStorage( + storageName, "Testing upload", "dummy descr", + &StorageSQLConfig{}, + sqlstore.InitTestDB(t), + 1, // orgID (prefix init) + ) sqlStorage.store = mockStorage if authService == nil { diff --git a/pkg/services/store/storage_disk.go b/pkg/services/store/storage_disk.go index 78955451da7..c7a523161ad 100644 --- a/pkg/services/store/storage_disk.go +++ b/pkg/services/store/storage_disk.go @@ -18,20 +18,18 @@ type rootStorageDisk struct { settings *StorageLocalDiskConfig } -func newDiskStorage(prefix string, name string, cfg *StorageLocalDiskConfig) *rootStorageDisk { +func newDiskStorage(scfg RootStorageConfig) *rootStorageDisk { + cfg := scfg.Disk if cfg == nil { cfg = &StorageLocalDiskConfig{} + scfg.Disk 
= cfg } + scfg.Type = rootStorageTypeDisk meta := RootStorageMeta{ - Config: RootStorageConfig{ - Type: rootStorageTypeDisk, - Prefix: prefix, - Name: name, - Disk: cfg, - }, + Config: scfg, } - if prefix == "" { + if scfg.Prefix == "" { meta.Notice = append(meta.Notice, data.Notice{ Severity: data.NoticeSeverityError, Text: "Missing prefix", @@ -43,13 +41,14 @@ func newDiskStorage(prefix string, name string, cfg *StorageLocalDiskConfig) *ro Text: "Missing path configuration", }) } + s := &rootStorageDisk{} if meta.Notice == nil { path := fmt.Sprintf("file://%s", cfg.Path) bucket, err := blob.OpenBucket(context.Background(), path) if err != nil { - grafanaStorageLogger.Warn("error loading storage", "prefix", prefix, "err", err) + grafanaStorageLogger.Warn("error loading storage", "prefix", scfg.Prefix, "err", err) meta.Notice = append(meta.Notice, data.Notice{ Severity: data.NoticeSeverityError, Text: "Failed to initialize storage", diff --git a/pkg/services/store/storage_sql.go b/pkg/services/store/storage_sql.go index e2e4f3ac79a..ecfb3652192 100644 --- a/pkg/services/store/storage_sql.go +++ b/pkg/services/store/storage_sql.go @@ -28,17 +28,18 @@ func getDbStoragePathPrefix(orgId int64, storageName string) string { return filestorage.Join(fmt.Sprintf("%d", orgId), storageName+filestorage.Delimiter) } -func newSQLStorage(prefix string, name string, cfg *StorageSQLConfig, sql *sqlstore.SQLStore) *rootStorageSQL { +func newSQLStorage(prefix string, name string, descr string, cfg *StorageSQLConfig, sql *sqlstore.SQLStore, orgId int64) *rootStorageSQL { if cfg == nil { cfg = &StorageSQLConfig{} } meta := RootStorageMeta{ Config: RootStorageConfig{ - Type: rootStorageTypeSQL, - Prefix: prefix, - Name: name, - SQL: cfg, + Type: rootStorageTypeSQL, + Prefix: prefix, + Name: name, + Description: descr, + SQL: cfg, }, } @@ -52,7 +53,7 @@ func newSQLStorage(prefix string, name string, cfg *StorageSQLConfig, sql *sqlst s := &rootStorageSQL{} s.store = 
filestorage.NewDbStorage( grafanaStorageLogger, - sql, nil, getDbStoragePathPrefix(cfg.orgId, prefix)) + sql, nil, getDbStoragePathPrefix(orgId, prefix)) meta.Ready = true s.meta = meta diff --git a/pkg/services/store/types.go b/pkg/services/store/types.go index c076a60e7d0..20c345fd1f3 100644 --- a/pkg/services/store/types.go +++ b/pkg/services/store/types.go @@ -82,11 +82,6 @@ func (t *baseStorageRuntime) setBuiltin(val bool) *baseStorageRuntime { return t } -func (t *baseStorageRuntime) setDescription(v string) *baseStorageRuntime { - t.meta.Config.Description = v - return t -} - type RootStorageMeta struct { ReadOnly bool `json:"editable,omitempty"` Builtin bool `json:"builtin,omitempty"` From 0142c8ccd1953126e178d981f6aa145da4891c06 Mon Sep 17 00:00:00 2001 From: kay delaney <45561153+kaydelaney@users.noreply.github.com> Date: Tue, 19 Jul 2022 16:15:59 +0100 Subject: [PATCH 067/116] Dashboard Links: Fix styles for very long dashboard titles (#52443) --- .betterer.results | 6 --- .../SubMenu/DashboardLinksDashboard.tsx | 49 +++++++++++++------ 2 files changed, 35 insertions(+), 20 deletions(-) diff --git a/.betterer.results b/.betterer.results index 6b4f429c1f8..4d50cf774cc 100644 --- a/.betterer.results +++ b/.betterer.results @@ -4363,12 +4363,6 @@ exports[`better eslint`] = { [0, 0, 0, "Unexpected any. Specify a different type.", "0"], [0, 0, 0, "Unexpected any. Specify a different type.", "1"] ], - "public/app/features/dashboard/components/SubMenu/DashboardLinksDashboard.tsx:5381": [ - [0, 0, 0, "Unexpected any. Specify a different type.", "0"], - [0, 0, 0, "Unexpected any. Specify a different type.", "1"], - [0, 0, 0, "Unexpected any. Specify a different type.", "2"], - [0, 0, 0, "Unexpected any. Specify a different type.", "3"] - ], "public/app/features/dashboard/components/SubMenu/SubMenu.tsx:5381": [ [0, 0, 0, "Unexpected any. 
Specify a different type.", "0"] ], diff --git a/public/app/features/dashboard/components/SubMenu/DashboardLinksDashboard.tsx b/public/app/features/dashboard/components/SubMenu/DashboardLinksDashboard.tsx index 53ccd630a0b..3b74510d7be 100644 --- a/public/app/features/dashboard/components/SubMenu/DashboardLinksDashboard.tsx +++ b/public/app/features/dashboard/components/SubMenu/DashboardLinksDashboard.tsx @@ -2,6 +2,7 @@ import { css, cx } from '@emotion/css'; import React, { useRef, useState, useLayoutEffect } from 'react'; import { useAsync } from 'react-use'; +import { GrafanaTheme2 } from '@grafana/data'; import { sanitize, sanitizeUrl } from '@grafana/data/src/text/sanitize'; import { selectors } from '@grafana/e2e-selectors'; import { Icon, ToolbarButton, Tooltip, useStyles2 } from '@grafana/ui'; @@ -14,7 +15,7 @@ import { DashboardLink } from '../../state/DashboardModel'; interface Props { link: DashboardLink; linkInfo: { title: string; href: string }; - dashboardId: any; + dashboardId: number; } export const DashboardLinksDashboard: React.FC = (props) => { @@ -23,13 +24,7 @@ export const DashboardLinksDashboard: React.FC = (props) => { const [dropdownCssClass, setDropdownCssClass] = useState('invisible'); const [opened, setOpened] = useState(0); const resolvedLinks = useResolvedLinks(props, opened); - - const buttonStyle = useStyles2( - (theme) => - css` - color: ${theme.colors.text.primary}; - ` - ); + const styles = useStyles2(getStyles); useLayoutEffect(() => { setDropdownCssClass(getDropdownLocationCssClass(listRef.current)); @@ -41,17 +36,22 @@ export const DashboardLinksDashboard: React.FC = (props) => { <> setOpened(Date.now())} - className={cx('gf-form-label gf-form-label--dashlink', buttonStyle)} + className={cx('gf-form-label gf-form-label--dashlink', styles.button)} data-placement="bottom" data-toggle="dropdown" aria-expanded={!!opened} aria-controls="dropdown-list" aria-haspopup="menu" > - + {linkInfo.title} -
+ + + [[ end ]] + [[ if ne .EmbeddedImage "" ]] + + + + [[ end ]]
-
+
+ Alerting Panel +
+ Alerting Chart Attached Below +
Value: [[ .ValueString ]] @@ -101,7 +116,7 @@ [[ end ]] [[ if gt (len .Message) 0 ]] - [[ .Message ]] +
[[ .Message ]] [[ else ]]