Mm 16623 fix all initialism errors in channels/app and channels/api4 and /store dir (#25787)
parent 2415438d88
commit ae33de57ff
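The change is mechanical throughout the diff: Go spells initialisms fully capitalized in identifiers (ID rather than Id, MySQL rather than MySql), while struct tags, JSON keys, and route paths are left untouched. A minimal sketch of the pattern, using hypothetical names rather than code lifted from this repository (the json tags mirror the first hunk below):

package main

import "fmt"

// group illustrates the rename applied in this commit: the Go field names
// capitalize the ID initialism; the json tags stay exactly as they were.
type group struct {
	ID       string `json:"mattermost_group_id"` // was: Id
	RemoteID string `json:"primary_key"`         // was: RemoteId
}

func main() {
	g := group{ID: "abc", RemoteID: "xyz"}
	fmt.Println(g.ID, g.RemoteID)
}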
@@ -14,16 +14,16 @@ import (
 )

 type mixedUnlinkedGroup struct {
-	Id           *string `json:"mattermost_group_id"`
+	ID           *string `json:"mattermost_group_id"`
 	DisplayName  string  `json:"name"`
-	RemoteId     string  `json:"primary_key"`
+	RemoteID     string  `json:"primary_key"`
 	HasSyncables *bool   `json:"has_syncables"`
 }

 func (api *API) InitLdap() {
 	api.BaseRoutes.LDAP.Handle("/sync", api.APISessionRequired(syncLdap)).Methods("POST")
 	api.BaseRoutes.LDAP.Handle("/test", api.APISessionRequired(testLdap)).Methods("POST")
-	api.BaseRoutes.LDAP.Handle("/migrateid", api.APISessionRequired(migrateIdLdap)).Methods("POST")
+	api.BaseRoutes.LDAP.Handle("/migrateid", api.APISessionRequired(migrateIDLdap)).Methods("POST")

 	// GET /api/v4/ldap/groups?page=0&per_page=1000
 	api.BaseRoutes.LDAP.Handle("/groups", api.APISessionRequired(getLdapGroups)).Methods("GET")
@@ -122,10 +122,10 @@ func getLdapGroups(c *Context, w http.ResponseWriter, r *http.Request) {
 	for _, group := range groups {
 		mug := &mixedUnlinkedGroup{
 			DisplayName: group.DisplayName,
-			RemoteId:    group.GetRemoteId(),
+			RemoteID:    group.GetRemoteId(),
 		}
 		if len(group.Id) == 26 {
-			mug.Id = &group.Id
+			mug.ID = &group.Id
 			mug.HasSyncables = &group.HasSyncables
 		}
 		mugs = append(mugs, mug)
@@ -284,7 +284,7 @@ func unlinkLdapGroup(c *Context, w http.ResponseWriter, r *http.Request) {
 	ReturnStatusOK(w)
 }

-func migrateIdLdap(c *Context, w http.ResponseWriter, r *http.Request) {
+func migrateIDLdap(c *Context, w http.ResponseWriter, r *http.Request) {
 	props := model.StringInterfaceFromJSON(r.Body)
 	toAttribute, ok := props["toAttribute"].(string)
 	if !ok || toAttribute == "" {

@@ -4,7 +4,7 @@
 package api4

 func (api *API) InitLdapLocal() {
-	api.BaseRoutes.LDAP.Handle("/migrateid", api.APILocal(migrateIdLdap)).Methods("POST")
+	api.BaseRoutes.LDAP.Handle("/migrateid", api.APILocal(migrateIDLdap)).Methods("POST")
 	api.BaseRoutes.LDAP.Handle("/sync", api.APILocal(syncLdap)).Methods("POST")
 	api.BaseRoutes.LDAP.Handle("/test", api.APILocal(testLdap)).Methods("POST")
 	api.BaseRoutes.LDAP.Handle("/groups", api.APILocal(getLdapGroups)).Methods("GET")

@@ -27,7 +27,7 @@ func TestCheckIfRolesGrantPermission(t *testing.T) {

 	cases := []struct {
 		roles        []string
-		permissionId string
+		permissionID string
 		shouldGrant  bool
 	}{
 		{[]string{model.SystemAdminRoleId}, model.PermissionManageSystem.Id, true},
@@ -43,7 +43,7 @@ func TestCheckIfRolesGrantPermission(t *testing.T) {
 	}

 	for _, testcase := range cases {
-		require.Equal(t, th.App.RolesGrantPermission(testcase.roles, testcase.permissionId), testcase.shouldGrant)
+		require.Equal(t, th.App.RolesGrantPermission(testcase.roles, testcase.permissionID), testcase.shouldGrant)
 	}
 }

@@ -272,8 +272,8 @@ func TestJoinDefaultChannelsCreatesChannelMemberHistoryRecordTownSquare(t *testi
 	// figure out the initial number of users in town square
 	channel, err := th.App.Srv().Store().Channel().GetByName(th.BasicTeam.Id, "town-square", true)
 	require.NoError(t, err)
-	townSquareChannelId := channel.Id
-	users, nErr := th.App.Srv().Store().ChannelMemberHistory().GetUsersInChannelDuring(model.GetMillis()-100, model.GetMillis()+100, townSquareChannelId)
+	townSquareChannelID := channel.Id
+	users, nErr := th.App.Srv().Store().ChannelMemberHistory().GetUsersInChannelDuring(model.GetMillis()-100, model.GetMillis()+100, townSquareChannelID)
 	require.NoError(t, nErr)
 	initialNumTownSquareUsers := len(users)

@@ -282,13 +282,13 @@ func TestJoinDefaultChannelsCreatesChannelMemberHistoryRecordTownSquare(t *testi
 	th.App.JoinDefaultChannels(th.Context, th.BasicTeam.Id, user, false, "")

 	// there should be a ChannelMemberHistory record for the user
-	histories, nErr := th.App.Srv().Store().ChannelMemberHistory().GetUsersInChannelDuring(model.GetMillis()-100, model.GetMillis()+100, townSquareChannelId)
+	histories, nErr := th.App.Srv().Store().ChannelMemberHistory().GetUsersInChannelDuring(model.GetMillis()-100, model.GetMillis()+100, townSquareChannelID)
 	require.NoError(t, nErr)
 	assert.Len(t, histories, initialNumTownSquareUsers+1)

 	found := false
 	for _, history := range histories {
-		if user.Id == history.UserId && townSquareChannelId == history.ChannelId {
+		if user.Id == history.UserId && townSquareChannelID == history.ChannelId {
 			found = true
 			break
 		}

@@ -75,7 +75,7 @@ func BenchmarkUploadFile(b *testing.B) {
 		{fmt.Sprintf("zero-%dMb", mb(len(zero10M))), ".zero", zero10M},
 	}

-	file_benchmarks := []struct {
+	fileBenchmarks := []struct {
 		title string
 		f     func(b *testing.B, n int, data []byte, ext string)
 	}{
@@ -164,7 +164,7 @@ func BenchmarkUploadFile(b *testing.B) {
 	}

 	for _, file := range files {
-		for _, fb := range file_benchmarks {
+		for _, fb := range fileBenchmarks {
 			b.Run(file.title+"-"+fb.title, func(b *testing.B) {
 				for i := 0; i < b.N; i++ {
 					fb.f(b, i, file.data, file.ext)

@@ -812,10 +812,10 @@ func (a *App) importUserTeams(rctx request.CTX, user *model.User, data *[]import
 		teamMemberByTeamID = map[string]*model.TeamMember{}
 		newTeamMembers     = []*model.TeamMember{}
 		oldTeamMembers     = []*model.TeamMember{}
-		rolesByTeamId      = map[string]string{}
-		isGuestByTeamId    = map[string]bool{}
+		rolesByTeamID      = map[string]string{}
+		isGuestByTeamID    = map[string]bool{}
 		isUserByTeamId     = map[string]bool{}
-		isAdminByTeamId    = map[string]bool{}
+		isAdminByTeamID    = map[string]bool{}
 	)

 	existingMemberships, nErr := a.Srv().Store().Team().GetTeamsForUser(rctx, user.Id, "", true)
@@ -839,9 +839,9 @@ func (a *App) importUserTeams(rctx request.CTX, user *model.User, data *[]import
 			})
 		}

-		isGuestByTeamId[team.Id] = false
+		isGuestByTeamID[team.Id] = false
 		isUserByTeamId[team.Id] = true
-		isAdminByTeamId[team.Id] = false
+		isAdminByTeamID[team.Id] = false

 		if tdata.Roles == nil {
 			isUserByTeamId[team.Id] = true
@@ -850,17 +850,17 @@ func (a *App) importUserTeams(rctx request.CTX, user *model.User, data *[]import
 			explicitRoles := []string{}
 			for _, role := range strings.Fields(rawRoles) {
 				if role == model.TeamGuestRoleId {
-					isGuestByTeamId[team.Id] = true
+					isGuestByTeamID[team.Id] = true
 					isUserByTeamId[team.Id] = false
 				} else if role == model.TeamUserRoleId {
 					isUserByTeamId[team.Id] = true
 				} else if role == model.TeamAdminRoleId {
-					isAdminByTeamId[team.Id] = true
+					isAdminByTeamID[team.Id] = true
 				} else {
 					explicitRoles = append(explicitRoles, role)
 				}
 			}
-			rolesByTeamId[team.Id] = strings.Join(explicitRoles, " ")
+			rolesByTeamID[team.Id] = strings.Join(explicitRoles, " ")
 		}

 		member := &model.TeamMember{
@@ -929,13 +929,13 @@ func (a *App) importUserTeams(rctx request.CTX, user *model.User, data *[]import
 	}

 	for _, member := range append(newMembers, oldMembers...) {
-		if member.ExplicitRoles != rolesByTeamId[member.TeamId] {
-			if _, err = a.UpdateTeamMemberRoles(rctx, member.TeamId, user.Id, rolesByTeamId[member.TeamId]); err != nil {
+		if member.ExplicitRoles != rolesByTeamID[member.TeamId] {
+			if _, err = a.UpdateTeamMemberRoles(rctx, member.TeamId, user.Id, rolesByTeamID[member.TeamId]); err != nil {
 				return err
 			}
 		}

-		a.UpdateTeamMemberSchemeRoles(rctx, member.TeamId, user.Id, isGuestByTeamId[member.TeamId], isUserByTeamId[member.TeamId], isAdminByTeamId[member.TeamId])
+		a.UpdateTeamMemberSchemeRoles(rctx, member.TeamId, user.Id, isGuestByTeamID[member.TeamId], isUserByTeamId[member.TeamId], isAdminByTeamID[member.TeamId])
 	}

 	for _, team := range allTeams {

@@ -307,18 +307,18 @@ func TestPostAction(t *testing.T) {
 	require.NotEmpty(t, attachments2[0].Actions)
 	require.NotEmpty(t, attachments2[0].Actions[0].Id)

-	clientTriggerId, err := th.App.DoPostActionWithCookie(th.Context, post.Id, "notavalidid", th.BasicUser.Id, "", nil)
+	clientTriggerID, err := th.App.DoPostActionWithCookie(th.Context, post.Id, "notavalidid", th.BasicUser.Id, "", nil)
 	require.NotNil(t, err)
 	assert.Equal(t, http.StatusNotFound, err.StatusCode)
-	assert.True(t, clientTriggerId == "")
+	assert.True(t, clientTriggerID == "")

-	clientTriggerId, err = th.App.DoPostActionWithCookie(th.Context, post.Id, attachments[0].Actions[0].Id, th.BasicUser.Id, "", nil)
+	clientTriggerID, err = th.App.DoPostActionWithCookie(th.Context, post.Id, attachments[0].Actions[0].Id, th.BasicUser.Id, "", nil)
 	require.Nil(t, err)
-	assert.True(t, len(clientTriggerId) == 26)
+	assert.True(t, len(clientTriggerID) == 26)

-	clientTriggerId, err = th.App.DoPostActionWithCookie(th.Context, post2.Id, attachments2[0].Actions[0].Id, th.BasicUser.Id, "selected", nil)
+	clientTriggerID, err = th.App.DoPostActionWithCookie(th.Context, post2.Id, attachments2[0].Actions[0].Id, th.BasicUser.Id, "selected", nil)
 	require.Nil(t, err)
-	assert.True(t, len(clientTriggerId) == 26)
+	assert.True(t, len(clientTriggerID) == 26)

 	th.App.UpdateConfig(func(cfg *model.Config) {
 		*cfg.ServiceSettings.AllowedUntrustedInternalConnections = ""

@@ -1737,7 +1737,7 @@ func TestGetMentionKeywords(t *testing.T) {
 	require.True(t, ok)
 	require.Equal(t, mentionableUser4ID, ids[0], "should've returned mention key of First")

-	dup_count := func(list []MentionableID) map[MentionableID]int {
+	dupCount := func(list []MentionableID) map[MentionableID]int {
 		duplicate_frequency := make(map[MentionableID]int)

 		for _, item := range list {
@@ -1784,7 +1784,7 @@ func TestGetMentionKeywords(t *testing.T) {
 	require.Len(t, ids, 2)
 	require.False(t, ids[0] != mentionableUser1ID && ids[1] != mentionableUser1ID, "should've mentioned user1 with user")
 	require.False(t, ids[0] != mentionableUser4ID && ids[1] != mentionableUser4ID, "should've mentioned user4 with user")
-	idsMap := dup_count(keywords["@user"])
+	idsMap := dupCount(keywords["@user"])
 	require.True(t, ok)
 	require.Len(t, idsMap, 4)
 	require.Equal(t, idsMap[mentionableUser1ID], 2, "should've mentioned user1 with @user")

@@ -14,7 +14,7 @@ import (
 	"github.com/mattermost/mattermost/server/public/model"
 )

-const PluginIdGithub = "github"
+const PluginIDGithub = "github"

 func Test_SendNotifyAdminPosts(t *testing.T) {
 	t.Run("no error sending non trial upgrade post when no notifications are available", func(t *testing.T) {
@@ -144,7 +144,7 @@ func Test_SendNotifyAdminPosts(t *testing.T) {
 		// some notifications
 		_, appErr := th.App.SaveAdminNotifyData(&model.NotifyAdminData{
 			UserId:          th.BasicUser.Id,
-			RequiredPlan:    PluginIdGithub,
+			RequiredPlan:    PluginIDGithub,
 			RequiredFeature: model.PluginFeature,
 			Trial:           false,
 		})
@@ -188,7 +188,7 @@ func Test_SendNotifyAdminPosts(t *testing.T) {
 		// some notifications
 		_, appErr := th.App.SaveAdminNotifyData(&model.NotifyAdminData{
 			UserId:          th.BasicUser.Id,
-			RequiredPlan:    PluginIdGithub,
+			RequiredPlan:    PluginIDGithub,
 			RequiredFeature: model.PluginFeature,
 			Trial:           false,
 		})
@@ -234,7 +234,7 @@ func Test_SendNotifyAdminPosts(t *testing.T) {
 		defer th.TearDown()

 		err := th.App.SaveAdminNotification(th.BasicUser.Id, &model.NotifyAdminToUpgradeRequest{
-			RequiredPlan:      PluginIdGithub,
+			RequiredPlan:      PluginIDGithub,
 			RequiredFeature:   model.PluginFeature,
 			TrialNotification: false,
 		})
@@ -242,7 +242,7 @@ func Test_SendNotifyAdminPosts(t *testing.T) {
 		require.Nil(t, err)

 		err = th.App.SaveAdminNotification(th.BasicUser.Id, &model.NotifyAdminToUpgradeRequest{
-			RequiredPlan:      PluginIdGithub,
+			RequiredPlan:      PluginIDGithub,
 			RequiredFeature:   model.PluginFeature,
 			TrialNotification: false,
 		})
@@ -394,7 +394,7 @@ func Test_SendNotifyAdminPosts(t *testing.T) {
 		// some notifications
 		_, appErr = th.App.SaveAdminNotifyData(&model.NotifyAdminData{
 			UserId:          th.BasicUser.Id,
-			RequiredPlan:    PluginIdGithub,
+			RequiredPlan:    PluginIDGithub,
 			RequiredFeature: model.PluginFeature,
 			Trial:           false,
 		})

@@ -516,14 +516,14 @@ func TestAuthorizeOAuthUser(t *testing.T) {
 		state := base64.StdEncoding.EncodeToString([]byte(model.MapToJSON(stateProps)))

 		recorder := httptest.ResponseRecorder{}
-		body, receivedTeamId, receivedStateProps, _, err := th.App.AuthorizeOAuthUser(th.Context, &recorder, request, model.ServiceGitlab, "", state, "")
+		body, receivedTeamID, receivedStateProps, _, err := th.App.AuthorizeOAuthUser(th.Context, &recorder, request, model.ServiceGitlab, "", state, "")

 		require.NotNil(t, body)
 		bodyBytes, bodyErr := io.ReadAll(body)
 		require.NoError(t, bodyErr)
 		assert.Equal(t, userData, string(bodyBytes))

-		assert.Equal(t, stateProps["team_id"], receivedTeamId)
+		assert.Equal(t, stateProps["team_id"], receivedTeamID)
 		assert.Equal(t, stateProps, receivedStateProps)
 		assert.Nil(t, err)

@@ -62,11 +62,11 @@ func makeInMemoryGzipTarFile(t *testing.T, files []testFile) *bytes.Reader {
 	return bytes.NewReader(buf.Bytes())
 }

-type byBundleInfoId []*model.BundleInfo
+type byBundleInfoID []*model.BundleInfo

-func (b byBundleInfoId) Len() int           { return len(b) }
-func (b byBundleInfoId) Swap(i, j int)      { b[i], b[j] = b[j], b[i] }
-func (b byBundleInfoId) Less(i, j int) bool { return b[i].Manifest.Id < b[j].Manifest.Id }
+func (b byBundleInfoID) Len() int           { return len(b) }
+func (b byBundleInfoID) Swap(i, j int)      { b[i], b[j] = b[j], b[i] }
+func (b byBundleInfoID) Less(i, j int) bool { return b[i].Manifest.Id < b[j].Manifest.Id }

 func TestInstallPluginLocally(t *testing.T) {
 	t.Run("invalid tar", func(t *testing.T) {
@@ -145,7 +145,7 @@ func TestInstallPluginLocally(t *testing.T) {
 		bundleInfos, err := pluginsEnvironment.Available()
 		require.NoError(t, err)

-		sort.Sort(byBundleInfoId(bundleInfos))
+		sort.Sort(byBundleInfoID(bundleInfos))

 		actualManifests := make([]*model.Manifest, 0, len(bundleInfos))
 		for _, bundleInfo := range bundleInfos {

@@ -805,11 +805,11 @@ func TestPluginStatusActivateError(t *testing.T) {
 	})
 }

-type byId []*plugin.PrepackagedPlugin
+type byID []*plugin.PrepackagedPlugin

-func (a byId) Len() int           { return len(a) }
-func (a byId) Swap(i, j int)      { a[i], a[j] = a[j], a[i] }
-func (a byId) Less(i, j int) bool { return a[i].Manifest.Id < a[j].Manifest.Id }
+func (a byID) Len() int           { return len(a) }
+func (a byID) Swap(i, j int)      { a[i], a[j] = a[j], a[i] }
+func (a byID) Less(i, j int) bool { return a[i].Manifest.Id < a[j].Manifest.Id }

 type pluginStatusById model.PluginStatuses

@@ -1027,7 +1027,7 @@ func TestProcessPrepackagedPlugins(t *testing.T) {

 		plugins, transitionalPlugins := initPlugins(t, th)
 		require.Len(t, plugins, 2, "expected two prepackaged plugins")
-		sort.Sort(byId(plugins))
+		sort.Sort(byID(plugins))
 		expectPrepackagedPlugin(t, "testplugin", "0.0.1", plugins[0])
 		expectPrepackagedPlugin(t, "testplugin2", "1.2.3", plugins[1])
 		require.Empty(t, transitionalPlugins)
@@ -1061,7 +1061,7 @@ func TestProcessPrepackagedPlugins(t *testing.T) {

 		plugins, transitionalPlugins := initPlugins(t, th)
 		require.Len(t, plugins, 2, "expected two prepackaged plugins")
-		sort.Sort(byId(plugins))
+		sort.Sort(byID(plugins))
 		expectPrepackagedPlugin(t, "testplugin", "0.0.1", plugins[0])
 		expectPrepackagedPlugin(t, "testplugin2", "1.2.3", plugins[1])
 		require.Empty(t, transitionalPlugins)
@@ -1102,7 +1102,7 @@ func TestProcessPrepackagedPlugins(t *testing.T) {
 		require.NoError(t, err)

 		require.Len(t, plugins, 2, "expected two prepackaged plugins")
-		sort.Sort(byId(plugins))
+		sort.Sort(byID(plugins))
 		expectPrepackagedPlugin(t, "testplugin", "0.0.2", plugins[0])
 		expectPrepackagedPlugin(t, "testplugin2", "1.2.3", plugins[1])
 		require.Empty(t, transitionalPlugins)
@@ -1140,7 +1140,7 @@ func TestProcessPrepackagedPlugins(t *testing.T) {

 		plugins, transitionalPlugins := initPlugins(t, th)
 		require.Len(t, plugins, 2, "expected two prepackaged plugins")
-		sort.Sort(byId(plugins))
+		sort.Sort(byID(plugins))
 		expectPrepackagedPlugin(t, "testplugin", "0.0.1", plugins[0])
 		expectPrepackagedPlugin(t, "testplugin2", "1.2.3", plugins[1])
 		require.Empty(t, transitionalPlugins)
@@ -1173,7 +1173,7 @@ func TestProcessPrepackagedPlugins(t *testing.T) {

 		plugins, transitionalPlugins := initPlugins(t, th)
 		require.Len(t, plugins, 2, "expected two prepackaged plugins")
-		sort.Sort(byId(plugins))
+		sort.Sort(byID(plugins))
 		expectPrepackagedPlugin(t, "testplugin", "0.0.1", plugins[0])
 		expectPrepackagedPlugin(t, "testplugin2", "1.2.3", plugins[1])
 		require.Empty(t, transitionalPlugins)
@@ -1201,7 +1201,7 @@ func TestProcessPrepackagedPlugins(t *testing.T) {

 		plugins, transitionalPlugins := initPlugins(t, th)
 		require.Len(t, plugins, 2, "expected two prepackaged plugins")
-		sort.Sort(byId(plugins))
+		sort.Sort(byID(plugins))
 		expectPrepackagedPlugin(t, "testplugin", "0.0.1", plugins[0])
 		expectPrepackagedPlugin(t, "testplugin2", "1.2.3", plugins[1])
 		require.Empty(t, transitionalPlugins)
@@ -1225,7 +1225,7 @@ func TestProcessPrepackagedPlugins(t *testing.T) {

 		plugins, transitionalPlugins = initPlugins(t, th)
 		require.Len(t, plugins, 2, "expected two prepackaged plugins")
-		sort.Sort(byId(plugins))
+		sort.Sort(byID(plugins))
 		expectPrepackagedPlugin(t, "testplugin", "0.0.1", plugins[0])
 		expectPrepackagedPlugin(t, "testplugin2", "1.2.3", plugins[1])
 		require.Empty(t, transitionalPlugins)

@@ -94,15 +94,15 @@ func linearFilterPostList(postList *model.PostList, earliestAccessibleTime int64
 	order := postList.Order

 	n := 0
-	for i, postId := range order {
-		if createAt := posts[postId].CreateAt; createAt >= earliestAccessibleTime {
+	for i, postID := range order {
+		if createAt := posts[postID].CreateAt; createAt >= earliestAccessibleTime {
 			order[n] = order[i]
 			n++
 		} else {
 			if createAt > postList.FirstInaccessiblePostTime {
 				postList.FirstInaccessiblePostTime = createAt
 			}
-			delete(posts, postId)
+			delete(posts, postID)
 		}
 	}
 	postList.Order = order[:n]

@@ -363,10 +363,10 @@ func TestGetCloudSession(t *testing.T) {
 func TestGetRemoteClusterSession(t *testing.T) {
 	th := Setup(t)
 	token := model.NewId()
-	remoteId := model.NewId()
+	remoteID := model.NewId()

 	rc := model.RemoteCluster{
-		RemoteId:     remoteId,
+		RemoteId:     remoteID,
 		RemoteTeamId: model.NewId(),
 		Name:         "test",
 		Token:        token,
@@ -377,14 +377,14 @@ func TestGetRemoteClusterSession(t *testing.T) {
 	require.NoError(t, err)

 	t.Run("Valid remote token should return session", func(t *testing.T) {
-		session, err := th.App.GetRemoteClusterSession(token, remoteId)
+		session, err := th.App.GetRemoteClusterSession(token, remoteID)
 		require.Nil(t, err)
 		require.NotNil(t, session)
 		require.Equal(t, token, session.Token)
 	})

 	t.Run("Invalid remote token should return error", func(t *testing.T) {
-		session, err := th.App.GetRemoteClusterSession(model.NewId(), remoteId)
+		session, err := th.App.GetRemoteClusterSession(model.NewId(), remoteID)
 		require.NotNil(t, err)
 		require.Nil(t, session)
 	})

@@ -20,23 +20,23 @@ func TestApp_CheckCanInviteToSharedChannel(t *testing.T) {
 	channel3 := th.CreateChannel(th.Context, th.BasicTeam)

 	data := []struct {
-		channelId string
+		channelID string
 		home      bool
 		name      string
-		remoteId  string
+		remoteID  string
 	}{
-		{channelId: channel1.Id, home: true, name: "test_home", remoteId: ""},
-		{channelId: channel2.Id, home: false, name: "test_remote", remoteId: model.NewId()},
+		{channelID: channel1.Id, home: true, name: "test_home", remoteID: ""},
+		{channelID: channel2.Id, home: false, name: "test_remote", remoteID: model.NewId()},
 	}

 	for _, d := range data {
 		sc := &model.SharedChannel{
-			ChannelId: d.channelId,
+			ChannelId: d.channelID,
 			TeamId:    th.BasicTeam.Id,
 			Home:      d.home,
 			ShareName: d.name,
 			CreatorId: th.BasicUser.Id,
-			RemoteId:  d.remoteId,
+			RemoteId:  d.remoteID,
 		}
 		_, err := th.App.ShareChannel(th.Context, sc)
 		require.NoError(t, err)
@@ -73,12 +73,12 @@ func TestApp_CheckCanInviteToSharedChannel(t *testing.T) {
 	})

 	t.Run("Test CheckCanInviteToSharedChannel: Home shared channel", func(t *testing.T) {
-		err := th.App.CheckCanInviteToSharedChannel(data[0].channelId)
+		err := th.App.CheckCanInviteToSharedChannel(data[0].channelID)
 		assert.NoError(t, err, "home channel should allow invites")
 	})

 	t.Run("Test CheckCanInviteToSharedChannel: Remote shared channel", func(t *testing.T) {
-		err := th.App.CheckCanInviteToSharedChannel(data[1].channelId)
+		err := th.App.CheckCanInviteToSharedChannel(data[1].channelID)
 		assert.Error(t, err, "home channel should not allow invites")
 	})

@@ -1126,13 +1126,13 @@ func TestPasswordRecovery(t *testing.T) {
 		assert.Nil(t, err)

 		tokenData := struct {
-			UserId string
+			UserID string
 			Email  string
 		}{}

 		err2 := json.Unmarshal([]byte(token.Extra), &tokenData)
 		assert.NoError(t, err2)
-		assert.Equal(t, th.BasicUser.Id, tokenData.UserId)
+		assert.Equal(t, th.BasicUser.Id, tokenData.UserID)
 		assert.Equal(t, th.BasicUser.Email, tokenData.Email)

 		err = th.App.ResetPasswordFromToken(th.Context, token.Token, "abcdefgh")

@@ -15,9 +15,9 @@ import (

 type AdvancedPermissionsPhase2Progress struct {
 	CurrentTable  string `json:"current_table"`
-	LastTeamId    string `json:"last_team_id"`
-	LastChannelId string `json:"last_channel_id"`
-	LastUserId    string `json:"last_user"`
+	LastTeamID    string `json:"last_team_id"`
+	LastChannelID string `json:"last_channel_id"`
+	LastUserID    string `json:"last_user"`
 }

 func (p *AdvancedPermissionsPhase2Progress) ToJSON() string {
@@ -35,15 +35,15 @@ func AdvancedPermissionsPhase2ProgressFromJSON(data io.Reader) *AdvancedPermissi
 }

 func (p *AdvancedPermissionsPhase2Progress) IsValid() bool {
-	if !model.IsValidId(p.LastChannelId) {
+	if !model.IsValidId(p.LastChannelID) {
 		return false
 	}

-	if !model.IsValidId(p.LastTeamId) {
+	if !model.IsValidId(p.LastTeamID) {
 		return false
 	}

-	if !model.IsValidId(p.LastUserId) {
+	if !model.IsValidId(p.LastUserID) {
 		return false
 	}

@@ -63,9 +63,9 @@ func (worker *Worker) runAdvancedPermissionsPhase2Migration(lastDone string) (bo
 		// Haven't started the migration yet.
 		progress = &AdvancedPermissionsPhase2Progress{
 			CurrentTable:  "TeamMembers",
-			LastChannelId: strings.Repeat("0", 26),
-			LastTeamId:    strings.Repeat("0", 26),
-			LastUserId:    strings.Repeat("0", 26),
+			LastChannelID: strings.Repeat("0", 26),
+			LastTeamID:    strings.Repeat("0", 26),
+			LastUserID:    strings.Repeat("0", 26),
 		}
 	} else {
 		err := json.NewDecoder(strings.NewReader(lastDone)).Decode(&progress)
@@ -79,22 +79,22 @@ func (worker *Worker) runAdvancedPermissionsPhase2Migration(lastDone string) (bo

 	if progress.CurrentTable == "TeamMembers" {
 		// Run a TeamMembers migration batch.
-		result, err := worker.store.Team().MigrateTeamMembers(progress.LastTeamId, progress.LastUserId)
+		result, err := worker.store.Team().MigrateTeamMembers(progress.LastTeamID, progress.LastUserID)
 		if err != nil {
 			return false, progress.ToJSON(), model.NewAppError("MigrationsWorker.runAdvancedPermissionsPhase2Migration", "app.team.migrate_team_members.update.app_error", nil, "", http.StatusInternalServerError).Wrap(err)
 		}
 		if result == nil {
 			// We haven't progressed. That means that we've reached the end of this stage of the migration, and should now advance to the next stage.
-			progress.LastUserId = strings.Repeat("0", 26)
+			progress.LastUserID = strings.Repeat("0", 26)
 			progress.CurrentTable = "ChannelMembers"
 			return false, progress.ToJSON(), nil
 		}

-		progress.LastTeamId = result["TeamId"]
-		progress.LastUserId = result["UserId"]
+		progress.LastTeamID = result["TeamId"]
+		progress.LastUserID = result["UserId"]
 	} else if progress.CurrentTable == "ChannelMembers" {
 		// Run a ChannelMembers migration batch.
-		data, err := worker.store.Channel().MigrateChannelMembers(progress.LastChannelId, progress.LastUserId)
+		data, err := worker.store.Channel().MigrateChannelMembers(progress.LastChannelID, progress.LastUserID)
 		if err != nil {
 			return false, progress.ToJSON(), model.NewAppError("MigrationsWorker.runAdvancedPermissionsPhase2Migration", "app.channel.migrate_channel_members.select.app_error", nil, "", http.StatusInternalServerError).Wrap(err)
 		}
@@ -104,8 +104,8 @@ func (worker *Worker) runAdvancedPermissionsPhase2Migration(lastDone string) (bo
 			return true, progress.ToJSON(), nil
 		}

-		progress.LastChannelId = data["ChannelId"]
-		progress.LastUserId = data["UserId"]
+		progress.LastChannelID = data["ChannelId"]
+		progress.LastUserID = data["UserId"]
 	}

 	return false, progress.ToJSON(), nil

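Only the Go field names change in the progress struct above; the json struct tags keep their original keys, so the persisted migration progress is unaffected by the rename. A standalone sketch (not the worker code itself) showing that encoding/json derives keys from the tags, not from the field names:

package main

import (
	"encoding/json"
	"fmt"
)

// progress mirrors the renamed fields above: ID-style Go names, unchanged json tags.
type progress struct {
	LastTeamID    string `json:"last_team_id"`
	LastChannelID string `json:"last_channel_id"`
	LastUserID    string `json:"last_user"`
}

func main() {
	b, err := json.Marshal(progress{LastTeamID: "t", LastChannelID: "c", LastUserID: "u"})
	if err != nil {
		panic(err)
	}
	fmt.Println(string(b)) // {"last_team_id":"t","last_channel_id":"c","last_user":"u"}
}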
@@ -17,7 +17,7 @@ var searchChannelStoreTests = []searchTest{
 	{
 		Name: "Should be able to autocomplete a channel by name",
 		Fn:   testAutocompleteChannelByName,
-		Tags: []string{EngineMySql, EngineElasticSearch, EngineBleve},
+		Tags: []string{EngineMySQL, EngineElasticSearch, EngineBleve},
 	},
 	{
 		Name: "Should be able to autocomplete a channel by name (Postgres)",
@@ -32,7 +32,7 @@ var searchChannelStoreTests = []searchTest{
 	{
 		Name: "Should be able to autocomplete a channel by a part of its name when has parts splitted by - character",
 		Fn:   testAutocompleteChannelByNameSplittedWithDashChar,
-		Tags: []string{EngineMySql, EngineElasticSearch, EngineBleve},
+		Tags: []string{EngineMySQL, EngineElasticSearch, EngineBleve},
 	},
 	{
 		Name: "Should be able to autocomplete a channel by a part of its name when has parts splitted by - character (Postgres)",
@@ -42,12 +42,12 @@ var searchChannelStoreTests = []searchTest{
 	{
 		Name: "Should be able to autocomplete a channel by a part of its name when has parts splitted by _ character",
 		Fn:   testAutocompleteChannelByNameSplittedWithUnderscoreChar,
-		Tags: []string{EngineMySql, EngineElasticSearch, EngineBleve},
+		Tags: []string{EngineMySQL, EngineElasticSearch, EngineBleve},
 	},
 	{
 		Name: "Should be able to autocomplete a channel by a part of its display name when has parts splitted by whitespace character",
 		Fn:   testAutocompleteChannelByDisplayNameSplittedByWhitespaces,
-		Tags: []string{EngineMySql, EngineElasticSearch, EngineBleve},
+		Tags: []string{EngineMySQL, EngineElasticSearch, EngineBleve},
 	},
 	{
 		Name: "Should be able to autocomplete retrieving all channels if the term is empty",
@@ -57,7 +57,7 @@ var searchChannelStoreTests = []searchTest{
 	{
 		Name: "Should be able to autocomplete channels in a case insensitive manner",
 		Fn:   testSearchChannelsInCaseInsensitiveManner,
-		Tags: []string{EngineMySql, EngineElasticSearch, EngineBleve},
+		Tags: []string{EngineMySQL, EngineElasticSearch, EngineBleve},
 	},
 	{
 		Name: "Should be able to autocomplete channels in a case insensitive manner (Postgres)",

@@ -28,7 +28,7 @@ var searchFileInfoStoreTests = []searchTest{
 	{
 		Name: "Should be able to search for exact phrases in quotes",
 		Fn:   testFileInfoSearchExactPhraseInQuotes,
-		Tags: []string{EnginePostgres, EngineMySql, EngineElasticSearch},
+		Tags: []string{EnginePostgres, EngineMySQL, EngineElasticSearch},
 	},
 	{
 		Name: "Should be able to search for email addresses with or without quotes",
@@ -93,12 +93,12 @@ var searchFileInfoStoreTests = []searchTest{
 	{
 		Name: "Should be able to exclude messages that contain a search term",
 		Fn:   testFileInfoFilterFilesWithATerm,
-		Tags: []string{EngineMySql, EnginePostgres},
+		Tags: []string{EngineMySQL, EnginePostgres},
 	},
 	{
 		Name: "Should be able to search using boolean operators",
 		Fn:   testFileInfoSearchUsingBooleanOperators,
-		Tags: []string{EngineMySql, EnginePostgres, EngineElasticSearch},
+		Tags: []string{EngineMySQL, EnginePostgres, EngineElasticSearch},
 	},
 	{
 		Name: "Should be able to search with combined filters",
@@ -108,7 +108,7 @@ var searchFileInfoStoreTests = []searchTest{
 	{
 		Name: "Should be able to ignore stop words",
 		Fn:   testFileInfoSearchIgnoringStopWords,
-		Tags: []string{EngineMySql, EngineElasticSearch},
+		Tags: []string{EngineMySQL, EngineElasticSearch},
 	},
 	{
 		Name: "Should support search stemming",
@@ -139,12 +139,12 @@ var searchFileInfoStoreTests = []searchTest{
 	{
 		Name: "Should support terms with underscore",
 		Fn:   testFileInfoSupportTermsWithUnderscore,
-		Tags: []string{EngineMySql, EngineElasticSearch},
+		Tags: []string{EngineMySQL, EngineElasticSearch},
 	},
 	{
 		Name: "Should be able to search in deleted/archived channels",
 		Fn:   testFileInfoSearchInDeletedOrArchivedChannels,
-		Tags: []string{EngineMySql, EnginePostgres},
+		Tags: []string{EngineMySQL, EnginePostgres},
 	},
 	{
 		Name: "Should be able to search terms with dashes",
@@ -161,7 +161,7 @@ var searchFileInfoStoreTests = []searchTest{
 	{
 		Name: "Should be able to search terms with underscores",
 		Fn:   testFileInfoSearchTermsWithUnderscores,
-		Tags: []string{EngineMySql, EngineElasticSearch},
+		Tags: []string{EngineMySQL, EngineElasticSearch},
 	},
 	{
 		Name: "Should be able to combine stemming and wildcards",
@@ -176,7 +176,7 @@ var searchFileInfoStoreTests = []searchTest{
 	{
 		Name: "Should not support slash as character separator",
 		Fn:   testFileInfoSlashShouldNotBeCharSeparator,
-		Tags: []string{EngineMySql, EngineElasticSearch},
+		Tags: []string{EngineMySQL, EngineElasticSearch},
 	},
 	{
 		Name: "Should be able to search emails without quoting them",

@@ -33,7 +33,7 @@ var searchPostStoreTests = []searchTest{
 	{
 		Name: "Should be able to search for quoted patterns with AND OR combinations",
 		Fn:   testSearchANDORQuotesCombinations,
-		Tags: []string{EnginePostgres, EngineMySql, EngineElasticSearch},
+		Tags: []string{EnginePostgres, EngineMySQL, EngineElasticSearch},
 	},
 	{
 		// Postgres supports search with and without quotes
@@ -45,7 +45,7 @@ var searchPostStoreTests = []searchTest{
 		// MySql supports search with quotes only
 		Name: "Should be able to search for email addresses with quotes",
 		Fn:   testSearchEmailAddressesWithQuotes,
-		Tags: []string{EngineMySql},
+		Tags: []string{EngineMySQL},
 	},
 	{
 		Name: "Should be able to search when markdown underscores are applied",
@@ -100,12 +100,12 @@ var searchPostStoreTests = []searchTest{
 	{
 		Name: "Should be able to exclude messages that contain a search term",
 		Fn:   testFilterMessagesWithATerm,
-		Tags: []string{EngineMySql, EnginePostgres},
+		Tags: []string{EngineMySQL, EnginePostgres},
 	},
 	{
 		Name: "Should be able to search using boolean operators",
 		Fn:   testSearchUsingBooleanOperators,
-		Tags: []string{EngineMySql, EnginePostgres, EngineElasticSearch},
+		Tags: []string{EngineMySQL, EnginePostgres, EngineElasticSearch},
 	},
 	{
 		Name: "Should be able to search with combined filters",
@@ -115,7 +115,7 @@ var searchPostStoreTests = []searchTest{
 	{
 		Name: "Should be able to ignore stop words",
 		Fn:   testSearchIgnoringStopWords,
-		Tags: []string{EngineMySql, EngineElasticSearch},
+		Tags: []string{EngineMySQL, EngineElasticSearch},
 	},
 	{
 		Name: "Should support search stemming",
@@ -146,7 +146,7 @@ var searchPostStoreTests = []searchTest{
 	{
 		Name: "Should support terms with underscore",
 		Fn:   testSupportTermsWithUnderscore,
-		Tags: []string{EngineMySql, EngineElasticSearch},
+		Tags: []string{EngineMySQL, EngineElasticSearch},
 	},
 	{
 		Name: "Should search or exclude post using hashtags",
@@ -211,7 +211,7 @@ var searchPostStoreTests = []searchTest{
 	{
 		Name: "Should be able to search in deleted/archived channels",
 		Fn:   testSearchInDeletedOrArchivedChannels,
-		Tags: []string{EngineMySql, EnginePostgres},
+		Tags: []string{EngineMySQL, EnginePostgres},
 	},
 	{
 		Name: "Should be able to search terms with dashes",
@@ -228,7 +228,7 @@ var searchPostStoreTests = []searchTest{
 	{
 		Name: "Should be able to search terms with underscores",
 		Fn:   testSearchTermsWithUnderscores,
-		Tags: []string{EngineMySql, EngineElasticSearch},
+		Tags: []string{EngineMySQL, EngineElasticSearch},
 	},
 	{
 		Name: "Should be able to search posts made by bot accounts",
@@ -253,7 +253,7 @@ var searchPostStoreTests = []searchTest{
 	{
 		Name: "Should not support slash as character separator",
 		Fn:   testSlashShouldNotBeCharSeparator,
-		Tags: []string{EngineMySql, EngineElasticSearch},
+		Tags: []string{EngineMySQL, EngineElasticSearch},
 	},
 	{
 		Name: "Should be able to search in comments",
@@ -263,7 +263,7 @@ var searchPostStoreTests = []searchTest{
 	{
 		Name: "Should be able to search terms within links",
 		Fn:   testSupportSearchTermsWithinLinks,
-		Tags: []string{EngineMySql, EngineElasticSearch},
+		Tags: []string{EngineMySQL, EngineElasticSearch},
 	},
 	{
 		Name: "Should not return links that are embedded in markdown",

@@ -13,7 +13,7 @@ import (

 const (
 	EngineAll           = "all"
-	EngineMySql         = "mysql"
+	EngineMySQL         = "mysql"
 	EnginePostgres      = "postgres"
 	EngineElasticSearch = "elasticsearch"
 	EngineBleve         = "bleve"

@@ -96,12 +96,12 @@ var searchUserStoreTests = []searchTest{
 	{
 		Name: "Should be able to search inactive users",
 		Fn:   testShouldBeAbleToSearchInactiveUsers,
-		Tags: []string{EngineMySql, EnginePostgres, EngineElasticSearch},
+		Tags: []string{EngineMySQL, EnginePostgres, EngineElasticSearch},
 	},
 	{
 		Name: "Should be able to search filtering by role",
 		Fn:   testShouldBeAbleToSearchFilteringByRole,
-		Tags: []string{EngineMySql, EnginePostgres, EngineElasticSearch},
+		Tags: []string{EngineMySQL, EnginePostgres, EngineElasticSearch},
 	},
 	{
 		Name: "Should ignore leading @ when searching users",