diff --git a/Makefile b/Makefile index 343ed785dd..6da506db01 100644 --- a/Makefile +++ b/Makefile @@ -156,15 +156,14 @@ ifeq ($(BUILD_ENTERPRISE_READY),true) -e LDAP_ORGANISATION="Mattermost Test" \ -e LDAP_DOMAIN="mm.test.com" \ -e LDAP_ADMIN_PASSWORD="mostest" \ - -d osixia/openldap:1.1.6 > /dev/null;\ + -d osixia/openldap:1.2.2 > /dev/null;\ sleep 10; \ - docker exec -ti mattermost-openldap bash -c 'echo -e "dn: ou=testusers,dc=mm,dc=test,dc=com\nobjectclass: organizationalunit" | ldapadd -x -D "cn=admin,dc=mm,dc=test,dc=com" -w mostest';\ - docker exec -ti mattermost-openldap bash -c 'echo -e "dn: uid=test.one,ou=testusers,dc=mm,dc=test,dc=com\nobjectclass: iNetOrgPerson\nsn: User\ncn: Test1\nmail: success+testone@simulator.amazonses.com" | ldapadd -x -D "cn=admin,dc=mm,dc=test,dc=com" -w mostest';\ - docker exec -ti mattermost-openldap bash -c 'ldappasswd -s Password1 -D "cn=admin,dc=mm,dc=test,dc=com" -x "uid=test.one,ou=testusers,dc=mm,dc=test,dc=com" -w mostest';\ - docker exec -ti mattermost-openldap bash -c 'echo -e "dn: uid=test.two,ou=testusers,dc=mm,dc=test,dc=com\nobjectclass: iNetOrgPerson\nsn: User\ncn: Test2\nmail: success+testtwo@simulator.amazonses.com" | ldapadd -x -D "cn=admin,dc=mm,dc=test,dc=com" -w mostest';\ - docker exec -ti mattermost-openldap bash -c 'ldappasswd -s Password1 -D "cn=admin,dc=mm,dc=test,dc=com" -x "uid=test.two,ou=testusers,dc=mm,dc=test,dc=com" -w mostest';\ - docker exec -ti mattermost-openldap bash -c 'echo -e "dn: cn=tgroup,ou=testusers,dc=mm,dc=test,dc=com\nobjectclass: groupOfUniqueNames\nuniqueMember: uid=test.one,ou=testusers,dc=mm,dc=test,dc=com" | ldapadd -x -D "cn=admin,dc=mm,dc=test,dc=com" -w mostest';\ - elif [ $(shell docker ps --no-trunc --quiet --filter name=^/mattermost-openldap$$ | wc -l) -eq 0 ]; then \ + docker cp tests/add-users.ldif mattermost-openldap:/add-users.ldif;\ + docker cp tests/add-groups.ldif mattermost-openldap:/add-groups.ldif;\ + docker cp tests/qa-data.ldif 
mattermost-openldap:/qa-data.ldif;\ + docker exec -ti mattermost-openldap bash -c 'ldapadd -x -D "cn=admin,dc=mm,dc=test,dc=com" -w mostest -f /add-users.ldif';\ + docker exec -ti mattermost-openldap bash -c 'ldapadd -x -D "cn=admin,dc=mm,dc=test,dc=com" -w mostest -f /add-groups.ldif';\ + elif [ $(shell docker ps | grep -ci mattermost-openldap) -eq 0 ]; then \ echo restarting mattermost-openldap; \ docker start mattermost-openldap > /dev/null; \ sleep 10; \ @@ -510,7 +509,7 @@ config-ldap: ## Configures LDAP. @echo Setting up configuration for local LDAP @sed -i'' -e 's|"LdapServer": ".*"|"LdapServer": "dockerhost"|g' config/config.json - @sed -i'' -e 's|"BaseDN": ".*"|"BaseDN": "ou=testusers,dc=mm,dc=test,dc=com"|g' config/config.json + @sed -i'' -e 's|"BaseDN": ".*"|"BaseDN": "dc=mm,dc=test,dc=com"|g' config/config.json @sed -i'' -e 's|"BindUsername": ".*"|"BindUsername": "cn=admin,dc=mm,dc=test,dc=com"|g' config/config.json @sed -i'' -e 's|"BindPassword": ".*"|"BindPassword": "mostest"|g' config/config.json @sed -i'' -e 's|"FirstNameAttribute": ".*"|"FirstNameAttribute": "cn"|g' config/config.json @@ -519,6 +518,9 @@ config-ldap: ## Configures LDAP. @sed -i'' -e 's|"EmailAttribute": ".*"|"EmailAttribute": "mail"|g' config/config.json @sed -i'' -e 's|"UsernameAttribute": ".*"|"UsernameAttribute": "uid"|g' config/config.json @sed -i'' -e 's|"IdAttribute": ".*"|"IdAttribute": "uid"|g' config/config.json + @sed -i'' -e 's|"LoginIdAttribute": ".*"|"LoginIdAttribute": "uid"|g' config/config.json + @sed -i'' -e 's|"GroupDisplayNameAttribute": ".*"|"GroupDisplayNameAttribute": "cn"|g' config/config.json + @sed -i'' -e 's|"GroupIdAttribute": ".*"|"GroupIdAttribute": "entryUUID"|g' config/config.json config-reset: ## Resets the config/config.json file to the default. 
@echo Resetting configuration to default diff --git a/api4/api.go b/api4/api.go index abaadf879f..b062d6141b 100644 --- a/api4/api.go +++ b/api4/api.go @@ -108,6 +108,7 @@ type Routes struct { ReactionByNameForPostForUser *mux.Router // 'api/v4/users/{user_id:[A-Za-z0-9]+}/posts/{post_id:[A-Za-z0-9]+}/reactions/{emoji_name:[A-Za-z0-9_-+]+}' TermsOfService *mux.Router // 'api/v4/terms_of_service + Groups *mux.Router // 'api/v4/groups' } type API struct { @@ -205,6 +206,7 @@ func Init(configservice configservice.ConfigService, globalOptionsFunc app.AppOp api.BaseRoutes.Image = api.BaseRoutes.ApiRoot.PathPrefix("/image").Subrouter() api.BaseRoutes.TermsOfService = api.BaseRoutes.ApiRoot.PathPrefix("/terms_of_service").Subrouter() + api.BaseRoutes.Groups = api.BaseRoutes.ApiRoot.PathPrefix("/groups").Subrouter() api.InitUser() api.InitTeam() @@ -234,6 +236,7 @@ func Init(configservice configservice.ConfigService, globalOptionsFunc app.AppOp api.InitScheme() api.InitImage() api.InitTermsOfService() + api.InitGroup() api.InitAction() root.Handle("/api/v4/{anything:.*}", http.HandlerFunc(api.Handle404)) diff --git a/api4/channel.go b/api4/channel.go index 9dafab4a10..3435537548 100644 --- a/api4/channel.go +++ b/api4/channel.go @@ -11,8 +11,10 @@ import ( ) func (api *API) InitChannel() { + api.BaseRoutes.Channels.Handle("", api.ApiSessionRequired(getAllChannels)).Methods("GET") api.BaseRoutes.Channels.Handle("", api.ApiSessionRequired(createChannel)).Methods("POST") api.BaseRoutes.Channels.Handle("/direct", api.ApiSessionRequired(createDirectChannel)).Methods("POST") + api.BaseRoutes.Channels.Handle("/search", api.ApiSessionRequired(searchAllChannels)).Methods("POST") api.BaseRoutes.Channels.Handle("/group", api.ApiSessionRequired(createGroupChannel)).Methods("POST") api.BaseRoutes.Channels.Handle("/members/{user_id:[A-Za-z0-9]+}/view", api.ApiSessionRequired(viewChannel)).Methods("POST") api.BaseRoutes.Channels.Handle("/{channel_id:[A-Za-z0-9]+}/scheme", 
api.ApiSessionRequired(updateChannelScheme)).Methods("PUT") @@ -490,6 +492,21 @@ func getPinnedPosts(c *Context, w http.ResponseWriter, r *http.Request) { w.Write([]byte(clientPostList.ToJson())) } +func getAllChannels(c *Context, w http.ResponseWriter, r *http.Request) { + if !c.App.SessionHasPermissionTo(c.App.Session, model.PERMISSION_MANAGE_SYSTEM) { + c.SetPermissionError(model.PERMISSION_MANAGE_SYSTEM) + return + } + + channels, err := c.App.GetAllChannels(c.Params.Page, c.Params.PerPage, false) + if err != nil { + c.Err = err + return + } + + w.Write([]byte(channels.ToJson())) +} + func getPublicChannelsForTeam(c *Context, w http.ResponseWriter, r *http.Request) { c.RequireTeamId() if c.Err != nil { @@ -693,6 +710,31 @@ func searchChannelsForTeam(c *Context, w http.ResponseWriter, r *http.Request) { w.Write([]byte(channels.ToJson())) } +func searchAllChannels(c *Context, w http.ResponseWriter, r *http.Request) { + props := model.ChannelSearchFromJson(r.Body) + if props == nil { + c.SetInvalidParam("channel_search") + return + } + + if !c.App.SessionHasPermissionTo(c.App.Session, model.PERMISSION_MANAGE_SYSTEM) { + c.SetPermissionError(model.PERMISSION_MANAGE_SYSTEM) + return + } + + includeDeleted := r.URL.Query().Get("include_deleted") == "true" + + channels, err := c.App.SearchAllChannels(props.Term, includeDeleted) + if err != nil { + c.Err = err + return + } + + // Don't fill in channels props, since unused by client and potentially expensive. 
+ + w.Write([]byte(channels.ToJson())) +} + func deleteChannel(c *Context, w http.ResponseWriter, r *http.Request) { c.RequireChannelId() if c.Err != nil { diff --git a/api4/channel_test.go b/api4/channel_test.go index 3e3a24ffb7..82543ea3a3 100644 --- a/api4/channel_test.go +++ b/api4/channel_test.go @@ -772,6 +772,36 @@ func TestGetChannelsForTeamForUser(t *testing.T) { CheckNoError(t, resp) } +func TestGetAllChannels(t *testing.T) { + th := Setup().InitBasic() + defer th.TearDown() + Client := th.Client + + channels, resp := th.SystemAdminClient.GetAllChannels(0, 20, "") + CheckNoError(t, resp) + + // At least, all the not-deleted channels created during the InitBasic + require.True(t, len(*channels) >= 3) + for _, c := range *channels { + require.NotEqual(t, c.TeamId, "") + } + + channels, resp = th.SystemAdminClient.GetAllChannels(0, 10, "") + CheckNoError(t, resp) + require.True(t, len(*channels) >= 3) + + channels, resp = th.SystemAdminClient.GetAllChannels(1, 1, "") + CheckNoError(t, resp) + require.Len(t, *channels, 1) + + channels, resp = th.SystemAdminClient.GetAllChannels(10000, 10000, "") + CheckNoError(t, resp) + require.Len(t, *channels, 0) + + _, resp = Client.GetAllChannels(0, 20, "") + CheckForbiddenStatus(t, resp) +} + func TestSearchChannels(t *testing.T) { th := Setup().InitBasic() defer th.TearDown() @@ -827,6 +857,37 @@ func TestSearchChannels(t *testing.T) { CheckNoError(t, resp) } +func TestSearchAllChannels(t *testing.T) { + th := Setup().InitBasic() + defer th.TearDown() + Client := th.Client + + search := &model.ChannelSearch{Term: th.BasicChannel.Name} + + channels, resp := th.SystemAdminClient.SearchAllChannels(search) + CheckNoError(t, resp) + + assert.Len(t, *channels, 1) + assert.Equal(t, (*channels)[0].Id, th.BasicChannel.Id) + + search.Term = th.BasicPrivateChannel.Name + channels, resp = th.SystemAdminClient.SearchAllChannels(search) + CheckNoError(t, resp) + + assert.Len(t, *channels, 1) + assert.Equal(t, (*channels)[0].Id, 
th.BasicPrivateChannel.Id) + + search.Term = "" + channels, resp = th.SystemAdminClient.SearchAllChannels(search) + CheckNoError(t, resp) + // At least, all the not-deleted channels created during the InitBasic + assert.True(t, len(*channels) >= 3) + + search.Term = th.BasicChannel.Name + _, resp = Client.SearchAllChannels(search) + CheckForbiddenStatus(t, resp) +} + func TestDeleteChannel(t *testing.T) { th := Setup().InitBasic() defer th.TearDown() @@ -2345,7 +2406,6 @@ func TestUpdateChannelScheme(t *testing.T) { // Test an invalid scheme scope. _, resp = th.SystemAdminClient.UpdateChannelScheme(channel.Id, teamScheme.Id) - fmt.Printf("resp: %+v\n", resp) CheckBadRequestStatus(t, resp) // Test that an unauthenticated user gets rejected. diff --git a/api4/group.go b/api4/group.go new file mode 100644 index 0000000000..d90082f10c --- /dev/null +++ b/api4/group.go @@ -0,0 +1,433 @@ +// Copyright (c) 2018-present Mattermost, Inc. All Rights Reserved. +// See License.txt for license information. 
+ +package api4 + +import ( + "database/sql" + "encoding/json" + "fmt" + "io/ioutil" + "net/http" + + "github.com/mattermost/mattermost-server/model" +) + +const ( + groupMemberActionCreate = iota + groupMemberActionDelete +) + +func (api *API) InitGroup() { + // GET /api/v4/groups/:group_id + api.BaseRoutes.Groups.Handle("/{group_id:[A-Za-z0-9]+}", + api.ApiSessionRequired(getGroup)).Methods("GET") + + // PUT /api/v4/groups/:group_id/patch + api.BaseRoutes.Groups.Handle("/{group_id:[A-Za-z0-9]+}/patch", + api.ApiSessionRequired(patchGroup)).Methods("PUT") + + // POST /api/v4/groups/:group_id/teams/:team_id/link + // POST /api/v4/groups/:group_id/channels/:channel_id/link + api.BaseRoutes.Groups.Handle("/{group_id:[A-Za-z0-9]+}/{syncable_type:teams|channels}/{syncable_id:[A-Za-z0-9]+}/link", + api.ApiSessionRequired(linkGroupSyncable)).Methods("POST") + + // DELETE /api/v4/groups/:group_id/teams/:team_id/link + // DELETE /api/v4/groups/:group_id/channels/:channel_id/link + api.BaseRoutes.Groups.Handle("/{group_id:[A-Za-z0-9]+}/{syncable_type:teams|channels}/{syncable_id:[A-Za-z0-9]+}/link", + api.ApiSessionRequired(unlinkGroupSyncable)).Methods("DELETE") + + // GET /api/v4/groups/:group_id/teams/:team_id + // GET /api/v4/groups/:group_id/channels/:channel_id + api.BaseRoutes.Groups.Handle("/{group_id:[A-Za-z0-9]+}/{syncable_type:teams|channels}/{syncable_id:[A-Za-z0-9]+}", + api.ApiSessionRequired(getGroupSyncable)).Methods("GET") + + // GET /api/v4/groups/:group_id/teams + // GET /api/v4/groups/:group_id/channels + api.BaseRoutes.Groups.Handle("/{group_id:[A-Za-z0-9]+}/{syncable_type:teams|channels}", + api.ApiSessionRequired(getGroupSyncables)).Methods("GET") + + // PUT /api/v4/groups/:group_id/teams/:team_id/patch + // PUT /api/v4/groups/:group_id/channels/:channel_id/patch + api.BaseRoutes.Groups.Handle("/{group_id:[A-Za-z0-9]+}/{syncable_type:teams|channels}/{syncable_id:[A-Za-z0-9]+}/patch", + api.ApiSessionRequired(patchGroupSyncable)).Methods("PUT") + + // 
GET /api/v4/groups/:group_id/members?page=0&per_page=100 + api.BaseRoutes.Groups.Handle("/{group_id:[A-Za-z0-9]+}/members", + api.ApiSessionRequired(getGroupMembers)).Methods("GET") +} + +func getGroup(c *Context, w http.ResponseWriter, r *http.Request) { + c.RequireGroupId() + if c.Err != nil { + return + } + + if c.App.License() == nil || !*c.App.License().Features.LDAPGroups { + c.Err = model.NewAppError("Api4.getGroup", "api.ldap_groups.license_error", nil, "", http.StatusNotImplemented) + return + } + + if !c.App.SessionHasPermissionTo(c.App.Session, model.PERMISSION_MANAGE_SYSTEM) { + c.SetPermissionError(model.PERMISSION_MANAGE_SYSTEM) + return + } + + group, err := c.App.GetGroup(c.Params.GroupId) + if err != nil { + c.Err = err + return + } + + b, marshalErr := json.Marshal(group) + if marshalErr != nil { + c.Err = model.NewAppError("Api4.getGroup", "api.marshal_error", nil, marshalErr.Error(), http.StatusInternalServerError) + return + } + + w.Write(b) +} + +func patchGroup(c *Context, w http.ResponseWriter, r *http.Request) { + c.RequireGroupId() + if c.Err != nil { + return + } + + groupPatch := model.GroupPatchFromJson(r.Body) + if groupPatch == nil { + c.SetInvalidParam("group") + return + } + + if c.App.License() == nil || !*c.App.License().Features.LDAPGroups { + c.Err = model.NewAppError("Api4.patchGroup", "api.ldap_groups.license_error", nil, "", http.StatusNotImplemented) + return + } + + if !c.App.SessionHasPermissionTo(c.App.Session, model.PERMISSION_MANAGE_SYSTEM) { + c.SetPermissionError(model.PERMISSION_MANAGE_SYSTEM) + return + } + + group, err := c.App.GetGroup(c.Params.GroupId) + if err != nil { + c.Err = err + return + } + + group.Patch(groupPatch) + + group, err = c.App.UpdateGroup(group) + if err != nil { + c.Err = err + return + } + + b, marshalErr := json.Marshal(group) + if marshalErr != nil { + c.Err = model.NewAppError("Api4.patchGroup", "api.marshal_error", nil, marshalErr.Error(), http.StatusInternalServerError) + return + } + + 
w.Write(b) +} + +func linkGroupSyncable(c *Context, w http.ResponseWriter, r *http.Request) { + c.RequireGroupId() + if c.Err != nil { + return + } + + c.RequireSyncableId() + if c.Err != nil { + return + } + syncableID := c.Params.SyncableId + + c.RequireSyncableType() + if c.Err != nil { + return + } + syncableType := c.Params.SyncableType + + body, err := ioutil.ReadAll(r.Body) + if err != nil { + c.Err = model.NewAppError("Api4.createGroupSyncable", "api.io_error", nil, err.Error(), http.StatusBadRequest) + return + } + + var patch *model.GroupSyncablePatch + err = json.Unmarshal(body, &patch) + if err != nil || patch == nil { + c.SetInvalidParam(fmt.Sprintf("Group%s", syncableType.String())) + return + } + + if c.App.License() == nil || !*c.App.License().Features.LDAPGroups { + c.Err = model.NewAppError("Api4.createGroupSyncable", "api.ldap_groups.license_error", nil, "", http.StatusNotImplemented) + return + } + + if !c.App.SessionHasPermissionTo(c.App.Session, model.PERMISSION_MANAGE_SYSTEM) { + c.SetPermissionError(model.PERMISSION_MANAGE_SYSTEM) + return + } + + groupSyncable, appErr := c.App.GetGroupSyncable(c.Params.GroupId, syncableID, syncableType) + if appErr != nil && appErr.DetailedError != sql.ErrNoRows.Error() { + c.Err = appErr + return + } + + if groupSyncable == nil { + groupSyncable = &model.GroupSyncable{ + GroupId: c.Params.GroupId, + SyncableId: syncableID, + Type: syncableType, + } + groupSyncable.Patch(patch) + groupSyncable, appErr = c.App.CreateGroupSyncable(groupSyncable) + if appErr != nil { + c.Err = appErr + return + } + } else { + groupSyncable.DeleteAt = 0 + groupSyncable.Patch(patch) + groupSyncable, appErr = c.App.UpdateGroupSyncable(groupSyncable) + if appErr != nil { + c.Err = appErr + return + } + } + + w.WriteHeader(http.StatusCreated) + + b, marshalErr := json.Marshal(groupSyncable) + if marshalErr != nil { + c.Err = model.NewAppError("Api4.createGroupSyncable", "api.marshal_error", nil, marshalErr.Error(), 
http.StatusInternalServerError) + return + } + + w.Write(b) +} + +func getGroupSyncable(c *Context, w http.ResponseWriter, r *http.Request) { + c.RequireGroupId() + if c.Err != nil { + return + } + + c.RequireSyncableId() + if c.Err != nil { + return + } + syncableID := c.Params.SyncableId + + c.RequireSyncableType() + if c.Err != nil { + return + } + syncableType := c.Params.SyncableType + + if c.App.License() == nil || !*c.App.License().Features.LDAPGroups { + c.Err = model.NewAppError("Api4.getGroupSyncable", "api.ldap_groups.license_error", nil, "", http.StatusNotImplemented) + return + } + + if !c.App.SessionHasPermissionTo(c.App.Session, model.PERMISSION_MANAGE_SYSTEM) { + c.SetPermissionError(model.PERMISSION_MANAGE_SYSTEM) + return + } + + groupSyncable, err := c.App.GetGroupSyncable(c.Params.GroupId, syncableID, syncableType) + if err != nil { + c.Err = err + return + } + + b, marshalErr := json.Marshal(groupSyncable) + if marshalErr != nil { + c.Err = model.NewAppError("Api4.getGroupSyncable", "api.marshal_error", nil, marshalErr.Error(), http.StatusInternalServerError) + return + } + + w.Write(b) +} + +func getGroupSyncables(c *Context, w http.ResponseWriter, r *http.Request) { + c.RequireGroupId() + if c.Err != nil { + return + } + + c.RequireSyncableType() + if c.Err != nil { + return + } + syncableType := c.Params.SyncableType + + if c.App.License() == nil || !*c.App.License().Features.LDAPGroups { + c.Err = model.NewAppError("Api4.getGroupSyncables", "api.ldap_groups.license_error", nil, "", http.StatusNotImplemented) + return + } + + if !c.App.SessionHasPermissionTo(c.App.Session, model.PERMISSION_MANAGE_SYSTEM) { + c.SetPermissionError(model.PERMISSION_MANAGE_SYSTEM) + return + } + + groupSyncables, err := c.App.GetGroupSyncables(c.Params.GroupId, syncableType) + if err != nil { + c.Err = err + return + } + + b, marshalErr := json.Marshal(groupSyncables) + if marshalErr != nil { + c.Err = model.NewAppError("Api4.getGroupSyncables", 
"api.marshal_error", nil, marshalErr.Error(), http.StatusInternalServerError) + return + } + + w.Write(b) +} + +func patchGroupSyncable(c *Context, w http.ResponseWriter, r *http.Request) { + c.RequireGroupId() + if c.Err != nil { + return + } + + c.RequireSyncableId() + if c.Err != nil { + return + } + syncableID := c.Params.SyncableId + + c.RequireSyncableType() + if c.Err != nil { + return + } + syncableType := c.Params.SyncableType + + body, err := ioutil.ReadAll(r.Body) + if err != nil { + c.Err = model.NewAppError("Api4.patchGroupSyncable", "api.io_error", nil, err.Error(), http.StatusBadRequest) + return + } + + var patch *model.GroupSyncablePatch + err = json.Unmarshal(body, &patch) + if err != nil || patch == nil { + c.SetInvalidParam(fmt.Sprintf("Group[%s]Patch", syncableType.String())) + return + } + + if c.App.License() == nil || !*c.App.License().Features.LDAPGroups { + c.Err = model.NewAppError("Api4.patchGroupSyncable", "api.ldap_groups.license_error", nil, "", + http.StatusNotImplemented) + return + } + + if !c.App.SessionHasPermissionTo(c.App.Session, model.PERMISSION_MANAGE_SYSTEM) { + c.SetPermissionError(model.PERMISSION_MANAGE_SYSTEM) + return + } + + groupSyncable, appErr := c.App.GetGroupSyncable(c.Params.GroupId, syncableID, syncableType) + if appErr != nil { + c.Err = appErr + return + } + + groupSyncable.Patch(patch) + + groupSyncable, appErr = c.App.UpdateGroupSyncable(groupSyncable) + if appErr != nil { + c.Err = appErr + return + } + + b, marshalErr := json.Marshal(groupSyncable) + if marshalErr != nil { + c.Err = model.NewAppError("Api4.patchGroupSyncable", "api.marshal_error", nil, marshalErr.Error(), http.StatusInternalServerError) + return + } + + w.Write(b) +} + +func unlinkGroupSyncable(c *Context, w http.ResponseWriter, r *http.Request) { + c.RequireGroupId() + if c.Err != nil { + return + } + + c.RequireSyncableId() + if c.Err != nil { + return + } + syncableID := c.Params.SyncableId + + c.RequireSyncableType() + if c.Err != nil 
{ + return + } + syncableType := c.Params.SyncableType + + if c.App.License() == nil || !*c.App.License().Features.LDAPGroups { + c.Err = model.NewAppError("Api4.unlinkGroupSyncable", "api.ldap_groups.license_error", nil, "", http.StatusNotImplemented) + return + } + + if !c.App.SessionHasPermissionTo(c.App.Session, model.PERMISSION_MANAGE_SYSTEM) { + c.SetPermissionError(model.PERMISSION_MANAGE_SYSTEM) + return + } + + _, err := c.App.DeleteGroupSyncable(c.Params.GroupId, syncableID, syncableType) + if err != nil { + c.Err = err + return + } + + ReturnStatusOK(w) +} + +func getGroupMembers(c *Context, w http.ResponseWriter, r *http.Request) { + c.RequireGroupId() + if c.Err != nil { + return + } + + if c.App.License() == nil || !*c.App.License().Features.LDAPGroups { + c.Err = model.NewAppError("Api4.getGroupMembers", "api.ldap_groups.license_error", nil, "", http.StatusNotImplemented) + return + } + + if !c.App.SessionHasPermissionTo(c.App.Session, model.PERMISSION_MANAGE_SYSTEM) { + c.SetPermissionError(model.PERMISSION_MANAGE_SYSTEM) + return + } + + members, count, err := c.App.GetGroupMemberUsersPage(c.Params.GroupId, c.Params.Page, c.Params.PerPage) + if err != nil { + c.Err = err + return + } + + b, marshalErr := json.Marshal(struct { + Members []*model.User `json:"members"` + Count int `json:"total_member_count"` + }{ + Members: members, + Count: count, + }) + if marshalErr != nil { + c.Err = model.NewAppError("Api4.getGroupMembers", "api.marshal_error", nil, marshalErr.Error(), http.StatusInternalServerError) + return + } + + w.Write(b) +} diff --git a/api4/group_test.go b/api4/group_test.go new file mode 100644 index 0000000000..b379a8878a --- /dev/null +++ b/api4/group_test.go @@ -0,0 +1,622 @@ +// Copyright (c) 2018-present Mattermost, Inc. All Rights Reserved. +// See License.txt for license information. 
+ +package api4 + +import ( + "fmt" + "net/http" + "testing" + + "github.com/stretchr/testify/assert" + + "github.com/mattermost/mattermost-server/model" +) + +func TestGetGroup(t *testing.T) { + th := Setup().InitBasic() + defer th.TearDown() + + id := model.NewId() + g, err := th.App.CreateGroup(&model.Group{ + DisplayName: "dn_" + id, + Name: "name" + id, + Source: model.GroupSourceLdap, + Description: "description_" + id, + RemoteId: model.NewId(), + }) + assert.Nil(t, err) + + _, response := th.Client.GetGroup(g.Id, "") + CheckNotImplementedStatus(t, response) + + _, response = th.SystemAdminClient.GetGroup(g.Id, "") + CheckNotImplementedStatus(t, response) + + th.App.SetLicense(model.NewTestLicense("ldap")) + + group, response := th.SystemAdminClient.GetGroup(g.Id, "") + CheckNoError(t, response) + + assert.Equal(t, g.DisplayName, group.DisplayName) + assert.Equal(t, g.Name, group.Name) + assert.Equal(t, g.Source, group.Source) + assert.Equal(t, g.Description, group.Description) + assert.Equal(t, g.RemoteId, group.RemoteId) + assert.Equal(t, g.CreateAt, group.CreateAt) + assert.Equal(t, g.UpdateAt, group.UpdateAt) + assert.Equal(t, g.DeleteAt, group.DeleteAt) + + _, response = th.SystemAdminClient.GetGroup(model.NewId(), "") + CheckNotFoundStatus(t, response) + + _, response = th.SystemAdminClient.GetGroup("12345", "") + CheckBadRequestStatus(t, response) + + th.SystemAdminClient.Logout() + _, response = th.SystemAdminClient.GetGroup(group.Id, "") + CheckUnauthorizedStatus(t, response) +} + +func TestPatchGroup(t *testing.T) { + th := Setup().InitBasic() + defer th.TearDown() + + id := model.NewId() + g, err := th.App.CreateGroup(&model.Group{ + DisplayName: "dn_" + id, + Name: "name" + id, + Source: model.GroupSourceLdap, + Description: "description_" + id, + RemoteId: model.NewId(), + }) + assert.Nil(t, err) + + updateFmt := "%s_updated" + + newName := fmt.Sprintf(updateFmt, g.Name) + newDisplayName := fmt.Sprintf(updateFmt, g.DisplayName) + newDescription 
:= fmt.Sprintf(updateFmt, g.Description) + + gp := &model.GroupPatch{ + Name: &newName, + DisplayName: &newDisplayName, + Description: &newDescription, + } + + _, response := th.Client.PatchGroup(g.Id, gp) + CheckNotImplementedStatus(t, response) + + _, response = th.SystemAdminClient.PatchGroup(g.Id, gp) + CheckNotImplementedStatus(t, response) + + th.App.SetLicense(model.NewTestLicense("ldap")) + + group2, response := th.SystemAdminClient.PatchGroup(g.Id, gp) + CheckOKStatus(t, response) + + group, response := th.SystemAdminClient.GetGroup(g.Id, "") + CheckNoError(t, response) + + assert.Equal(t, *gp.DisplayName, group.DisplayName) + assert.Equal(t, *gp.DisplayName, group2.DisplayName) + assert.Equal(t, *gp.Name, group.Name) + assert.Equal(t, *gp.Name, group2.Name) + assert.Equal(t, *gp.Description, group.Description) + assert.Equal(t, *gp.Description, group2.Description) + + assert.Equal(t, group2.UpdateAt, group.UpdateAt) + + assert.Equal(t, g.Source, group.Source) + assert.Equal(t, g.Source, group2.Source) + assert.Equal(t, g.RemoteId, group.RemoteId) + assert.Equal(t, g.RemoteId, group2.RemoteId) + assert.Equal(t, g.CreateAt, group.CreateAt) + assert.Equal(t, g.CreateAt, group2.CreateAt) + assert.Equal(t, g.DeleteAt, group.DeleteAt) + assert.Equal(t, g.DeleteAt, group2.DeleteAt) + + _, response = th.SystemAdminClient.PatchGroup(model.NewId(), gp) + CheckNotFoundStatus(t, response) + + th.SystemAdminClient.Logout() + _, response = th.SystemAdminClient.PatchGroup(group.Id, gp) + CheckUnauthorizedStatus(t, response) +} + +func TestLinkGroupTeam(t *testing.T) { + th := Setup().InitBasic() + defer th.TearDown() + + id := model.NewId() + g, err := th.App.CreateGroup(&model.Group{ + DisplayName: "dn_" + id, + Name: "name" + id, + Source: model.GroupSourceLdap, + Description: "description_" + id, + RemoteId: model.NewId(), + }) + assert.Nil(t, err) + + patch := &model.GroupSyncablePatch{ + CanLeave: model.NewBool(true), + AutoAdd: model.NewBool(true), + } + + _, 
response := th.Client.LinkGroupSyncable(g.Id, th.BasicTeam.Id, model.GroupSyncableTypeTeam, patch) + CheckNotImplementedStatus(t, response) + + _, response = th.SystemAdminClient.LinkGroupSyncable(g.Id, th.BasicTeam.Id, model.GroupSyncableTypeTeam, patch) + CheckNotImplementedStatus(t, response) + + th.App.SetLicense(model.NewTestLicense("ldap")) + + groupTeam, response := th.SystemAdminClient.LinkGroupSyncable(g.Id, th.BasicTeam.Id, model.GroupSyncableTypeTeam, patch) + assert.Equal(t, http.StatusCreated, response.StatusCode) + assert.NotNil(t, groupTeam) +} + +func TestLinkGroupChannel(t *testing.T) { + th := Setup().InitBasic() + defer th.TearDown() + + id := model.NewId() + g, err := th.App.CreateGroup(&model.Group{ + DisplayName: "dn_" + id, + Name: "name" + id, + Source: model.GroupSourceLdap, + Description: "description_" + id, + RemoteId: model.NewId(), + }) + assert.Nil(t, err) + + patch := &model.GroupSyncablePatch{ + CanLeave: model.NewBool(true), + AutoAdd: model.NewBool(true), + } + + _, response := th.Client.LinkGroupSyncable(g.Id, th.BasicChannel.Id, model.GroupSyncableTypeChannel, patch) + CheckNotImplementedStatus(t, response) + + _, response = th.SystemAdminClient.LinkGroupSyncable(g.Id, th.BasicChannel.Id, model.GroupSyncableTypeChannel, patch) + CheckNotImplementedStatus(t, response) + + th.App.SetLicense(model.NewTestLicense("ldap")) + + _, response = th.SystemAdminClient.LinkGroupSyncable(g.Id, th.BasicChannel.Id, model.GroupSyncableTypeChannel, patch) + assert.Equal(t, http.StatusCreated, response.StatusCode) +} + +func TestUnlinkGroupTeam(t *testing.T) { + th := Setup().InitBasic() + defer th.TearDown() + + id := model.NewId() + g, err := th.App.CreateGroup(&model.Group{ + DisplayName: "dn_" + id, + Name: "name" + id, + Source: model.GroupSourceLdap, + Description: "description_" + id, + RemoteId: model.NewId(), + }) + assert.Nil(t, err) + + patch := &model.GroupSyncablePatch{ + CanLeave: model.NewBool(true), + AutoAdd: model.NewBool(true), 
+ } + + th.App.SetLicense(model.NewTestLicense("ldap")) + + _, response := th.SystemAdminClient.LinkGroupSyncable(g.Id, th.BasicTeam.Id, model.GroupSyncableTypeTeam, patch) + assert.Equal(t, http.StatusCreated, response.StatusCode) + + th.App.SetLicense(nil) + + response = th.Client.UnlinkGroupSyncable(g.Id, th.BasicTeam.Id, model.GroupSyncableTypeTeam) + CheckNotImplementedStatus(t, response) + + response = th.SystemAdminClient.UnlinkGroupSyncable(g.Id, th.BasicTeam.Id, model.GroupSyncableTypeTeam) + CheckNotImplementedStatus(t, response) + + th.App.SetLicense(model.NewTestLicense("ldap")) + + response = th.SystemAdminClient.UnlinkGroupSyncable(g.Id, th.BasicTeam.Id, model.GroupSyncableTypeTeam) + CheckOKStatus(t, response) +} + +func TestUnlinkGroupChannel(t *testing.T) { + th := Setup().InitBasic() + defer th.TearDown() + + id := model.NewId() + g, err := th.App.CreateGroup(&model.Group{ + DisplayName: "dn_" + id, + Name: "name" + id, + Source: model.GroupSourceLdap, + Description: "description_" + id, + RemoteId: model.NewId(), + }) + assert.Nil(t, err) + + patch := &model.GroupSyncablePatch{ + CanLeave: model.NewBool(true), + AutoAdd: model.NewBool(true), + } + + th.App.SetLicense(model.NewTestLicense("ldap")) + + _, response := th.SystemAdminClient.LinkGroupSyncable(g.Id, th.BasicChannel.Id, model.GroupSyncableTypeChannel, patch) + assert.Equal(t, http.StatusCreated, response.StatusCode) + + th.App.SetLicense(nil) + + response = th.Client.UnlinkGroupSyncable(g.Id, th.BasicChannel.Id, model.GroupSyncableTypeChannel) + CheckNotImplementedStatus(t, response) + + response = th.SystemAdminClient.UnlinkGroupSyncable(g.Id, th.BasicChannel.Id, model.GroupSyncableTypeChannel) + CheckNotImplementedStatus(t, response) + + th.App.SetLicense(model.NewTestLicense("ldap")) + + response = th.SystemAdminClient.UnlinkGroupSyncable(g.Id, th.BasicChannel.Id, model.GroupSyncableTypeChannel) + CheckOKStatus(t, response) +} + +func TestGetGroupTeam(t *testing.T) { + th := 
Setup().InitBasic() + defer th.TearDown() + + id := model.NewId() + g, err := th.App.CreateGroup(&model.Group{ + DisplayName: "dn_" + id, + Name: "name" + id, + Source: model.GroupSourceLdap, + Description: "description_" + id, + RemoteId: model.NewId(), + }) + assert.Nil(t, err) + + _, response := th.Client.GetGroupSyncable(g.Id, th.BasicTeam.Id, model.GroupSyncableTypeTeam, "") + CheckNotImplementedStatus(t, response) + + _, response = th.SystemAdminClient.GetGroupSyncable(g.Id, th.BasicTeam.Id, model.GroupSyncableTypeTeam, "") + CheckNotImplementedStatus(t, response) + + th.App.SetLicense(model.NewTestLicense("ldap")) + + patch := &model.GroupSyncablePatch{ + CanLeave: model.NewBool(true), + AutoAdd: model.NewBool(true), + } + + _, response = th.SystemAdminClient.LinkGroupSyncable(g.Id, th.BasicTeam.Id, model.GroupSyncableTypeTeam, patch) + assert.Equal(t, http.StatusCreated, response.StatusCode) + + groupSyncable, response := th.SystemAdminClient.GetGroupSyncable(g.Id, th.BasicTeam.Id, model.GroupSyncableTypeTeam, "") + CheckOKStatus(t, response) + assert.NotNil(t, groupSyncable) + + assert.Equal(t, g.Id, groupSyncable.GroupId) + assert.Equal(t, th.BasicTeam.Id, groupSyncable.SyncableId) + assert.Equal(t, *patch.AutoAdd, groupSyncable.AutoAdd) + // assert.Equal(t, *patch.CanLeave, groupSyncable.CanLeave) // TODO: Re-add this test in phase 2 of LDAP groups sync. 
+ + _, response = th.SystemAdminClient.GetGroupSyncable(model.NewId(), th.BasicTeam.Id, model.GroupSyncableTypeTeam, "") + CheckNotFoundStatus(t, response) + + _, response = th.SystemAdminClient.GetGroupSyncable(g.Id, model.NewId(), model.GroupSyncableTypeTeam, "") + CheckNotFoundStatus(t, response) + + _, response = th.SystemAdminClient.GetGroupSyncable("asdfasdfe3", th.BasicTeam.Id, model.GroupSyncableTypeTeam, "") + CheckBadRequestStatus(t, response) + + _, response = th.SystemAdminClient.GetGroupSyncable(g.Id, "asdfasdfe3", model.GroupSyncableTypeTeam, "") + CheckBadRequestStatus(t, response) + + th.SystemAdminClient.Logout() + _, response = th.SystemAdminClient.GetGroupSyncable(g.Id, th.BasicTeam.Id, model.GroupSyncableTypeTeam, "") + CheckUnauthorizedStatus(t, response) +} + +func TestGetGroupChannel(t *testing.T) { + th := Setup().InitBasic() + defer th.TearDown() + + id := model.NewId() + g, err := th.App.CreateGroup(&model.Group{ + DisplayName: "dn_" + id, + Name: "name" + id, + Source: model.GroupSourceLdap, + Description: "description_" + id, + RemoteId: model.NewId(), + }) + assert.Nil(t, err) + + _, response := th.Client.GetGroupSyncable(g.Id, th.BasicChannel.Id, model.GroupSyncableTypeChannel, "") + CheckNotImplementedStatus(t, response) + + _, response = th.SystemAdminClient.GetGroupSyncable(g.Id, th.BasicChannel.Id, model.GroupSyncableTypeChannel, "") + CheckNotImplementedStatus(t, response) + + th.App.SetLicense(model.NewTestLicense("ldap")) + + patch := &model.GroupSyncablePatch{ + CanLeave: model.NewBool(true), + AutoAdd: model.NewBool(true), + } + + _, response = th.SystemAdminClient.LinkGroupSyncable(g.Id, th.BasicChannel.Id, model.GroupSyncableTypeChannel, patch) + assert.Equal(t, http.StatusCreated, response.StatusCode) + + groupSyncable, response := th.SystemAdminClient.GetGroupSyncable(g.Id, th.BasicChannel.Id, model.GroupSyncableTypeChannel, "") + CheckOKStatus(t, response) + assert.NotNil(t, groupSyncable) + + assert.Equal(t, g.Id, 
groupSyncable.GroupId) + assert.Equal(t, th.BasicChannel.Id, groupSyncable.SyncableId) + assert.Equal(t, *patch.AutoAdd, groupSyncable.AutoAdd) + // assert.Equal(t, *patch.CanLeave, groupSyncable.CanLeave) // TODO: Re-add this test in phase 2 of LDAP groups sync. + + _, response = th.SystemAdminClient.GetGroupSyncable(model.NewId(), th.BasicChannel.Id, model.GroupSyncableTypeChannel, "") + CheckNotFoundStatus(t, response) + + _, response = th.SystemAdminClient.GetGroupSyncable(g.Id, model.NewId(), model.GroupSyncableTypeChannel, "") + CheckNotFoundStatus(t, response) + + _, response = th.SystemAdminClient.GetGroupSyncable("asdfasdfe3", th.BasicChannel.Id, model.GroupSyncableTypeChannel, "") + CheckBadRequestStatus(t, response) + + _, response = th.SystemAdminClient.GetGroupSyncable(g.Id, "asdfasdfe3", model.GroupSyncableTypeChannel, "") + CheckBadRequestStatus(t, response) + + th.SystemAdminClient.Logout() + _, response = th.SystemAdminClient.GetGroupSyncable(g.Id, th.BasicChannel.Id, model.GroupSyncableTypeChannel, "") + CheckUnauthorizedStatus(t, response) +} + +func TestGetGroupTeams(t *testing.T) { + th := Setup().InitBasic() + defer th.TearDown() + + id := model.NewId() + g, err := th.App.CreateGroup(&model.Group{ + DisplayName: "dn_" + id, + Name: "name" + id, + Source: model.GroupSourceLdap, + Description: "description_" + id, + RemoteId: model.NewId(), + }) + assert.Nil(t, err) + + th.App.SetLicense(model.NewTestLicense("ldap")) + + patch := &model.GroupSyncablePatch{ + CanLeave: model.NewBool(true), + AutoAdd: model.NewBool(true), + } + + for i := 0; i < 10; i++ { + team := th.CreateTeam() + _, response := th.SystemAdminClient.LinkGroupSyncable(g.Id, team.Id, model.GroupSyncableTypeTeam, patch) + assert.Equal(t, http.StatusCreated, response.StatusCode) + } + + th.App.SetLicense(nil) + + _, response := th.Client.GetGroupSyncables(g.Id, model.GroupSyncableTypeTeam, "") + CheckNotImplementedStatus(t, response) + + _, response = 
th.SystemAdminClient.GetGroupSyncables(g.Id, model.GroupSyncableTypeTeam, "") + CheckNotImplementedStatus(t, response) + + th.App.SetLicense(model.NewTestLicense("ldap")) + + _, response = th.Client.GetGroupSyncables(g.Id, model.GroupSyncableTypeTeam, "") + assert.Equal(t, http.StatusForbidden, response.StatusCode) + + groupSyncables, response := th.SystemAdminClient.GetGroupSyncables(g.Id, model.GroupSyncableTypeTeam, "") + CheckOKStatus(t, response) + + assert.Len(t, groupSyncables, 10) + + th.SystemAdminClient.Logout() + _, response = th.SystemAdminClient.GetGroupSyncables(g.Id, model.GroupSyncableTypeTeam, "") + CheckUnauthorizedStatus(t, response) +} + +func TestGetGroupChannels(t *testing.T) { + th := Setup().InitBasic() + defer th.TearDown() + + id := model.NewId() + g, err := th.App.CreateGroup(&model.Group{ + DisplayName: "dn_" + id, + Name: "name" + id, + Source: model.GroupSourceLdap, + Description: "description_" + id, + RemoteId: model.NewId(), + }) + assert.Nil(t, err) + + th.App.SetLicense(model.NewTestLicense("ldap")) + + patch := &model.GroupSyncablePatch{ + CanLeave: model.NewBool(true), + AutoAdd: model.NewBool(true), + } + + for i := 0; i < 10; i++ { + channel := th.CreatePublicChannel() + _, response := th.SystemAdminClient.LinkGroupSyncable(g.Id, channel.Id, model.GroupSyncableTypeChannel, patch) + assert.Equal(t, http.StatusCreated, response.StatusCode) + } + + th.App.SetLicense(nil) + + _, response := th.Client.GetGroupSyncables(g.Id, model.GroupSyncableTypeChannel, "") + CheckNotImplementedStatus(t, response) + + _, response = th.SystemAdminClient.GetGroupSyncables(g.Id, model.GroupSyncableTypeChannel, "") + CheckNotImplementedStatus(t, response) + + th.App.SetLicense(model.NewTestLicense("ldap")) + + _, response = th.Client.GetGroupSyncables(g.Id, model.GroupSyncableTypeChannel, "") + assert.Equal(t, http.StatusForbidden, response.StatusCode) + + groupSyncables, response := th.SystemAdminClient.GetGroupSyncables(g.Id, 
model.GroupSyncableTypeChannel, "") + CheckOKStatus(t, response) + + assert.Len(t, groupSyncables, 10) + + th.SystemAdminClient.Logout() + _, response = th.SystemAdminClient.GetGroupSyncables(g.Id, model.GroupSyncableTypeChannel, "") + CheckUnauthorizedStatus(t, response) +} + +func TestPatchGroupTeam(t *testing.T) { + th := Setup().InitBasic() + defer th.TearDown() + + id := model.NewId() + g, err := th.App.CreateGroup(&model.Group{ + DisplayName: "dn_" + id, + Name: "name" + id, + Source: model.GroupSourceLdap, + Description: "description_" + id, + RemoteId: model.NewId(), + }) + assert.Nil(t, err) + + patch := &model.GroupSyncablePatch{ + CanLeave: model.NewBool(true), + AutoAdd: model.NewBool(true), + } + + th.App.SetLicense(model.NewTestLicense("ldap")) + + groupSyncable, response := th.SystemAdminClient.LinkGroupSyncable(g.Id, th.BasicTeam.Id, model.GroupSyncableTypeTeam, patch) + assert.Equal(t, http.StatusCreated, response.StatusCode) + assert.NotNil(t, groupSyncable) + // assert.True(t, groupSyncable.CanLeave) // TODO: Re-add this test in phase 2 of LDAP groups sync. 
+ assert.True(t, groupSyncable.AutoAdd) + + _, response = th.Client.PatchGroupSyncable(g.Id, th.BasicTeam.Id, model.GroupSyncableTypeTeam, patch) + assert.Equal(t, http.StatusForbidden, response.StatusCode) + + th.App.SetLicense(nil) + + _, response = th.SystemAdminClient.PatchGroupSyncable(g.Id, th.BasicTeam.Id, model.GroupSyncableTypeTeam, patch) + CheckNotImplementedStatus(t, response) + + th.App.SetLicense(model.NewTestLicense("ldap")) + + patch.AutoAdd = model.NewBool(false) + groupSyncable, response = th.SystemAdminClient.PatchGroupSyncable(g.Id, th.BasicTeam.Id, model.GroupSyncableTypeTeam, patch) + CheckOKStatus(t, response) + assert.False(t, groupSyncable.AutoAdd) + + assert.Equal(t, g.Id, groupSyncable.GroupId) + assert.Equal(t, th.BasicTeam.Id, groupSyncable.SyncableId) + assert.Equal(t, model.GroupSyncableTypeTeam, groupSyncable.Type) + + // TODO: Re-add this test in phase 2 of LDAP groups sync. + // patch.CanLeave = model.NewBool(false) + // _, response = th.SystemAdminClient.PatchGroupSyncable(g.Id, th.BasicTeam.Id, model.GroupSyncableTypeTeam, patch) + // CheckBadRequestStatus(t, response) + + patch.AutoAdd = model.NewBool(true) + groupSyncable, response = th.SystemAdminClient.PatchGroupSyncable(g.Id, th.BasicTeam.Id, model.GroupSyncableTypeTeam, patch) + CheckOKStatus(t, response) + assert.False(t, groupSyncable.CanLeave) + + _, response = th.SystemAdminClient.PatchGroupSyncable(model.NewId(), th.BasicTeam.Id, model.GroupSyncableTypeTeam, patch) + CheckNotFoundStatus(t, response) + + _, response = th.SystemAdminClient.PatchGroupSyncable(g.Id, model.NewId(), model.GroupSyncableTypeTeam, patch) + CheckNotFoundStatus(t, response) + + _, response = th.SystemAdminClient.PatchGroupSyncable("abc", th.BasicTeam.Id, model.GroupSyncableTypeTeam, patch) + CheckBadRequestStatus(t, response) + + _, response = th.SystemAdminClient.PatchGroupSyncable(g.Id, "abc", model.GroupSyncableTypeTeam, patch) + CheckBadRequestStatus(t, response) + + 
th.SystemAdminClient.Logout() + _, response = th.SystemAdminClient.PatchGroupSyncable(g.Id, th.BasicTeam.Id, model.GroupSyncableTypeTeam, patch) + CheckUnauthorizedStatus(t, response) +} + +func TestPatchGroupChannel(t *testing.T) { + th := Setup().InitBasic() + defer th.TearDown() + + id := model.NewId() + g, err := th.App.CreateGroup(&model.Group{ + DisplayName: "dn_" + id, + Name: "name" + id, + Source: model.GroupSourceLdap, + Description: "description_" + id, + RemoteId: model.NewId(), + }) + assert.Nil(t, err) + + patch := &model.GroupSyncablePatch{ + CanLeave: model.NewBool(true), + AutoAdd: model.NewBool(true), + } + + th.App.SetLicense(model.NewTestLicense("ldap")) + + groupSyncable, response := th.SystemAdminClient.LinkGroupSyncable(g.Id, th.BasicChannel.Id, model.GroupSyncableTypeChannel, patch) + assert.Equal(t, http.StatusCreated, response.StatusCode) + assert.NotNil(t, groupSyncable) + // assert.True(t, groupSyncable.CanLeave) // TODO: Re-add this test in phase 2 of LDAP groups sync. + assert.True(t, groupSyncable.AutoAdd) + + _, response = th.Client.PatchGroupSyncable(g.Id, th.BasicChannel.Id, model.GroupSyncableTypeChannel, patch) + assert.Equal(t, http.StatusForbidden, response.StatusCode) + + th.App.SetLicense(nil) + + _, response = th.SystemAdminClient.PatchGroupSyncable(g.Id, th.BasicChannel.Id, model.GroupSyncableTypeChannel, patch) + CheckNotImplementedStatus(t, response) + + th.App.SetLicense(model.NewTestLicense("ldap")) + + patch.AutoAdd = model.NewBool(false) + groupSyncable, response = th.SystemAdminClient.PatchGroupSyncable(g.Id, th.BasicChannel.Id, model.GroupSyncableTypeChannel, patch) + CheckOKStatus(t, response) + assert.False(t, groupSyncable.AutoAdd) + + assert.Equal(t, g.Id, groupSyncable.GroupId) + assert.Equal(t, th.BasicChannel.Id, groupSyncable.SyncableId) + assert.Equal(t, model.GroupSyncableTypeChannel, groupSyncable.Type) + + // TODO: Re-add this test in phase 2 of LDAP groups sync. 
+ // patch.CanLeave = model.NewBool(false) + // _, response = th.SystemAdminClient.PatchGroupSyncable(g.Id, th.BasicChannel.Id, model.GroupSyncableTypeChannel, patch) + // CheckBadRequestStatus(t, response) + + patch.AutoAdd = model.NewBool(true) + groupSyncable, response = th.SystemAdminClient.PatchGroupSyncable(g.Id, th.BasicChannel.Id, model.GroupSyncableTypeChannel, patch) + CheckOKStatus(t, response) + assert.False(t, groupSyncable.CanLeave) + + _, response = th.SystemAdminClient.PatchGroupSyncable(model.NewId(), th.BasicChannel.Id, model.GroupSyncableTypeChannel, patch) + CheckNotFoundStatus(t, response) + + _, response = th.SystemAdminClient.PatchGroupSyncable(g.Id, model.NewId(), model.GroupSyncableTypeChannel, patch) + CheckNotFoundStatus(t, response) + + _, response = th.SystemAdminClient.PatchGroupSyncable("abc", th.BasicChannel.Id, model.GroupSyncableTypeChannel, patch) + CheckBadRequestStatus(t, response) + + _, response = th.SystemAdminClient.PatchGroupSyncable(g.Id, "abc", model.GroupSyncableTypeChannel, patch) + CheckBadRequestStatus(t, response) + + th.SystemAdminClient.Logout() + _, response = th.SystemAdminClient.PatchGroupSyncable(g.Id, th.BasicChannel.Id, model.GroupSyncableTypeChannel, patch) + CheckUnauthorizedStatus(t, response) +} diff --git a/api4/ldap.go b/api4/ldap.go index 5f2afb485e..69766a4add 100644 --- a/api4/ldap.go +++ b/api4/ldap.go @@ -4,14 +4,32 @@ package api4 import ( + "database/sql" + "encoding/json" "net/http" "github.com/mattermost/mattermost-server/model" ) +type mixedUnlinkedGroup struct { + Id *string `json:"mattermost_group_id"` + DisplayName string `json:"name"` + RemoteId string `json:"primary_key"` + HasSyncables *bool `json:"has_syncables"` +} + func (api *API) InitLdap() { api.BaseRoutes.LDAP.Handle("/sync", api.ApiSessionRequired(syncLdap)).Methods("POST") api.BaseRoutes.LDAP.Handle("/test", api.ApiSessionRequired(testLdap)).Methods("POST") + + // GET /api/v4/ldap/groups?page=0&per_page=1000 + 
api.BaseRoutes.LDAP.Handle("/groups", api.ApiSessionRequired(getLdapGroups)).Methods("GET") + + // POST /api/v4/ldap/groups/:remote_id/link + api.BaseRoutes.LDAP.Handle(`/groups/{remote_id}/link`, api.ApiSessionRequired(linkLdapGroup)).Methods("POST") + + // DELETE /api/v4/ldap/groups/:remote_id/link + api.BaseRoutes.LDAP.Handle(`/groups/{remote_id}/link`, api.ApiSessionRequired(unlinkLdapGroup)).Methods("DELETE") } func syncLdap(c *Context, w http.ResponseWriter, r *http.Request) { @@ -38,3 +56,159 @@ func testLdap(c *Context, w http.ResponseWriter, r *http.Request) { ReturnStatusOK(w) } + +func getLdapGroups(c *Context, w http.ResponseWriter, r *http.Request) { + if !c.App.SessionHasPermissionTo(c.App.Session, model.PERMISSION_MANAGE_SYSTEM) { + c.SetPermissionError(model.PERMISSION_MANAGE_SYSTEM) + return + } + + if c.App.License() == nil || !*c.App.License().Features.LDAPGroups { + c.Err = model.NewAppError("Api4.getLdapGroups", "api.ldap_groups.license_error", nil, "", http.StatusNotImplemented) + return + } + + groups, total, err := c.App.GetAllLdapGroupsPage(c.Params.Page, c.Params.PerPage) + if err != nil { + c.Err = err + return + } + + mugs := []*mixedUnlinkedGroup{} + for _, group := range groups { + mug := &mixedUnlinkedGroup{ + DisplayName: group.DisplayName, + RemoteId: group.RemoteId, + } + if len(group.Id) == 26 { + mug.Id = &group.Id + mug.HasSyncables = &group.HasSyncables + } + mugs = append(mugs, mug) + } + + b, marshalErr := json.Marshal(struct { + Count int `json:"count"` + Groups []*mixedUnlinkedGroup `json:"groups"` + }{Count: total, Groups: mugs}) + if marshalErr != nil { + c.Err = model.NewAppError("Api4.getLdapGroups", "api.marshal_error", nil, marshalErr.Error(), http.StatusInternalServerError) + return + } + + w.Write(b) +} + +func linkLdapGroup(c *Context, w http.ResponseWriter, r *http.Request) { + c.RequireRemoteId() + if c.Err != nil { + return + } + + if !c.App.SessionHasPermissionTo(c.App.Session, model.PERMISSION_MANAGE_SYSTEM) { 
+ c.SetPermissionError(model.PERMISSION_MANAGE_SYSTEM) + return + } + + if c.App.License() == nil || !*c.App.License().Features.LDAPGroups { + c.Err = model.NewAppError("Api4.linkLdapGroup", "api.ldap_groups.license_error", nil, "", http.StatusNotImplemented) + return + } + + ldapGroup, err := c.App.GetLdapGroup(c.Params.RemoteId) + if err != nil { + c.Err = err + return + } + + if ldapGroup == nil { + c.Err = model.NewAppError("Api4.linkLdapGroup", "api.ldap_group.not_found", nil, "", http.StatusNotFound) + return + } + + group, err := c.App.GetGroupByRemoteID(ldapGroup.RemoteId, model.GroupSourceLdap) + if err != nil && err.DetailedError != sql.ErrNoRows.Error() { + c.Err = err + return + } + + var status int + var newOrUpdatedGroup *model.Group + + // Group has been previously linked + if group != nil { + if group.DeleteAt == 0 { + newOrUpdatedGroup = group + } else { + group.DeleteAt = 0 + group.DisplayName = ldapGroup.DisplayName + group.RemoteId = ldapGroup.RemoteId + newOrUpdatedGroup, err = c.App.UpdateGroup(group) + if err != nil { + c.Err = err + return + } + } + status = http.StatusOK + } else { + // Group has never been linked + // + // TODO: In a future phase of LDAP groups sync `Name` will be used for at-mentions and will be editable on + // the front-end so it will not have an initial value of `model.NewId()` but rather a slugified version of + // the LDAP group name with an appended duplicate-breaker. 
+ newGroup := &model.Group{ + Name: model.NewId(), + DisplayName: ldapGroup.DisplayName, + RemoteId: ldapGroup.RemoteId, + Source: model.GroupSourceLdap, + } + newOrUpdatedGroup, err = c.App.CreateGroup(newGroup) + if err != nil { + c.Err = err + return + } + status = http.StatusCreated + } + + b, marshalErr := json.Marshal(newOrUpdatedGroup) + if marshalErr != nil { + c.Err = model.NewAppError("Api4.linkLdapGroup", "api.marshal_error", nil, marshalErr.Error(), http.StatusInternalServerError) + return + } + + w.WriteHeader(status) + w.Write(b) +} + +func unlinkLdapGroup(c *Context, w http.ResponseWriter, r *http.Request) { + c.RequireRemoteId() + if c.Err != nil { + return + } + + if !c.App.SessionHasPermissionTo(c.App.Session, model.PERMISSION_MANAGE_SYSTEM) { + c.SetPermissionError(model.PERMISSION_MANAGE_SYSTEM) + return + } + + if c.App.License() == nil || !*c.App.License().Features.LDAPGroups { + c.Err = model.NewAppError("Api4.unlinkLdapGroup", "api.ldap_groups.license_error", nil, "", http.StatusNotImplemented) + return + } + + group, err := c.App.GetGroupByRemoteID(c.Params.RemoteId, model.GroupSourceLdap) + if err != nil { + c.Err = err + return + } + + if group.DeleteAt == 0 { + _, err = c.App.DeleteGroup(group.Id) + if err != nil { + c.Err = err + return + } + } + + ReturnStatusOK(w) +} diff --git a/api4/ldap_test.go b/api4/ldap_test.go index 5c7f53b1d0..e738a6b78a 100644 --- a/api4/ldap_test.go +++ b/api4/ldap_test.go @@ -7,7 +7,7 @@ import ( "testing" ) -func TestLdapTest(t *testing.T) { +func TestTestLdap(t *testing.T) { th := Setup().InitBasic() defer th.TearDown() @@ -18,7 +18,7 @@ func TestLdapTest(t *testing.T) { CheckNotImplementedStatus(t, resp) } -func TestLdapSync(t *testing.T) { +func TestSyncLdap(t *testing.T) { th := Setup().InitBasic() defer th.TearDown() @@ -28,3 +28,40 @@ func TestLdapSync(t *testing.T) { _, resp = th.Client.SyncLdap() CheckForbiddenStatus(t, resp) } + +func TestGetLdapGroups(t *testing.T) { + th := Setup().InitBasic() + 
defer th.TearDown() + + _, resp := th.Client.GetLdapGroups() + CheckForbiddenStatus(t, resp) + + _, resp = th.SystemAdminClient.GetLdapGroups() + CheckNotImplementedStatus(t, resp) +} + +func TestLinkLdapGroup(t *testing.T) { + const entryUUID string = "foo" + + th := Setup().InitBasic() + defer th.TearDown() + + _, resp := th.Client.LinkLdapGroup(entryUUID) + CheckForbiddenStatus(t, resp) + + _, resp = th.SystemAdminClient.LinkLdapGroup(entryUUID) + CheckNotImplementedStatus(t, resp) +} + +func TestUnlinkLdapGroup(t *testing.T) { + const entryUUID string = "foo" + + th := Setup().InitBasic() + defer th.TearDown() + + _, resp := th.Client.UnlinkLdapGroup(entryUUID) + CheckForbiddenStatus(t, resp) + + _, resp = th.SystemAdminClient.UnlinkLdapGroup(entryUUID) + CheckNotImplementedStatus(t, resp) +} diff --git a/api4/scheme.go b/api4/scheme.go index 99463cdd01..c1438ab555 100644 --- a/api4/scheme.go +++ b/api4/scheme.go @@ -67,10 +67,6 @@ func getScheme(c *Context, w http.ResponseWriter, r *http.Request) { } func getSchemes(c *Context, w http.ResponseWriter, r *http.Request) { - if c.Err != nil { - return - } - if !c.App.SessionHasPermissionTo(c.App.Session, model.PERMISSION_MANAGE_SYSTEM) { c.SetPermissionError(model.PERMISSION_MANAGE_SYSTEM) return diff --git a/api4/team_test.go b/api4/team_test.go index 76ead3dcbb..eca9cbeb2a 100644 --- a/api4/team_test.go +++ b/api4/team_test.go @@ -2203,7 +2203,6 @@ func TestUpdateTeamScheme(t *testing.T) { // Test an invalid scheme scope. _, resp = th.SystemAdminClient.UpdateTeamScheme(team.Id, channelScheme.Id) - fmt.Printf("resp: %+v\n", resp) CheckBadRequestStatus(t, resp) // Test that an unauthenticated user gets rejected. 
diff --git a/app/channel.go b/app/channel.go index d9e4d7533d..0a31692e34 100644 --- a/app/channel.go +++ b/app/channel.go @@ -1116,6 +1116,14 @@ func (a *App) GetChannelsForUser(teamId string, userId string, includeDeleted bo return result.Data.(*model.ChannelList), nil } +func (a *App) GetAllChannels(page, perPage int, includeDeleted bool) (*model.ChannelListWithTeamData, *model.AppError) { + result := <-a.Srv.Store.Channel().GetAllChannels(page*perPage, perPage, includeDeleted) + if result.Err != nil { + return nil, result.Err + } + return result.Data.(*model.ChannelListWithTeamData), nil +} + func (a *App) GetDeletedChannels(teamId string, offset int, limit int) (*model.ChannelList, *model.AppError) { result := <-a.Srv.Store.Channel().GetDeleted(teamId, offset, limit) if result.Err != nil { @@ -1577,6 +1585,14 @@ func (a *App) AutocompleteChannelsForSearch(teamId string, userId string, term s return result.Data.(*model.ChannelList), nil } +func (a *App) SearchAllChannels(term string, includeDeleted bool) (*model.ChannelListWithTeamData, *model.AppError) { + result := <-a.Srv.Store.Channel().SearchAllChannels(term, *a.Config().TeamSettings.ExperimentalViewArchivedChannels && includeDeleted) + if result.Err != nil { + return nil, result.Err + } + return result.Data.(*model.ChannelListWithTeamData), nil +} + func (a *App) SearchChannels(teamId string, term string) (*model.ChannelList, *model.AppError) { includeDeleted := *a.Config().TeamSettings.ExperimentalViewArchivedChannels diff --git a/app/diagnostics.go b/app/diagnostics.go index 36e01ec55f..ead5dfba9f 100644 --- a/app/diagnostics.go +++ b/app/diagnostics.go @@ -264,6 +264,7 @@ func (a *App) trackConfig() { "experimental_enable_hardened_mode": *cfg.ServiceSettings.ExperimentalEnableHardenedMode, "enable_email_invitations": *cfg.ServiceSettings.EnableEmailInvitations, "experimental_channel_organization": *cfg.ServiceSettings.ExperimentalChannelOrganization, + "experimental_ldap_group_sync": 
*cfg.ServiceSettings.ExperimentalLdapGroupSync, }) a.SendDiagnostic(TRACK_CONFIG_TEAM, map[string]interface{}{ @@ -418,25 +419,28 @@ func (a *App) trackConfig() { }) a.SendDiagnostic(TRACK_CONFIG_LDAP, map[string]interface{}{ - "enable": *cfg.LdapSettings.Enable, - "enable_sync": *cfg.LdapSettings.EnableSync, - "connection_security": *cfg.LdapSettings.ConnectionSecurity, - "skip_certificate_verification": *cfg.LdapSettings.SkipCertificateVerification, - "sync_interval_minutes": *cfg.LdapSettings.SyncIntervalMinutes, - "query_timeout": *cfg.LdapSettings.QueryTimeout, - "max_page_size": *cfg.LdapSettings.MaxPageSize, - "isdefault_first_name_attribute": isDefault(*cfg.LdapSettings.FirstNameAttribute, model.LDAP_SETTINGS_DEFAULT_FIRST_NAME_ATTRIBUTE), - "isdefault_last_name_attribute": isDefault(*cfg.LdapSettings.LastNameAttribute, model.LDAP_SETTINGS_DEFAULT_LAST_NAME_ATTRIBUTE), - "isdefault_email_attribute": isDefault(*cfg.LdapSettings.EmailAttribute, model.LDAP_SETTINGS_DEFAULT_EMAIL_ATTRIBUTE), - "isdefault_username_attribute": isDefault(*cfg.LdapSettings.UsernameAttribute, model.LDAP_SETTINGS_DEFAULT_USERNAME_ATTRIBUTE), - "isdefault_nickname_attribute": isDefault(*cfg.LdapSettings.NicknameAttribute, model.LDAP_SETTINGS_DEFAULT_NICKNAME_ATTRIBUTE), - "isdefault_id_attribute": isDefault(*cfg.LdapSettings.IdAttribute, model.LDAP_SETTINGS_DEFAULT_ID_ATTRIBUTE), - "isdefault_position_attribute": isDefault(*cfg.LdapSettings.PositionAttribute, model.LDAP_SETTINGS_DEFAULT_POSITION_ATTRIBUTE), - "isdefault_login_id_attribute": isDefault(*cfg.LdapSettings.LoginIdAttribute, ""), - "isdefault_login_field_name": isDefault(*cfg.LdapSettings.LoginFieldName, model.LDAP_SETTINGS_DEFAULT_LOGIN_FIELD_NAME), - "isdefault_login_button_color": isDefault(*cfg.LdapSettings.LoginButtonColor, ""), - "isdefault_login_button_border_color": isDefault(*cfg.LdapSettings.LoginButtonBorderColor, ""), - "isdefault_login_button_text_color": isDefault(*cfg.LdapSettings.LoginButtonTextColor, ""), + 
"enable": *cfg.LdapSettings.Enable, + "enable_sync": *cfg.LdapSettings.EnableSync, + "connection_security": *cfg.LdapSettings.ConnectionSecurity, + "skip_certificate_verification": *cfg.LdapSettings.SkipCertificateVerification, + "sync_interval_minutes": *cfg.LdapSettings.SyncIntervalMinutes, + "query_timeout": *cfg.LdapSettings.QueryTimeout, + "max_page_size": *cfg.LdapSettings.MaxPageSize, + "isdefault_first_name_attribute": isDefault(*cfg.LdapSettings.FirstNameAttribute, model.LDAP_SETTINGS_DEFAULT_FIRST_NAME_ATTRIBUTE), + "isdefault_last_name_attribute": isDefault(*cfg.LdapSettings.LastNameAttribute, model.LDAP_SETTINGS_DEFAULT_LAST_NAME_ATTRIBUTE), + "isdefault_email_attribute": isDefault(*cfg.LdapSettings.EmailAttribute, model.LDAP_SETTINGS_DEFAULT_EMAIL_ATTRIBUTE), + "isdefault_username_attribute": isDefault(*cfg.LdapSettings.UsernameAttribute, model.LDAP_SETTINGS_DEFAULT_USERNAME_ATTRIBUTE), + "isdefault_nickname_attribute": isDefault(*cfg.LdapSettings.NicknameAttribute, model.LDAP_SETTINGS_DEFAULT_NICKNAME_ATTRIBUTE), + "isdefault_id_attribute": isDefault(*cfg.LdapSettings.IdAttribute, model.LDAP_SETTINGS_DEFAULT_ID_ATTRIBUTE), + "isdefault_position_attribute": isDefault(*cfg.LdapSettings.PositionAttribute, model.LDAP_SETTINGS_DEFAULT_POSITION_ATTRIBUTE), + "isdefault_login_id_attribute": isDefault(*cfg.LdapSettings.LoginIdAttribute, ""), + "isdefault_login_field_name": isDefault(*cfg.LdapSettings.LoginFieldName, model.LDAP_SETTINGS_DEFAULT_LOGIN_FIELD_NAME), + "isdefault_login_button_color": isDefault(*cfg.LdapSettings.LoginButtonColor, ""), + "isdefault_login_button_border_color": isDefault(*cfg.LdapSettings.LoginButtonBorderColor, ""), + "isdefault_login_button_text_color": isDefault(*cfg.LdapSettings.LoginButtonTextColor, ""), + "isempty_group_filter": isDefault(*cfg.LdapSettings.GroupFilter, ""), + "isdefault_group_display_name_attribute": isDefault(*cfg.LdapSettings.GroupDisplayNameAttribute, model.LDAP_SETTINGS_DEFAULT_GROUP_DISPLAY_NAME_ATTRIBUTE), 
+ "isdefault_group_id_attribute": isDefault(*cfg.LdapSettings.GroupIdAttribute, model.LDAP_SETTINGS_DEFAULT_GROUP_ID_ATTRIBUTE), }) a.SendDiagnostic(TRACK_CONFIG_COMPLIANCE, map[string]interface{}{ diff --git a/app/group.go b/app/group.go new file mode 100644 index 0000000000..102e5d2a0f --- /dev/null +++ b/app/group.go @@ -0,0 +1,150 @@ +// Copyright (c) 2018-present Mattermost, Inc. All Rights Reserved. +// See License.txt for license information. + +package app + +import ( + "github.com/mattermost/mattermost-server/model" +) + +func (a *App) GetGroup(id string) (*model.Group, *model.AppError) { + result := <-a.Srv.Store.Group().Get(id) + if result.Err != nil { + return nil, result.Err + } + return result.Data.(*model.Group), nil +} + +func (a *App) GetGroupByRemoteID(remoteID string, groupSource model.GroupSource) (*model.Group, *model.AppError) { + result := <-a.Srv.Store.Group().GetByRemoteID(remoteID, groupSource) + if result.Err != nil { + return nil, result.Err + } + return result.Data.(*model.Group), nil +} + +func (a *App) GetGroupsBySource(groupSource model.GroupSource) ([]*model.Group, *model.AppError) { + result := <-a.Srv.Store.Group().GetAllBySource(groupSource) + if result.Err != nil { + return nil, result.Err + } + return result.Data.([]*model.Group), nil +} + +func (a *App) CreateGroup(group *model.Group) (*model.Group, *model.AppError) { + result := <-a.Srv.Store.Group().Create(group) + if result.Err != nil { + return nil, result.Err + } + return result.Data.(*model.Group), nil +} + +func (a *App) UpdateGroup(group *model.Group) (*model.Group, *model.AppError) { + result := <-a.Srv.Store.Group().Update(group) + if result.Err != nil { + return nil, result.Err + } + return result.Data.(*model.Group), nil +} + +func (a *App) DeleteGroup(groupID string) (*model.Group, *model.AppError) { + result := <-a.Srv.Store.Group().Delete(groupID) + if result.Err != nil { + return nil, result.Err + } + return result.Data.(*model.Group), nil +} + +func (a *App) 
GetGroupMemberUsers(groupID string) ([]*model.User, *model.AppError) { + result := <-a.Srv.Store.Group().GetMemberUsers(groupID) + if result.Err != nil { + return nil, result.Err + } + return result.Data.([]*model.User), nil +} + +func (a *App) GetGroupMemberUsersPage(groupID string, page int, perPage int) ([]*model.User, int, *model.AppError) { + result := <-a.Srv.Store.Group().GetMemberUsersPage(groupID, page, perPage) + if result.Err != nil { + return nil, 0, result.Err + } + members := result.Data.([]*model.User) + result = <-a.Srv.Store.Group().GetMemberCount(groupID) + if result.Err != nil { + return nil, 0, result.Err + } + count := int(result.Data.(int64)) + return members, count, nil +} + +func (a *App) CreateOrRestoreGroupMember(groupID string, userID string) (*model.GroupMember, *model.AppError) { + result := <-a.Srv.Store.Group().CreateOrRestoreMember(groupID, userID) + if result.Err != nil { + return nil, result.Err + } + return result.Data.(*model.GroupMember), nil +} + +func (a *App) DeleteGroupMember(groupID string, userID string) (*model.GroupMember, *model.AppError) { + result := <-a.Srv.Store.Group().DeleteMember(groupID, userID) + if result.Err != nil { + return nil, result.Err + } + return result.Data.(*model.GroupMember), nil +} + +func (a *App) CreateGroupSyncable(groupSyncable *model.GroupSyncable) (*model.GroupSyncable, *model.AppError) { + result := <-a.Srv.Store.Group().CreateGroupSyncable(groupSyncable) + if result.Err != nil { + return nil, result.Err + } + return result.Data.(*model.GroupSyncable), nil +} + +func (a *App) GetGroupSyncable(groupID string, syncableID string, syncableType model.GroupSyncableType) (*model.GroupSyncable, *model.AppError) { + result := <-a.Srv.Store.Group().GetGroupSyncable(groupID, syncableID, syncableType) + if result.Err != nil { + return nil, result.Err + } + return result.Data.(*model.GroupSyncable), nil +} + +func (a *App) GetGroupSyncables(groupID string, syncableType model.GroupSyncableType) 
([]*model.GroupSyncable, *model.AppError) { + result := <-a.Srv.Store.Group().GetAllGroupSyncablesByGroupId(groupID, syncableType) + if result.Err != nil { + return nil, result.Err + } + return result.Data.([]*model.GroupSyncable), nil +} + +func (a *App) UpdateGroupSyncable(groupSyncable *model.GroupSyncable) (*model.GroupSyncable, *model.AppError) { + result := <-a.Srv.Store.Group().UpdateGroupSyncable(groupSyncable) + if result.Err != nil { + return nil, result.Err + } + return result.Data.(*model.GroupSyncable), nil +} + +func (a *App) DeleteGroupSyncable(groupID string, syncableID string, syncableType model.GroupSyncableType) (*model.GroupSyncable, *model.AppError) { + result := <-a.Srv.Store.Group().DeleteGroupSyncable(groupID, syncableID, syncableType) + if result.Err != nil { + return nil, result.Err + } + return result.Data.(*model.GroupSyncable), nil +} + +func (a *App) PendingAutoAddTeamMembers(minGroupMembersCreateAt int64) ([]*model.UserTeamIDPair, *model.AppError) { + result := <-a.Srv.Store.Group().PendingAutoAddTeamMembers(minGroupMembersCreateAt) + if result.Err != nil { + return nil, result.Err + } + return result.Data.([]*model.UserTeamIDPair), nil +} + +func (a *App) PendingAutoAddChannelMembers(minGroupMembersCreateAt int64) ([]*model.UserChannelIDPair, *model.AppError) { + result := <-a.Srv.Store.Group().PendingAutoAddChannelMembers(minGroupMembersCreateAt) + if result.Err != nil { + return nil, result.Err + } + return result.Data.([]*model.UserChannelIDPair), nil +} diff --git a/app/group_test.go b/app/group_test.go new file mode 100644 index 0000000000..e5cc67b8e4 --- /dev/null +++ b/app/group_test.go @@ -0,0 +1,223 @@ +// Copyright (c) 2016-present Mattermost, Inc. All Rights Reserved. +// See License.txt for license information. 
+ +package app + +import ( + "testing" + + "github.com/mattermost/mattermost-server/model" + "github.com/stretchr/testify/require" +) + +func TestGetGroup(t *testing.T) { + th := Setup().InitBasic() + defer th.TearDown() + group := th.CreateGroup() + + group, err := th.App.GetGroup(group.Id) + require.Nil(t, err) + require.NotNil(t, group) + + group, err = th.App.GetGroup(model.NewId()) + require.NotNil(t, err) + require.Nil(t, group) +} + +func TestGetGroupByRemoteID(t *testing.T) { + th := Setup().InitBasic() + defer th.TearDown() + group := th.CreateGroup() + + g, err := th.App.GetGroupByRemoteID(group.RemoteId, model.GroupSourceLdap) + require.Nil(t, err) + require.NotNil(t, g) + + g, err = th.App.GetGroupByRemoteID(model.NewId(), model.GroupSourceLdap) + require.NotNil(t, err) + require.Nil(t, g) +} + +func TestGetGroupsByType(t *testing.T) { + th := Setup().InitBasic() + defer th.TearDown() + th.CreateGroup() + th.CreateGroup() + th.CreateGroup() + + groups, err := th.App.GetGroupsBySource(model.GroupSourceLdap) + require.Nil(t, err) + require.NotEmpty(t, groups) + + groups, err = th.App.GetGroupsBySource(model.GroupSource("blah")) + require.Nil(t, err) + require.Empty(t, groups) +} + +func TestCreateGroup(t *testing.T) { + th := Setup().InitBasic() + defer th.TearDown() + + id := model.NewId() + group := &model.Group{ + DisplayName: "dn_" + id, + Name: "name" + id, + Source: model.GroupSourceLdap, + RemoteId: model.NewId(), + } + + g, err := th.App.CreateGroup(group) + require.Nil(t, err) + require.NotNil(t, g) + + g, err = th.App.CreateGroup(group) + require.NotNil(t, err) + require.Nil(t, g) +} + +func TestUpdateGroup(t *testing.T) { + th := Setup().InitBasic() + defer th.TearDown() + group := th.CreateGroup() + group.DisplayName = model.NewId() + + g, err := th.App.UpdateGroup(group) + require.Nil(t, err) + require.NotNil(t, g) +} + +func TestDeleteGroup(t *testing.T) { + th := Setup().InitBasic() + defer th.TearDown() + group := th.CreateGroup() + + g, 
err := th.App.DeleteGroup(group.Id) + require.Nil(t, err) + require.NotNil(t, g) + + g, err = th.App.DeleteGroup(group.Id) + require.NotNil(t, err) + require.Nil(t, g) +} + +func TestCreateOrRestoreGroupMember(t *testing.T) { + th := Setup().InitBasic() + defer th.TearDown() + group := th.CreateGroup() + + g, err := th.App.CreateOrRestoreGroupMember(group.Id, th.BasicUser.Id) + require.Nil(t, err) + require.NotNil(t, g) + + g, err = th.App.CreateOrRestoreGroupMember(group.Id, th.BasicUser.Id) + require.NotNil(t, err) + require.Nil(t, g) +} + +func TestDeleteGroupMember(t *testing.T) { + th := Setup().InitBasic() + defer th.TearDown() + group := th.CreateGroup() + groupMember, err := th.App.CreateOrRestoreGroupMember(group.Id, th.BasicUser.Id) + require.Nil(t, err) + require.NotNil(t, groupMember) + + groupMember, err = th.App.DeleteGroupMember(groupMember.GroupId, groupMember.UserId) + require.Nil(t, err) + require.NotNil(t, groupMember) + + groupMember, err = th.App.DeleteGroupMember(groupMember.GroupId, groupMember.UserId) + require.NotNil(t, err) + require.Nil(t, groupMember) +} + +func TestCreateGroupSyncable(t *testing.T) { + th := Setup().InitBasic() + defer th.TearDown() + group := th.CreateGroup() + groupSyncable := &model.GroupSyncable{ + GroupId: group.Id, + CanLeave: true, + AutoAdd: false, + SyncableId: th.BasicTeam.Id, + Type: model.GroupSyncableTypeTeam, + } + + gs, err := th.App.CreateGroupSyncable(groupSyncable) + require.Nil(t, err) + require.NotNil(t, gs) + + gs, err = th.App.CreateGroupSyncable(groupSyncable) + require.NotNil(t, err) + require.Nil(t, gs) +} + +func TestGetGroupSyncable(t *testing.T) { + th := Setup().InitBasic() + defer th.TearDown() + group := th.CreateGroup() + groupSyncable := &model.GroupSyncable{ + GroupId: group.Id, + CanLeave: true, + AutoAdd: false, + SyncableId: th.BasicTeam.Id, + Type: model.GroupSyncableTypeTeam, + } + + gs, err := th.App.CreateGroupSyncable(groupSyncable) + require.Nil(t, err) + require.NotNil(t, gs) 
+ + gs, err = th.App.GetGroupSyncable(group.Id, th.BasicTeam.Id, model.GroupSyncableTypeTeam) + require.Nil(t, err) + require.NotNil(t, gs) +} + +func TestGetGroupSyncables(t *testing.T) { + th := Setup().InitBasic() + defer th.TearDown() + group := th.CreateGroup() + + // Create a group team + groupSyncable := &model.GroupSyncable{ + GroupId: group.Id, + CanLeave: true, + AutoAdd: false, + SyncableId: th.BasicTeam.Id, + Type: model.GroupSyncableTypeTeam, + } + + gs, err := th.App.CreateGroupSyncable(groupSyncable) + require.Nil(t, err) + require.NotNil(t, gs) + + groupTeams, err := th.App.GetGroupSyncables(group.Id, model.GroupSyncableTypeTeam) + require.Nil(t, err) + + require.NotEmpty(t, groupTeams) +} + +func TestDeleteGroupSyncable(t *testing.T) { + th := Setup().InitBasic() + defer th.TearDown() + group := th.CreateGroup() + groupChannel := &model.GroupSyncable{ + GroupId: group.Id, + CanLeave: true, + AutoAdd: false, + SyncableId: th.BasicChannel.Id, + Type: model.GroupSyncableTypeChannel, + } + + gs, err := th.App.CreateGroupSyncable(groupChannel) + require.Nil(t, err) + require.NotNil(t, gs) + + gs, err = th.App.DeleteGroupSyncable(group.Id, th.BasicChannel.Id, model.GroupSyncableTypeChannel) + require.Nil(t, err) + require.NotNil(t, gs) + + gs, err = th.App.DeleteGroupSyncable(group.Id, th.BasicChannel.Id, model.GroupSyncableTypeChannel) + require.NotNil(t, err) + require.Nil(t, gs) +} diff --git a/app/helper_test.go b/app/helper_test.go index 8b0facc896..d8f749b92e 100644 --- a/app/helper_test.go +++ b/app/helper_test.go @@ -343,6 +343,28 @@ func (me *TestHelper) CreateScheme() (*model.Scheme, []*model.Role) { return scheme, roles } +func (me *TestHelper) CreateGroup() *model.Group { + id := model.NewId() + group := &model.Group{ + DisplayName: "dn_" + id, + Name: "name" + id, + Source: model.GroupSourceLdap, + Description: "description_" + id, + RemoteId: model.NewId(), + } + + utils.DisableDebugLogForTest() + var err *model.AppError + if group, err = 
me.App.CreateGroup(group); err != nil { + mlog.Error(err.Error()) + + time.Sleep(time.Second) + panic(err) + } + utils.EnableDebugLogForTest() + return group +} + func (me *TestHelper) CreateEmoji() *model.Emoji { utils.DisableDebugLogForTest() diff --git a/app/ldap.go b/app/ldap.go index d254c656cd..ffe73c1456 100644 --- a/app/ldap.go +++ b/app/ldap.go @@ -40,6 +40,46 @@ func (a *App) TestLdap() *model.AppError { return nil } +// GetLdapGroup retrieves a single LDAP group by the given LDAP group id. +func (a *App) GetLdapGroup(ldapGroupID string) (*model.Group, *model.AppError) { + var group *model.Group + + if a.Ldap != nil { + var err *model.AppError + group, err = a.Ldap.GetGroup(ldapGroupID) + if err != nil { + return nil, err + } + } else { + ae := model.NewAppError("GetLdapGroup", "ent.ldap.app_error", nil, "", http.StatusNotImplemented) + mlog.Error(fmt.Sprintf("%v", ae.Error())) + return nil, ae + } + + return group, nil +} + +// GetAllLdapGroupsPage retrieves all LDAP groups under the configured base DN using the default or configured group +// filter. 
+func (a *App) GetAllLdapGroupsPage(page int, perPage int) ([]*model.Group, int, *model.AppError) { + var groups []*model.Group + var total int + + if a.Ldap != nil { + var err *model.AppError + groups, total, err = a.Ldap.GetAllGroupsPage(page, perPage) + if err != nil { + return nil, 0, err + } + } else { + ae := model.NewAppError("GetAllLdapGroupsPage", "ent.ldap.app_error", nil, "", http.StatusNotImplemented) + mlog.Error(fmt.Sprintf("%v", ae.Error())) + return nil, 0, ae + } + + return groups, total, nil +} + func (a *App) SwitchEmailToLdap(email, password, code, ldapLoginId, ldapPassword string) (string, *model.AppError) { if a.License() != nil && !*a.Config().ServiceSettings.ExperimentalEnableAuthenticationTransfer { return "", model.NewAppError("emailToLdap", "api.user.email_to_ldap.not_available.app_error", nil, "", http.StatusForbidden) diff --git a/app/syncables.go b/app/syncables.go new file mode 100644 index 0000000000..c386b7da9d --- /dev/null +++ b/app/syncables.go @@ -0,0 +1,63 @@ +// Copyright (c) 2016-present Mattermost, Inc. All Rights Reserved. +// See License.txt for license information. + +package app + +import ( + "github.com/mattermost/mattermost-server/mlog" +) + +// PopulateSyncablesSince adds users to teams and channels based on their group memberships and how those groups are +// configured to sync with teams and channels for group members on or after the given timestamp. 
+func (a *App) PopulateSyncablesSince(groupMembersCreatedAfter int64) error { + userTeamIDs, appErr := a.PendingAutoAddTeamMembers(groupMembersCreatedAfter) + if appErr != nil { + return appErr + } + + for _, userTeam := range userTeamIDs { + _, err := a.AddTeamMember(userTeam.TeamID, userTeam.UserID) + if err != nil { + return err + } + + a.Log.Info("added teammember", + mlog.String("user_id", userTeam.UserID), + mlog.String("team_id", userTeam.TeamID), + ) + } + + userChannelIDs, appErr := a.PendingAutoAddChannelMembers(groupMembersCreatedAfter) + if appErr != nil { + return appErr + } + + for _, userChannel := range userChannelIDs { + channel, err := a.GetChannel(userChannel.ChannelID) + if err != nil { + return err + } + + // First add user to team + _, err = a.AddTeamMember(channel.TeamId, userChannel.UserID) + if err != nil { + return err + } + a.Log.Info("added teammember", + mlog.String("user_id", userChannel.UserID), + mlog.String("team_id", channel.TeamId), + ) + + _, err = a.AddChannelMember(userChannel.UserID, channel, "", "", false) + if err != nil { + return err + } + + a.Log.Info("added channelmember", + mlog.String("user_id", userChannel.UserID), + mlog.String("channel_id", userChannel.ChannelID), + ) + } + + return nil +} diff --git a/app/syncables_test.go b/app/syncables_test.go new file mode 100644 index 0000000000..3396ee2a6c --- /dev/null +++ b/app/syncables_test.go @@ -0,0 +1,346 @@ +package app + +import ( + "testing" + + "github.com/mattermost/mattermost-server/model" +) + +func TestPopulateSyncablesSince(t *testing.T) { + th := Setup().InitBasic() + defer th.TearDown() + + singersTeam, err := th.App.CreateTeam(&model.Team{ + DisplayName: "Singers", + Name: model.NewId(), + Email: "singers@test.com", + Type: model.TEAM_OPEN, + }) + if err != nil { + t.Errorf("test team not created: %s", err.Error()) + } + + nerdsTeam, err := th.App.CreateTeam(&model.Team{ + DisplayName: "Nerds", + Name: model.NewId(), + Email: "nerds@test.com", + Type: 
model.TEAM_INVITE, + }) + if err != nil { + t.Errorf("test team not created: %s", err.Error()) + } + + practiceChannel, err := th.App.CreateChannel(&model.Channel{ + TeamId: singersTeam.Id, + DisplayName: "Practices", + Name: model.NewId(), + Type: model.CHANNEL_OPEN, + }, false) + if err != nil { + t.Errorf("test channel not created: %s", err.Error()) + } + + experimentsChannel, err := th.App.CreateChannel(&model.Channel{ + TeamId: singersTeam.Id, + DisplayName: "Experiments", + Name: model.NewId(), + Type: model.CHANNEL_PRIVATE, + }, false) + if err != nil { + t.Errorf("test channel not created: %s", err.Error()) + } + + gleeGroup, err := th.App.CreateGroup(&model.Group{ + Name: model.NewId(), + DisplayName: "Glee Club", + RemoteId: model.NewId(), + Source: model.GroupSourceLdap, + }) + if err != nil { + t.Errorf("test group not created: %s", err.Error()) + } + + scienceGroup, err := th.App.CreateGroup(&model.Group{ + Name: model.NewId(), + DisplayName: "Science Club", + RemoteId: model.NewId(), + Source: model.GroupSourceLdap, + }) + if err != nil { + t.Errorf("test group not created: %s", err.Error()) + } + + _, err = th.App.CreateGroupSyncable(&model.GroupSyncable{ + CanLeave: true, + AutoAdd: true, + GroupId: gleeGroup.Id, + SyncableId: practiceChannel.Id, + Type: model.GroupSyncableTypeChannel, + }) + if err != nil { + t.Errorf("test groupchannel not created: %s", err.Error()) + } + + scienceTeamGroupSyncable, err := th.App.CreateGroupSyncable(&model.GroupSyncable{ + CanLeave: true, + AutoAdd: false, + GroupId: scienceGroup.Id, + SyncableId: nerdsTeam.Id, + Type: model.GroupSyncableTypeTeam, + }) + if err != nil { + t.Errorf("test groupteam not created: %s", err.Error()) + } + + scienceChannelGroupSyncable, err := th.App.CreateGroupSyncable(&model.GroupSyncable{ + CanLeave: true, + AutoAdd: false, + GroupId: scienceGroup.Id, + SyncableId: experimentsChannel.Id, + Type: model.GroupSyncableTypeChannel, + }) + if err != nil { + t.Errorf("test groupchannel not 
created: %s", err.Error()) + } + + singer1 := th.BasicUser + scientist1 := th.BasicUser2 + + _, err = th.App.CreateOrRestoreGroupMember(gleeGroup.Id, singer1.Id) + if err != nil { + t.Errorf("test groupmember not created: %s", err.Error()) + } + + scientistGroupMember, err := th.App.CreateOrRestoreGroupMember(scienceGroup.Id, scientist1.Id) + if err != nil { + t.Errorf("test groupmember not created: %s", err.Error()) + } + + pErr := th.App.PopulateSyncablesSince(0) + if pErr != nil { + t.Errorf("faild to populate syncables: %s", pErr.Error()) + } + + // Singer should be in team and channel + _, err = th.App.GetTeamMember(singersTeam.Id, singer1.Id) + if err != nil { + t.Errorf("error retrieving team member: %s", err.Error()) + } + _, err = th.App.GetChannelMember(practiceChannel.Id, singer1.Id) + if err != nil { + t.Errorf("error retrieving channel member: %s", err.Error()) + } + + tMembers, err := th.App.GetTeamMembers(singersTeam.Id, 0, 999) + if err != nil { + t.Errorf("error retrieving team members: %s", err.Error()) + } + expected := 1 + actual := len(tMembers) + if actual != expected { + t.Errorf("expected %d team members but got %d", expected, actual) + } + + cMembersCount, err := th.App.GetChannelMemberCount(practiceChannel.Id) + if err != nil { + t.Errorf("error retrieving team members: %s", err.Error()) + } + if cMembersCount != int64(expected) { + t.Errorf("expected %d team member but got %d", expected, cMembersCount) + } + + // Scientist should not be in team or channel + _, err = th.App.GetTeamMember(nerdsTeam.Id, scientist1.Id) + if err.Id != "store.sql_team.get_member.missing.app_error" { + t.Errorf("wrong error: %s", err.Id) + } + + _, err = th.App.GetChannelMember(experimentsChannel.Id, scientist1.Id) + if err.Id != "store.sql_channel.get_member.missing.app_error" { + t.Errorf("wrong error: %s", err.Id) + } + + tMembers, err = th.App.GetTeamMembers(nerdsTeam.Id, 0, 999) + if err != nil { + t.Errorf("error retrieving team members: %s", err.Error()) 
+ } + expected = 0 + actual = len(tMembers) + if actual != expected { + t.Errorf("expected %d team members but got %d", expected, actual) + } + + cMembersCount, err = th.App.GetChannelMemberCount(experimentsChannel.Id) + if err != nil { + t.Errorf("error retrieving team members: %s", err.Error()) + } + if cMembersCount != int64(expected) { + t.Errorf("expected %d team members but got %d", expected, cMembersCount) + } + + // update AutoAdd to true + scienceTeamGroupSyncable.AutoAdd = true + scienceTeamGroupSyncable, err = th.App.UpdateGroupSyncable(scienceTeamGroupSyncable) + if err != nil { + t.Errorf("error updating group syncable: %s", err.Error()) + } + + // Sync everything after syncable was created (proving that team updates trigger re-sync) + pErr = th.App.PopulateSyncablesSince(scientistGroupMember.CreateAt + 1) + if pErr != nil { + t.Errorf("faild to populate syncables: %s", pErr.Error()) + } + + // Scientist should be in team but not the channel + _, err = th.App.GetTeamMember(nerdsTeam.Id, scientist1.Id) + if err != nil { + t.Errorf("error retrieving team member: %s", err.Error()) + } + + _, err = th.App.GetChannelMember(experimentsChannel.Id, scientist1.Id) + if err.Id != "store.sql_channel.get_member.missing.app_error" { + t.Errorf("wrong error: %s", err.Id) + } + + tMembers, err = th.App.GetTeamMembers(nerdsTeam.Id, 0, 999) + if err != nil { + t.Errorf("error retrieving team members: %s", err.Error()) + } + expected = 1 + actual = len(tMembers) + if actual != expected { + t.Errorf("expected %d team members but got %d", expected, actual) + } + + expected = 0 + cMembersCount, err = th.App.GetChannelMemberCount(experimentsChannel.Id) + if err != nil { + t.Errorf("error retrieving team members: %s", err.Error()) + } + if cMembersCount != int64(expected) { + t.Errorf("expected %d team members but got %d", expected, cMembersCount) + } + + // Update the channel syncable + scienceChannelGroupSyncable.AutoAdd = true + scienceChannelGroupSyncable, err = 
th.App.UpdateGroupSyncable(scienceChannelGroupSyncable) + if err != nil { + t.Errorf("error updating group syncable: %s", err.Error()) + } + + // Sync everything after syncable was created (proving that channel updates trigger re-sync) + pErr = th.App.PopulateSyncablesSince(scientistGroupMember.CreateAt + 1) + if pErr != nil { + t.Errorf("faild to populate syncables: %s", pErr.Error()) + } + + expected = 1 + cMembersCount, err = th.App.GetChannelMemberCount(experimentsChannel.Id) + if err != nil { + t.Errorf("error retrieving team members: %s", err.Error()) + } + if cMembersCount != int64(expected) { + t.Errorf("expected %d team members but got %d", expected, cMembersCount) + } + + // singer leaves team and channel + err = th.App.LeaveChannel(practiceChannel.Id, singer1.Id) + if err != nil { + t.Errorf("error leaving channel: %s", err.Error()) + } + err = th.App.LeaveTeam(singersTeam, singer1, "") + if err != nil { + t.Errorf("error leaving team: %s", err.Error()) + } + + // Even re-syncing from the beginning doesn't re-add to channel or team + pErr = th.App.PopulateSyncablesSince(0) + if pErr != nil { + t.Errorf("faild to populate syncables: %s", pErr.Error()) + } + + // Singer should not be in team or channel + tMember, err := th.App.GetTeamMember(singersTeam.Id, singer1.Id) + if err != nil { + t.Errorf("error retrieving team member: %s", err.Error()) + } + if tMember.DeleteAt == 0 { + t.Error("expected team member to remain deleted") + } + + _, err = th.App.GetChannelMember(practiceChannel.Id, singer1.Id) + if err == nil { + t.Error("Expected channel member to remain deleted") + } + + // Ensure members are in channel + _, err = th.App.AddChannelMember(scientist1.Id, experimentsChannel, "", "", false) + if err != nil { + t.Errorf("unable to add user to channel: %s", err.Error()) + } + + // Add other user so that user can leave channel + _, err = th.App.AddTeamMember(singersTeam.Id, singer1.Id) + if err != nil { + t.Errorf("unable to add user to team: %s", 
err.Error()) + } + _, err = th.App.AddChannelMember(singer1.Id, experimentsChannel, "", "", false) + if err != nil { + t.Errorf("unable to add user to channel: %s", err.Error()) + } + + // the channel syncable is updated + scienceChannelGroupSyncable.CanLeave = false + scienceChannelGroupSyncable, err = th.App.UpdateGroupSyncable(scienceChannelGroupSyncable) + if err != nil { + t.Errorf("error updating group syncable: %s", err.Error()) + } + + pErr = th.App.PopulateSyncablesSince(0) + if pErr != nil { + t.Errorf("faild to populate syncables: %s", pErr.Error()) + } + + timeBeforeLeaving := model.GetMillis() + + // User leaves channel + err = th.App.LeaveChannel(experimentsChannel.Id, scientist1.Id) + if err != nil { + t.Errorf("unable to add user to channel: %s", err.Error()) + } + + timeAfterLeaving := model.GetMillis() + + // Purging channelmemberhistory doesn't re-add user to channel + result := <-th.App.Srv.Store.ChannelMemberHistory().PermanentDeleteBatch(timeBeforeLeaving, 1000) + if result.Err != nil { + t.Errorf("error permanently deleting channelmemberhistory: %s", result.Err.Error()) + } + + pErr = th.App.PopulateSyncablesSince(scienceChannelGroupSyncable.UpdateAt) + if pErr != nil { + t.Errorf("failed to populate syncables: %s", pErr.Error()) + } + + _, err = th.App.GetChannelMember(experimentsChannel.Id, scientist1.Id) + if err == nil { + t.Error("Expected channel member to remain deleted") + } + + // Purging channelmemberhistory doesn't re-add user to channel + result = <-th.App.Srv.Jobs.Store.ChannelMemberHistory().PermanentDeleteBatch(timeAfterLeaving, 1000) + if result.Err != nil { + t.Errorf("error permanently deleting channelmemberhistory: %s", result.Err.Error()) + } + + pErr = th.App.PopulateSyncablesSince(scienceChannelGroupSyncable.UpdateAt) + if pErr != nil { + t.Errorf("failed to populate syncables: %s", pErr.Error()) + } + + // Channel member is re-added. 
+ _, err = th.App.GetChannelMember(experimentsChannel.Id, scientist1.Id) + if err != nil { + t.Errorf("expected channel member: %s", err.Error()) + } +} diff --git a/build/Jenkinsfile.pr b/build/Jenkinsfile.pr index 414db896ca..2dc64e540c 100644 --- a/build/Jenkinsfile.pr +++ b/build/Jenkinsfile.pr @@ -113,13 +113,8 @@ pipeline { sh """ docker-compose --no-ansi run --rm start_dependencies docker-compose --no-ansi ps - - docker-compose --no-ansi exec -T openldap bash -c 'echo -e "dn: ou=testusers,dc=mm,dc=test,dc=com\nobjectclass: organizationalunit" | ldapadd -x -D "cn=admin,dc=mm,dc=test,dc=com" -w mostest'; - docker-compose --no-ansi exec -T openldap bash -c 'echo -e "dn: uid=test.one,ou=testusers,dc=mm,dc=test,dc=com\nobjectclass: iNetOrgPerson\nsn: User\ncn: Test1\nmail: success+testone@simulator.amazonses.com" | ldapadd -x -D "cn=admin,dc=mm,dc=test,dc=com" -w mostest'; - docker-compose --no-ansi exec -T openldap bash -c 'ldappasswd -s Password1 -D "cn=admin,dc=mm,dc=test,dc=com" -x "uid=test.one,ou=testusers,dc=mm,dc=test,dc=com" -w mostest'; - docker-compose --no-ansi exec -T openldap bash -c 'echo -e "dn: uid=test.two,ou=testusers,dc=mm,dc=test,dc=com\nobjectclass: iNetOrgPerson\nsn: User\ncn: Test2\nmail: success+testtwo@simulator.amazonses.com" | ldapadd -x -D "cn=admin,dc=mm,dc=test,dc=com" -w mostest'; - docker-compose --no-ansi exec -T openldap bash -c 'ldappasswd -s Password1 -D "cn=admin,dc=mm,dc=test,dc=com" -x "uid=test.two,ou=testusers,dc=mm,dc=test,dc=com" -w mostest'; - docker-compose --no-ansi exec -T openldap bash -c 'echo -e "dn: cn=tgroup,ou=testusers,dc=mm,dc=test,dc=com\nobjectclass: groupOfUniqueNames\nuniqueMember: uid=test.one,ou=testusers,dc=mm,dc=test,dc=com" | ldapadd -x -D "cn=admin,dc=mm,dc=test,dc=com" -w mostest'; + docker-compose --no-ansi exec -T openldap bash -c 'ldapadd -x -D "cn=admin,dc=mm,dc=test,dc=com" -w mostest -f /add-users.ldif'; + docker-compose --no-ansi exec -T openldap bash -c 'ldapadd -x -D 
"cn=admin,dc=mm,dc=test,dc=com" -w mostest -f /add-groups.ldif'; """ } } diff --git a/build/docker-compose.yml b/build/docker-compose.yml index 6aa47e1767..bac8ff7179 100644 --- a/build/docker-compose.yml +++ b/build/docker-compose.yml @@ -38,7 +38,7 @@ services: networks: - mm-test openldap: - image: "osixia/openldap:1.1.6" + image: "osixia/openldap:1.2.2" restart: always networks: - mm-test @@ -47,6 +47,9 @@ services: LDAP_ORGANISATION: "Mattermost Test" LDAP_DOMAIN: "mm.test.com" LDAP_ADMIN_PASSWORD: "mostest" + volumes: + - "../tests/add-users.ldif:/add-users.ldif" + - "../tests/add-groups.ldif:/add-groups.ldif" elasticsearch: image: "mattermost/mattermost-elasticsearch-docker:6.5.1" networks: diff --git a/config/default.json b/config/default.json index 3f7b0e9a41..7b940dc997 100644 --- a/config/default.json +++ b/config/default.json @@ -76,7 +76,8 @@ "ImageProxyURL": "", "EnableAPITeamDeletion": false, "ExperimentalEnableHardenedMode": false, - "EnableEmailInvitations": false + "EnableEmailInvitations": false, + "ExperimentalLdapGroupSync": false }, "TeamSettings": { "SiteName": "Mattermost", @@ -277,6 +278,9 @@ "BindUsername": "", "BindPassword": "", "UserFilter": "", + "GroupFilter": "", + "GroupDisplayNameAttribute": "", + "GroupIdAttribute": "", "FirstNameAttribute": "", "LastNameAttribute": "", "EmailAttribute": "", @@ -410,4 +414,4 @@ "Plugins": {}, "PluginStates": {} } -} +} \ No newline at end of file diff --git a/einterfaces/ldap.go b/einterfaces/ldap.go index 31e8b7cf86..da4f1d844c 100644 --- a/einterfaces/ldap.go +++ b/einterfaces/ldap.go @@ -19,4 +19,7 @@ type LdapInterface interface { RunTest() *model.AppError GetAllLdapUsers() ([]*model.User, *model.AppError) MigrateIDAttribute(toAttribute string) error + GetGroup(groupUID string) (*model.Group, *model.AppError) + GetAllGroupsPage(page int, perPage int) ([]*model.Group, int, *model.AppError) + FirstLoginSync(user *model.User) *model.AppError } diff --git a/i18n/en.json b/i18n/en.json index 
d0411b2f8b..91040f0e56 100644 --- a/i18n/en.json +++ b/i18n/en.json @@ -6765,5 +6765,149 @@ { "id": "web.incoming_webhook.user.app_error", "translation": "Couldn't find the user" + }, + { + "id": "api.io_error", + "translation": "input/output error" + }, + { + "id": "api.marshal_error", + "translation": "marshal error" + }, + { + "id": "api.ldap_groups.license_error", + "translation": "your license does not support ldap groups" + }, + { + "id": "api.ldap_group.not_found", + "translation": "ldap group not found" + }, + { + "id": "ent.ldap_groups.reachable_groups_error", + "translation": "error retrieving groups for user" + }, + { + "id": "ent.ldap_groups.no_rows", + "translation": "no groups found with matching uid" + }, + { + "id": "ent.ldap_groups.members_of_group_error", + "translation": "error retrieving members of group" + }, + { + "id": "ent.ldap_groups.group_search_error", + "translation": "error retrieving ldap group" + }, + { + "id": "ent.ldap_groups.groups_search_error", + "translation": "error retrieving ldap groups" + }, + { + "id": "ent.ldap.syncronize.get_all_groups.app_error", + "translation": "error retrieving groups" + }, + { + "id": "ent.ldap.syncronize.populate_syncables", + "translation": "error populating syncables" + }, + { + "id": "model.group.create_at.app_error", + "translation": "invalid create at property for group" + }, + { + "id": "model.group.description.app_error", + "translation": "invalid description property for group" + }, + { + "id": "model.group.display_name.app_error", + "translation": "invalid display name property for group" + }, + { + "id": "model.group.id.app_error", + "translation": "invalid id property for group" + }, + { + "id": "model.group.name.app_error", + "translation": "invalid name property for group" + }, + { + "id": "model.group.remote_id.app_error", + "translation": "invalid remote id property for group" + }, + { + "id": "model.group.source.app_error", + "translation": "invalid source property for group" + }, + 
{ + "id": "model.group_syncable.type.app_error", + "translation": "invalid type property for group syncable" + }, + { + "id": "model.group_syncable.unmarshaljson.duplicate_type_identifier", + "translation": "type identifier already exists: perhaps json contains both 'team_id' and 'channel_id'" + }, + { + "id": "model.group.update_at.app_error", + "translation": "invalid update at property for group" + }, + { + "id": "model.group_member.group_id.app_error", + "translation": "invalid group id property for group member" + }, + { + "id": "model.group_member.user_id.app_error", + "translation": "invalid user id property for group member" + }, + { + "id": "model.group_syncable.group_id.app_error", + "translation": "invalid group id property for group syncable" + }, + { + "id": "model.group_syncable.syncable_id.app_error", + "translation": "invalid syncable id for group syncable" + }, + { + "id": "store.sql_group.group_syncable_already_deleted", + "translation": "group syncable was already deleted" + }, + { + "id": "store.insert_error", + "translation": "insert error" + }, + { + "id": "model.group.delete_at.app_error", + "translation": "invalid delete at property for group" + }, + { + "id": "store.sql_group.no_rows", + "translation": "no matching group found" + }, + { + "id": "store.sql_group.no_rows_changed", + "translation": "no rows changed" + }, + { + "id": "store.sql_group.nothing_to_update", + "translation": "nothing to update for group syncable" + }, + { + "id": "store.select_error", + "translation": "select error" + }, + { + "id": "store.sql_group.unique_constraint", + "translation": "a group with that name already exists" + }, + { + "id": "store.sql_group.uniqueness_error", + "translation": "group member already exists" + }, + { + "id": "store.update_error", + "translation": "update error" + }, + { + "id": "ent.ldap.app_error", + "translation": "ldap interface was nil" } -] +] \ No newline at end of file diff --git a/model/channel.go b/model/channel.go index 
f8867babb0..16e48743cd 100644 --- a/model/channel.go +++ b/model/channel.go @@ -53,6 +53,13 @@ type Channel struct { Props map[string]interface{} `json:"props" db:"-"` } +type ChannelWithTeamData struct { + Channel + TeamDisplayName string `json:"team_display_name"` + TeamName string `json:"team_name"` + TeamUpdateAt int64 `json:"team_update_at"` +} + type ChannelPatch struct { DisplayName *string `json:"display_name"` Name *string `json:"name"` diff --git a/model/channel_list.go b/model/channel_list.go index 1b3bda46d4..b1db60ecc3 100644 --- a/model/channel_list.go +++ b/model/channel_list.go @@ -51,3 +51,45 @@ func ChannelSliceFromJson(data io.Reader) []*Channel { json.NewDecoder(data).Decode(&o) return o } + +type ChannelListWithTeamData []*ChannelWithTeamData + +func (o *ChannelListWithTeamData) ToJson() string { + if b, err := json.Marshal(o); err != nil { + return "[]" + } else { + return string(b) + } +} + +func (o *ChannelListWithTeamData) Etag() string { + + id := "0" + var t int64 = 0 + var delta int64 = 0 + + for _, v := range *o { + if v.LastPostAt > t { + t = v.LastPostAt + id = v.Id + } + + if v.UpdateAt > t { + t = v.UpdateAt + id = v.Id + } + + if v.TeamUpdateAt > t { + t = v.TeamUpdateAt + id = v.Id + } + } + + return Etag(id, t, delta, len(*o)) +} + +func ChannelListWithTeamDataFromJson(data io.Reader) *ChannelListWithTeamData { + var o *ChannelListWithTeamData + json.NewDecoder(data).Decode(&o) + return o +} diff --git a/model/client4.go b/model/client4.go index 3bb1947fe7..18e8933a26 100644 --- a/model/client4.go +++ b/model/client4.go @@ -409,6 +409,22 @@ func (c *Client4) GetTermsOfServiceRoute() string { return "/terms_of_service" } +func (c *Client4) GetGroupsRoute() string { + return "/groups" +} + +func (c *Client4) GetGroupRoute(groupID string) string { + return fmt.Sprintf("%s/%s", c.GetGroupsRoute(), groupID) +} + +func (c *Client4) GetGroupSyncableRoute(groupID, syncableID string, syncableType GroupSyncableType) string { + return 
fmt.Sprintf("%s/%ss/%s", c.GetGroupRoute(groupID), strings.ToLower(syncableType.String()), syncableID) +} + +func (c *Client4) GetGroupSyncablesRoute(groupID string, syncableType GroupSyncableType) string { + return fmt.Sprintf("%s/%ss", c.GetGroupRoute(groupID), strings.ToLower(syncableType.String())) +} + func (c *Client4) DoApiGet(url string, etag string) (*http.Response, *AppError) { return c.DoApiRequest(http.MethodGet, c.ApiUrl+url, "", etag) } @@ -1706,6 +1722,17 @@ func (c *Client4) RemoveTeamIcon(teamId string) (bool, *Response) { // Channel Section +// GetAllChannels get all the channels. Must be a system administrator. +func (c *Client4) GetAllChannels(page int, perPage int, etag string) (*ChannelListWithTeamData, *Response) { + query := fmt.Sprintf("?page=%v&per_page=%v", page, perPage) + r, err := c.DoApiGet(c.GetChannelsRoute()+query, etag) + if err != nil { + return nil, BuildErrorResponse(r, err) + } + defer closeBody(r) + return ChannelListWithTeamDataFromJson(r.Body), BuildResponse(r) +} + // CreateChannel creates a channel based on the provided channel struct. func (c *Client4) CreateChannel(channel *Channel) (*Channel, *Response) { r, err := c.DoApiPost(c.GetChannelsRoute(), channel.ToJson()) @@ -1870,6 +1897,16 @@ func (c *Client4) SearchChannels(teamId string, search *ChannelSearch) ([]*Chann return ChannelSliceFromJson(r.Body), BuildResponse(r) } +// SearchAllChannels search in all the channels. Must be a system administrator. +func (c *Client4) SearchAllChannels(search *ChannelSearch) (*ChannelListWithTeamData, *Response) { + r, err := c.DoApiPost(c.GetChannelsRoute()+"/search", search.ToJson()) + if err != nil { + return nil, BuildErrorResponse(r, err) + } + defer closeBody(r) + return ChannelListWithTeamDataFromJson(r.Body), BuildResponse(r) +} + // DeleteChannel deletes channel based on the provided channel id string. 
func (c *Client4) DeleteChannel(channelId string) (bool, *Response) { r, err := c.DoApiDelete(c.GetChannelRoute(channelId)) @@ -3066,6 +3103,45 @@ func (c *Client4) TestLdap() (bool, *Response) { return CheckStatusOK(r), BuildResponse(r) } +// GetLdapGroups retrieves the immediate child groups of the given parent group. +func (c *Client4) GetLdapGroups() ([]*Group, *Response) { + path := fmt.Sprintf("%s/groups", c.GetLdapRoute()) + + r, appErr := c.DoApiGet(path, "") + if appErr != nil { + return nil, BuildErrorResponse(r, appErr) + } + defer closeBody(r) + + return GroupsFromJson(r.Body), BuildResponse(r) +} + +// LinkLdapGroup creates or undeletes a Mattermost group and associates it to the given LDAP group DN. +func (c *Client4) LinkLdapGroup(dn string) (*Group, *Response) { + path := fmt.Sprintf("%s/groups/%s/link", c.GetLdapRoute(), dn) + + r, appErr := c.DoApiPost(path, "") + if appErr != nil { + return nil, BuildErrorResponse(r, appErr) + } + defer closeBody(r) + + return GroupFromJson(r.Body), BuildResponse(r) +} + +// UnlinkLdapGroup deletes the Mattermost group associated with the given LDAP group DN. +func (c *Client4) UnlinkLdapGroup(dn string) (*Group, *Response) { + path := fmt.Sprintf("%s/groups/%s/link", c.GetLdapRoute(), dn) + + r, appErr := c.DoApiDelete(path) + if appErr != nil { + return nil, BuildErrorResponse(r, appErr) + } + defer closeBody(r) + + return GroupFromJson(r.Body), BuildResponse(r) +} + // Audits Section // GetAudits returns a list of audits for the whole system. 
@@ -4030,3 +4106,71 @@ func (c *Client4) CreateTermsOfService(text, userId string) (*TermsOfService, *R defer closeBody(r) return TermsOfServiceFromJson(r.Body), BuildResponse(r) } + +func (c *Client4) GetGroup(groupID, etag string) (*Group, *Response) { + r, appErr := c.DoApiGet(c.GetGroupRoute(groupID), etag) + if appErr != nil { + return nil, BuildErrorResponse(r, appErr) + } + defer closeBody(r) + return GroupFromJson(r.Body), BuildResponse(r) +} + +func (c *Client4) PatchGroup(groupID string, patch *GroupPatch) (*Group, *Response) { + payload, _ := json.Marshal(patch) + r, appErr := c.DoApiPut(c.GetGroupRoute(groupID)+"/patch", string(payload)) + if appErr != nil { + return nil, BuildErrorResponse(r, appErr) + } + defer closeBody(r) + return GroupFromJson(r.Body), BuildResponse(r) +} + +func (c *Client4) LinkGroupSyncable(groupID, syncableID string, syncableType GroupSyncableType, patch *GroupSyncablePatch) (*GroupSyncable, *Response) { + payload, _ := json.Marshal(patch) + url := fmt.Sprintf("%s/link", c.GetGroupSyncableRoute(groupID, syncableID, syncableType)) + r, appErr := c.DoApiPost(url, string(payload)) + if appErr != nil { + return nil, BuildErrorResponse(r, appErr) + } + defer closeBody(r) + return GroupSyncableFromJson(r.Body), BuildResponse(r) +} + +func (c *Client4) UnlinkGroupSyncable(groupID, syncableID string, syncableType GroupSyncableType) *Response { + url := fmt.Sprintf("%s/link", c.GetGroupSyncableRoute(groupID, syncableID, syncableType)) + r, appErr := c.DoApiDelete(url) + if appErr != nil { + return BuildErrorResponse(r, appErr) + } + defer closeBody(r) + return BuildResponse(r) +} + +func (c *Client4) GetGroupSyncable(groupID, syncableID string, syncableType GroupSyncableType, etag string) (*GroupSyncable, *Response) { + r, appErr := c.DoApiGet(c.GetGroupSyncableRoute(groupID, syncableID, syncableType), etag) + if appErr != nil { + return nil, BuildErrorResponse(r, appErr) + } + defer closeBody(r) + return GroupSyncableFromJson(r.Body), 
BuildResponse(r) +} + +func (c *Client4) GetGroupSyncables(groupID string, syncableType GroupSyncableType, etag string) ([]*GroupSyncable, *Response) { + r, appErr := c.DoApiGet(c.GetGroupSyncablesRoute(groupID, syncableType), etag) + if appErr != nil { + return nil, BuildErrorResponse(r, appErr) + } + defer closeBody(r) + return GroupSyncablesFromJson(r.Body), BuildResponse(r) +} + +func (c *Client4) PatchGroupSyncable(groupID, syncableID string, syncableType GroupSyncableType, patch *GroupSyncablePatch) (*GroupSyncable, *Response) { + payload, _ := json.Marshal(patch) + r, appErr := c.DoApiPut(c.GetGroupSyncableRoute(groupID, syncableID, syncableType)+"/patch", string(payload)) + if appErr != nil { + return nil, BuildErrorResponse(r, appErr) + } + defer closeBody(r) + return GroupSyncableFromJson(r.Body), BuildResponse(r) +} diff --git a/model/cluster_message.go b/model/cluster_message.go index d02da3ee1f..c175bf0fcd 100644 --- a/model/cluster_message.go +++ b/model/cluster_message.go @@ -23,6 +23,7 @@ const ( CLUSTER_EVENT_CLEAR_SESSION_CACHE_FOR_USER = "clear_session_user" CLUSTER_EVENT_INVALIDATE_CACHE_FOR_ROLES = "inv_roles" CLUSTER_EVENT_INVALIDATE_CACHE_FOR_SCHEMES = "inv_schemes" + CLUSTER_EVENT_INVALIDATE_CACHE_FOR_GROUPS = "inv_groups" CLUSTER_SEND_BEST_EFFORT = "best_effort" CLUSTER_SEND_RELIABLE = "reliable" diff --git a/model/config.go b/model/config.go index 164d6b4f30..f015703f19 100644 --- a/model/config.go +++ b/model/config.go @@ -114,14 +114,16 @@ const ( SUPPORT_SETTINGS_DEFAULT_SUPPORT_EMAIL = "feedback@mattermost.com" SUPPORT_SETTINGS_DEFAULT_RE_ACCEPTANCE_PERIOD = 365 - LDAP_SETTINGS_DEFAULT_FIRST_NAME_ATTRIBUTE = "" - LDAP_SETTINGS_DEFAULT_LAST_NAME_ATTRIBUTE = "" - LDAP_SETTINGS_DEFAULT_EMAIL_ATTRIBUTE = "" - LDAP_SETTINGS_DEFAULT_USERNAME_ATTRIBUTE = "" - LDAP_SETTINGS_DEFAULT_NICKNAME_ATTRIBUTE = "" - LDAP_SETTINGS_DEFAULT_ID_ATTRIBUTE = "" - LDAP_SETTINGS_DEFAULT_POSITION_ATTRIBUTE = "" - LDAP_SETTINGS_DEFAULT_LOGIN_FIELD_NAME = "" + 
LDAP_SETTINGS_DEFAULT_FIRST_NAME_ATTRIBUTE = "" + LDAP_SETTINGS_DEFAULT_LAST_NAME_ATTRIBUTE = "" + LDAP_SETTINGS_DEFAULT_EMAIL_ATTRIBUTE = "" + LDAP_SETTINGS_DEFAULT_USERNAME_ATTRIBUTE = "" + LDAP_SETTINGS_DEFAULT_NICKNAME_ATTRIBUTE = "" + LDAP_SETTINGS_DEFAULT_ID_ATTRIBUTE = "" + LDAP_SETTINGS_DEFAULT_POSITION_ATTRIBUTE = "" + LDAP_SETTINGS_DEFAULT_LOGIN_FIELD_NAME = "" + LDAP_SETTINGS_DEFAULT_GROUP_DISPLAY_NAME_ATTRIBUTE = "" + LDAP_SETTINGS_DEFAULT_GROUP_ID_ATTRIBUTE = "" SAML_SETTINGS_DEFAULT_ID_ATTRIBUTE = "" SAML_SETTINGS_DEFAULT_FIRST_NAME_ATTRIBUTE = "" @@ -276,6 +278,7 @@ type ServiceSettings struct { EnableAPITeamDeletion *bool ExperimentalEnableHardenedMode *bool EnableEmailInvitations *bool + ExperimentalLdapGroupSync *bool } func (s *ServiceSettings) SetDefaults() { @@ -566,6 +569,10 @@ func (s *ServiceSettings) SetDefaults() { if s.ExperimentalEnableHardenedMode == nil { s.ExperimentalEnableHardenedMode = NewBool(false) } + + if s.ExperimentalLdapGroupSync == nil { + s.ExperimentalLdapGroupSync = NewBool(false) + } } type ClusterSettings struct { @@ -1322,7 +1329,12 @@ type LdapSettings struct { BindPassword *string // Filtering - UserFilter *string + UserFilter *string + GroupFilter *string + + // Group Mapping + GroupDisplayNameAttribute *string + GroupIdAttribute *string // User Mapping FirstNameAttribute *string @@ -1388,6 +1400,18 @@ func (s *LdapSettings) SetDefaults() { s.UserFilter = NewString("") } + if s.GroupFilter == nil { + s.GroupFilter = NewString("") + } + + if s.GroupDisplayNameAttribute == nil { + s.GroupDisplayNameAttribute = NewString(LDAP_SETTINGS_DEFAULT_GROUP_DISPLAY_NAME_ATTRIBUTE) + } + + if s.GroupIdAttribute == nil { + s.GroupIdAttribute = NewString(LDAP_SETTINGS_DEFAULT_GROUP_ID_ATTRIBUTE) + } + if s.FirstNameAttribute == nil { s.FirstNameAttribute = NewString(LDAP_SETTINGS_DEFAULT_FIRST_NAME_ATTRIBUTE) } diff --git a/model/group.go b/model/group.go new file mode 100644 index 0000000000..f0754848c8 --- /dev/null +++ 
b/model/group.go @@ -0,0 +1,135 @@ +// Copyright (c) 2018-present Mattermost, Inc. All Rights Reserved. +// See License.txt for license information. + +package model + +import ( + "encoding/json" + "io" + "net/http" +) + +const ( + GroupSourceLdap GroupSource = "ldap" + + GroupNameMaxLength = 64 + GroupSourceMaxLength = 64 + GroupDisplayNameMaxLength = 128 + GroupDescriptionMaxLength = 1024 + GroupRemoteIDMaxLength = 48 +) + +type GroupSource string + +var allGroupSources = []GroupSource{ + GroupSourceLdap, +} + +var groupSourcesRequiringRemoteID = []GroupSource{ + GroupSourceLdap, +} + +type Group struct { + Id string `json:"id"` + Name string `json:"name"` + DisplayName string `json:"display_name"` + Description string `json:"description"` + Source GroupSource `json:"source"` + RemoteId string `json:"remote_id"` + CreateAt int64 `json:"create_at"` + UpdateAt int64 `json:"update_at"` + DeleteAt int64 `json:"delete_at"` + HasSyncables bool `db:"-" json:"has_syncables"` +} + +type GroupPatch struct { + Name *string `json:"name"` + DisplayName *string `json:"display_name"` + Description *string `json:"description"` +} + +func (group *Group) Patch(patch *GroupPatch) { + if patch.Name != nil { + group.Name = *patch.Name + } + if patch.DisplayName != nil { + group.DisplayName = *patch.DisplayName + } + if patch.Description != nil { + group.Description = *patch.Description + } +} + +func (group *Group) IsValidForCreate() *AppError { + if l := len(group.Name); l == 0 || l > GroupNameMaxLength { + return NewAppError("Group.IsValidForCreate", "model.group.name.app_error", map[string]interface{}{"GroupNameMaxLength": GroupNameMaxLength}, "", http.StatusBadRequest) + } + + if l := len(group.DisplayName); l == 0 || l > GroupDisplayNameMaxLength { + return NewAppError("Group.IsValidForCreate", "model.group.display_name.app_error", map[string]interface{}{"GroupDisplayNameMaxLength": GroupDisplayNameMaxLength}, "", http.StatusBadRequest) + } + + if len(group.Description) > 
GroupDescriptionMaxLength { + return NewAppError("Group.IsValidForCreate", "model.group.description.app_error", map[string]interface{}{"GroupDescriptionMaxLength": GroupDescriptionMaxLength}, "", http.StatusBadRequest) + } + + isValidSource := false + for _, groupSource := range allGroupSources { + if group.Source == groupSource { + isValidSource = true + break + } + } + if !isValidSource { + return NewAppError("Group.IsValidForCreate", "model.group.source.app_error", nil, "", http.StatusBadRequest) + } + + if len(group.RemoteId) > GroupRemoteIDMaxLength || (len(group.RemoteId) == 0 && group.requiresRemoteId()) { + return NewAppError("Group.IsValidForCreate", "model.group.remote_id.app_error", nil, "", http.StatusBadRequest) + } + + return nil +} + +func (group *Group) requiresRemoteId() bool { + for _, groupSource := range groupSourcesRequiringRemoteID { + if groupSource == group.Source { + return true + } + } + return false +} + +func (group *Group) IsValidForUpdate() *AppError { + if len(group.Id) != 26 { + return NewAppError("Group.IsValidForUpdate", "model.group.id.app_error", nil, "", http.StatusBadRequest) + } + if group.CreateAt == 0 { + return NewAppError("Group.IsValidForUpdate", "model.group.create_at.app_error", nil, "", http.StatusBadRequest) + } + if group.UpdateAt == 0 { + return NewAppError("Group.IsValidForUpdate", "model.group.update_at.app_error", nil, "", http.StatusBadRequest) + } + if err := group.IsValidForCreate(); err != nil { + return err + } + return nil +} + +func GroupFromJson(data io.Reader) *Group { + var group *Group + json.NewDecoder(data).Decode(&group) + return group +} + +func GroupsFromJson(data io.Reader) []*Group { + var groups []*Group + json.NewDecoder(data).Decode(&groups) + return groups +} + +func GroupPatchFromJson(data io.Reader) *GroupPatch { + var groupPatch *GroupPatch + json.NewDecoder(data).Decode(&groupPatch) + return groupPatch +} diff --git a/model/group_member.go b/model/group_member.go new file mode 100644 
index 0000000000..0f1a0baf01 --- /dev/null +++ b/model/group_member.go @@ -0,0 +1,23 @@ +// Copyright (c) 2018-present Mattermost, Inc. All Rights Reserved. +// See License.txt for license information. + +package model + +import "net/http" + +type GroupMember struct { + GroupId string `json:"group_id"` + UserId string `json:"user_id"` + CreateAt int64 `json:"create_at"` + DeleteAt int64 `json:"delete_at"` +} + +func (gm *GroupMember) IsValid() *AppError { + if !IsValidId(gm.GroupId) { + return NewAppError("GroupMember.IsValid", "model.group_member.group_id.app_error", nil, "", http.StatusBadRequest) + } + if !IsValidId(gm.UserId) { + return NewAppError("GroupMember.IsValid", "model.group_member.user_id.app_error", nil, "", http.StatusBadRequest) + } + return nil +} diff --git a/model/group_syncable.go b/model/group_syncable.go new file mode 100644 index 0000000000..841c854c6b --- /dev/null +++ b/model/group_syncable.go @@ -0,0 +1,169 @@ +// Copyright (c) 2018-present Mattermost, Inc. All Rights Reserved. +// See License.txt for license information. + +package model + +import ( + "encoding/json" + "fmt" + "io" + "io/ioutil" + "net/http" +) + +type GroupSyncableType string + +const ( + GroupSyncableTypeTeam GroupSyncableType = "Team" + GroupSyncableTypeChannel GroupSyncableType = "Channel" +) + +func (gst GroupSyncableType) String() string { + return string(gst) +} + +type GroupSyncable struct { + GroupId string `json:"group_id"` + + // SyncableId represents the Id of the model that is being synced with the group, for example a ChannelId or + // TeamId. 
+ SyncableId string `db:"-" json:"-"` + + CanLeave bool `db:"-" json:"can_leave"` + AutoAdd bool `json:"auto_add"` + CreateAt int64 `json:"create_at"` + DeleteAt int64 `json:"delete_at"` + UpdateAt int64 `json:"update_at"` + Type GroupSyncableType `db:"-" json:"-"` + + // Values joined in from the associated team and/or channel + ChannelDisplayName string `db:"-" json:"-"` + TeamDisplayName string `db:"-" json:"-"` + TeamType string `db:"-" json:"-"` + ChannelType string `db:"-" json:"-"` + TeamID string `db:"-" json:"-"` +} + +func (syncable *GroupSyncable) IsValid() *AppError { + if !IsValidId(syncable.GroupId) { + return NewAppError("GroupSyncable.SyncableIsValid", "model.group_syncable.group_id.app_error", nil, "", http.StatusBadRequest) + } + if !IsValidId(syncable.SyncableId) { + return NewAppError("GroupSyncable.SyncableIsValid", "model.group_syncable.syncable_id.app_error", nil, "", http.StatusBadRequest) + } + // TODO: Add this validation check for phase 2 of LDAP group sync. + // if syncable.AutoAdd == false && syncable.CanLeave == false { + // return NewAppError("GroupSyncable.SyncableIsValid", "model.group_syncable.invalid_state", nil, "", http.StatusBadRequest) + // } + return nil +} + +func (syncable *GroupSyncable) UnmarshalJSON(b []byte) error { + var kvp map[string]interface{} + err := json.Unmarshal(b, &kvp) + if err != nil { + return err + } + for key, value := range kvp { + switch key { + case "team_id": + syncable.SyncableId = value.(string) + syncable.Type = GroupSyncableTypeTeam + case "channel_id": + syncable.SyncableId = value.(string) + syncable.Type = GroupSyncableTypeChannel + case "group_id": + syncable.GroupId = value.(string) + case "can_leave": + syncable.CanLeave = value.(bool) + case "auto_add": + syncable.AutoAdd = value.(bool) + default: + } + } + return nil +} + +func (syncable *GroupSyncable) MarshalJSON() ([]byte, error) { + type Alias GroupSyncable + + switch syncable.Type { + case GroupSyncableTypeTeam: + return 
json.Marshal(&struct { + TeamID string `json:"team_id"` + TeamDisplayName string `json:"team_display_name,omitempty"` + TeamType string `json:"team_type,omitempty"` + *Alias + }{ + TeamDisplayName: syncable.TeamDisplayName, + TeamType: syncable.TeamType, + TeamID: syncable.SyncableId, + Alias: (*Alias)(syncable), + }) + case GroupSyncableTypeChannel: + return json.Marshal(&struct { + ChannelID string `json:"channel_id"` + ChannelDisplayName string `json:"channel_display_name,omitempty"` + ChannelType string `json:"channel_type,omitempty"` + + TeamID string `json:"team_id,omitempty"` + TeamDisplayName string `json:"team_display_name,omitempty"` + TeamType string `json:"team_type,omitempty"` + + *Alias + }{ + ChannelID: syncable.SyncableId, + ChannelDisplayName: syncable.ChannelDisplayName, + ChannelType: syncable.ChannelType, + + TeamID: syncable.TeamID, + TeamDisplayName: syncable.TeamDisplayName, + TeamType: syncable.TeamType, + + Alias: (*Alias)(syncable), + }) + default: + return nil, &json.MarshalerError{ + Err: fmt.Errorf("unknown syncable type: %s", syncable.Type), + } + } +} + +type GroupSyncablePatch struct { + CanLeave *bool `json:"can_leave"` + AutoAdd *bool `json:"auto_add"` +} + +func (syncable *GroupSyncable) Patch(patch *GroupSyncablePatch) { + // TODO: Add this validation check for phase 2 of LDAP group sync. 
+ // if patch.CanLeave != nil { + // syncable.CanLeave = *patch.CanLeave + // } + if patch.AutoAdd != nil { + syncable.AutoAdd = *patch.AutoAdd + } +} + +type UserTeamIDPair struct { + UserID string + TeamID string +} + +type UserChannelIDPair struct { + UserID string + ChannelID string +} + +func GroupSyncableFromJson(data io.Reader) *GroupSyncable { + groupSyncable := &GroupSyncable{} + bodyBytes, _ := ioutil.ReadAll(data) + json.Unmarshal(bodyBytes, groupSyncable) + return groupSyncable +} + +func GroupSyncablesFromJson(data io.Reader) []*GroupSyncable { + groupSyncables := []*GroupSyncable{} + bodyBytes, _ := ioutil.ReadAll(data) + json.Unmarshal(bodyBytes, &groupSyncables) + return groupSyncables +} diff --git a/model/license.go b/model/license.go index e2ef3b0d58..7ab21debc0 100644 --- a/model/license.go +++ b/model/license.go @@ -42,6 +42,7 @@ type Customer struct { type Features struct { Users *int `json:"users"` LDAP *bool `json:"ldap"` + LDAPGroups *bool `json:"ldap_groups"` MFA *bool `json:"mfa"` GoogleOAuth *bool `json:"google_oauth"` Office365OAuth *bool `json:"office365_oauth"` @@ -66,6 +67,7 @@ type Features struct { func (f *Features) ToMap() map[string]interface{} { return map[string]interface{}{ "ldap": *f.LDAP, + "ldap_groups": *f.LDAPGroups, "mfa": *f.MFA, "google": *f.GoogleOAuth, "office365": *f.Office365OAuth, @@ -96,6 +98,10 @@ func (f *Features) SetDefaults() { f.LDAP = NewBool(*f.FutureFeatures) } + if f.LDAPGroups == nil { + f.LDAPGroups = NewBool(*f.FutureFeatures) + } + if f.MFA == nil { f.MFA = NewBool(*f.FutureFeatures) } diff --git a/model/license_test.go b/model/license_test.go index b454734326..d550c327f3 100644 --- a/model/license_test.go +++ b/model/license_test.go @@ -15,6 +15,7 @@ func TestLicenseFeaturesToMap(t *testing.T) { m := f.ToMap() CheckTrue(t, m["ldap"].(bool)) + CheckTrue(t, m["ldap_groups"].(bool)) CheckTrue(t, m["mfa"].(bool)) CheckTrue(t, m["google"].(bool)) CheckTrue(t, m["office365"].(bool)) @@ -37,6 +38,7 @@ 
func TestLicenseFeaturesSetDefaults(t *testing.T) { CheckInt(t, *f.Users, 0) CheckTrue(t, *f.LDAP) + CheckTrue(t, *f.LDAPGroups) CheckTrue(t, *f.MFA) CheckTrue(t, *f.GoogleOAuth) CheckTrue(t, *f.Office365OAuth) @@ -58,6 +60,7 @@ func TestLicenseFeaturesSetDefaults(t *testing.T) { *f.Users = 300 *f.FutureFeatures = false *f.LDAP = true + *f.LDAPGroups = true *f.MFA = true *f.GoogleOAuth = true *f.Office365OAuth = true @@ -76,6 +79,7 @@ func TestLicenseFeaturesSetDefaults(t *testing.T) { CheckInt(t, *f.Users, 300) CheckTrue(t, *f.LDAP) + CheckTrue(t, *f.LDAPGroups) CheckTrue(t, *f.MFA) CheckTrue(t, *f.GoogleOAuth) CheckTrue(t, *f.Office365OAuth) @@ -159,6 +163,7 @@ func TestLicenseToFromJson(t *testing.T) { CheckInt(t, *f1.Users, *f.Users) CheckBool(t, *f1.LDAP, *f.LDAP) + CheckBool(t, *f1.LDAPGroups, *f.LDAPGroups) CheckBool(t, *f1.MFA, *f.MFA) CheckBool(t, *f1.GoogleOAuth, *f.GoogleOAuth) CheckBool(t, *f1.Office365OAuth, *f.Office365OAuth) diff --git a/store/layered_store.go b/store/layered_store.go index 4ea81f6ede..a9ed76ed82 100644 --- a/store/layered_store.go +++ b/store/layered_store.go @@ -29,6 +29,7 @@ type LayeredStore struct { LocalCacheLayer *LocalCacheSupplier RedisLayer *RedisSupplier LayerChainHead LayeredStoreSupplier + GroupStore GroupStore } func NewLayeredStore(db LayeredStoreDatabaseLayer, metrics einterfaces.MetricsInterface, cluster einterfaces.ClusterInterface) Store { @@ -41,6 +42,7 @@ func NewLayeredStore(db LayeredStoreDatabaseLayer, metrics einterfaces.MetricsIn store.ReactionStore = &LayeredReactionStore{store} store.RoleStore = &LayeredRoleStore{store} store.SchemeStore = &LayeredSchemeStore{store} + store.GroupStore = &LayeredGroupStore{store} // Setup the chain if ENABLE_EXPERIMENTAL_REDIS { @@ -181,6 +183,10 @@ func (s *LayeredStore) Scheme() SchemeStore { return s.SchemeStore } +func (s *LayeredStore) Group() GroupStore { + return s.GroupStore +} + func (s *LayeredStore) MarkSystemRanUnitTests() { 
s.DatabaseLayer.MarkSystemRanUnitTests() } @@ -333,3 +339,115 @@ func (s *LayeredSchemeStore) PermanentDeleteAll() StoreChannel { return supplier.SchemePermanentDeleteAll(s.TmpContext) }) } + +type LayeredGroupStore struct { + *LayeredStore +} + +func (s *LayeredGroupStore) Create(group *model.Group) StoreChannel { + return s.RunQuery(func(supplier LayeredStoreSupplier) *LayeredStoreSupplierResult { + return supplier.GroupCreate(s.TmpContext, group) + }) +} + +func (s *LayeredGroupStore) Get(groupID string) StoreChannel { + return s.RunQuery(func(supplier LayeredStoreSupplier) *LayeredStoreSupplierResult { + return supplier.GroupGet(s.TmpContext, groupID) + }) +} + +func (s *LayeredGroupStore) GetByRemoteID(remoteID string, groupSource model.GroupSource) StoreChannel { + return s.RunQuery(func(supplier LayeredStoreSupplier) *LayeredStoreSupplierResult { + return supplier.GroupGetByRemoteID(s.TmpContext, remoteID, groupSource) + }) +} + +func (s *LayeredGroupStore) GetAllBySource(groupSource model.GroupSource) StoreChannel { + return s.RunQuery(func(supplier LayeredStoreSupplier) *LayeredStoreSupplierResult { + return supplier.GroupGetAllBySource(s.TmpContext, groupSource) + }) +} + +func (s *LayeredGroupStore) Update(group *model.Group) StoreChannel { + return s.RunQuery(func(supplier LayeredStoreSupplier) *LayeredStoreSupplierResult { + return supplier.GroupUpdate(s.TmpContext, group) + }) +} + +func (s *LayeredGroupStore) Delete(groupID string) StoreChannel { + return s.RunQuery(func(supplier LayeredStoreSupplier) *LayeredStoreSupplierResult { + return supplier.GroupDelete(s.TmpContext, groupID) + }) +} + +func (s *LayeredGroupStore) GetMemberUsers(groupID string) StoreChannel { + return s.RunQuery(func(supplier LayeredStoreSupplier) *LayeredStoreSupplierResult { + return supplier.GroupGetMemberUsers(s.TmpContext, groupID) + }) +} + +func (s *LayeredGroupStore) GetMemberUsersPage(groupID string, offset int, limit int) StoreChannel { + return 
s.RunQuery(func(supplier LayeredStoreSupplier) *LayeredStoreSupplierResult { + return supplier.GroupGetMemberUsersPage(s.TmpContext, groupID, offset, limit) + }) +} + +func (s *LayeredGroupStore) GetMemberCount(groupID string) StoreChannel { + return s.RunQuery(func(supplier LayeredStoreSupplier) *LayeredStoreSupplierResult { + return supplier.GroupGetMemberCount(s.TmpContext, groupID) + }) +} + +func (s *LayeredGroupStore) CreateOrRestoreMember(groupID string, userID string) StoreChannel { + return s.RunQuery(func(supplier LayeredStoreSupplier) *LayeredStoreSupplierResult { + return supplier.GroupCreateOrRestoreMember(s.TmpContext, groupID, userID) + }) +} + +func (s *LayeredGroupStore) DeleteMember(groupID string, userID string) StoreChannel { + return s.RunQuery(func(supplier LayeredStoreSupplier) *LayeredStoreSupplierResult { + return supplier.GroupDeleteMember(s.TmpContext, groupID, userID) + }) +} + +func (s *LayeredGroupStore) CreateGroupSyncable(groupSyncable *model.GroupSyncable) StoreChannel { + return s.RunQuery(func(supplier LayeredStoreSupplier) *LayeredStoreSupplierResult { + return supplier.GroupCreateGroupSyncable(s.TmpContext, groupSyncable) + }) +} + +func (s *LayeredGroupStore) GetGroupSyncable(groupID string, syncableID string, syncableType model.GroupSyncableType) StoreChannel { + return s.RunQuery(func(supplier LayeredStoreSupplier) *LayeredStoreSupplierResult { + return supplier.GroupGetGroupSyncable(s.TmpContext, groupID, syncableID, syncableType) + }) +} + +func (s *LayeredGroupStore) GetAllGroupSyncablesByGroupId(groupID string, syncableType model.GroupSyncableType) StoreChannel { + return s.RunQuery(func(supplier LayeredStoreSupplier) *LayeredStoreSupplierResult { + return supplier.GroupGetAllGroupSyncablesByGroup(s.TmpContext, groupID, syncableType) + }) +} + +func (s *LayeredGroupStore) UpdateGroupSyncable(groupSyncable *model.GroupSyncable) StoreChannel { + return s.RunQuery(func(supplier LayeredStoreSupplier) 
*LayeredStoreSupplierResult { + return supplier.GroupUpdateGroupSyncable(s.TmpContext, groupSyncable) + }) +} + +func (s *LayeredGroupStore) DeleteGroupSyncable(groupID string, syncableID string, syncableType model.GroupSyncableType) StoreChannel { + return s.RunQuery(func(supplier LayeredStoreSupplier) *LayeredStoreSupplierResult { + return supplier.GroupDeleteGroupSyncable(s.TmpContext, groupID, syncableID, syncableType) + }) +} + +func (s *LayeredGroupStore) PendingAutoAddTeamMembers(minGroupMembersCreateAt int64) StoreChannel { + return s.RunQuery(func(supplier LayeredStoreSupplier) *LayeredStoreSupplierResult { + return supplier.PendingAutoAddTeamMembers(s.TmpContext, minGroupMembersCreateAt) + }) +} + +func (s *LayeredGroupStore) PendingAutoAddChannelMembers(minGroupMembersCreateAt int64) StoreChannel { + return s.RunQuery(func(supplier LayeredStoreSupplier) *LayeredStoreSupplierResult { + return supplier.PendingAutoAddChannelMembers(s.TmpContext, minGroupMembersCreateAt) + }) +} diff --git a/store/layered_store_supplier.go b/store/layered_store_supplier.go index 45ec00068f..72ff0bea38 100644 --- a/store/layered_store_supplier.go +++ b/store/layered_store_supplier.go @@ -46,4 +46,27 @@ type LayeredStoreSupplier interface { SchemeDelete(ctx context.Context, schemeId string, hints ...LayeredStoreHint) *LayeredStoreSupplierResult SchemeGetAllPage(ctx context.Context, scope string, offset int, limit int, hints ...LayeredStoreHint) *LayeredStoreSupplierResult SchemePermanentDeleteAll(ctx context.Context, hints ...LayeredStoreHint) *LayeredStoreSupplierResult + + // Groups + GroupCreate(ctx context.Context, group *model.Group, hints ...LayeredStoreHint) *LayeredStoreSupplierResult + GroupGet(ctx context.Context, groupID string, hints ...LayeredStoreHint) *LayeredStoreSupplierResult + GroupGetByRemoteID(ctx context.Context, remoteID string, groupSource model.GroupSource, hints ...LayeredStoreHint) *LayeredStoreSupplierResult + GroupGetAllBySource(ctx 
context.Context, groupSource model.GroupSource, hints ...LayeredStoreHint) *LayeredStoreSupplierResult + GroupUpdate(ctx context.Context, group *model.Group, hints ...LayeredStoreHint) *LayeredStoreSupplierResult + GroupDelete(ctx context.Context, groupID string, hints ...LayeredStoreHint) *LayeredStoreSupplierResult + + GroupGetMemberUsers(ctx context.Context, groupID string, hints ...LayeredStoreHint) *LayeredStoreSupplierResult + GroupGetMemberUsersPage(ctx context.Context, groupID string, offset int, limit int, hints ...LayeredStoreHint) *LayeredStoreSupplierResult + GroupGetMemberCount(ctx context.Context, groupID string, hints ...LayeredStoreHint) *LayeredStoreSupplierResult + GroupCreateOrRestoreMember(ctx context.Context, groupID string, userID string, hints ...LayeredStoreHint) *LayeredStoreSupplierResult + GroupDeleteMember(ctx context.Context, groupID string, userID string, hints ...LayeredStoreHint) *LayeredStoreSupplierResult + + GroupCreateGroupSyncable(ctx context.Context, groupSyncable *model.GroupSyncable, hints ...LayeredStoreHint) *LayeredStoreSupplierResult + GroupGetGroupSyncable(ctx context.Context, groupID string, syncableID string, syncableType model.GroupSyncableType, hints ...LayeredStoreHint) *LayeredStoreSupplierResult + GroupGetAllGroupSyncablesByGroup(ctx context.Context, groupID string, syncableType model.GroupSyncableType, hints ...LayeredStoreHint) *LayeredStoreSupplierResult + GroupUpdateGroupSyncable(ctx context.Context, groupSyncable *model.GroupSyncable, hints ...LayeredStoreHint) *LayeredStoreSupplierResult + GroupDeleteGroupSyncable(ctx context.Context, groupID string, syncableID string, syncableType model.GroupSyncableType, hints ...LayeredStoreHint) *LayeredStoreSupplierResult + + PendingAutoAddTeamMembers(ctx context.Context, minGroupMembersCreateAt int64, hints ...LayeredStoreHint) *LayeredStoreSupplierResult + PendingAutoAddChannelMembers(ctx context.Context, minGroupMembersCreateAt int64, hints ...LayeredStoreHint) 
*LayeredStoreSupplierResult } diff --git a/store/local_cache_supplier.go b/store/local_cache_supplier.go index 3f87e7c5f9..3d24a86fda 100644 --- a/store/local_cache_supplier.go +++ b/store/local_cache_supplier.go @@ -21,6 +21,9 @@ const ( SCHEME_CACHE_SIZE = 20000 SCHEME_CACHE_SEC = 30 * 60 + GROUP_CACHE_SIZE = 20000 + GROUP_CACHE_SEC = 30 * 60 + CLEAR_CACHE_MESSAGE_DATA = "" ) @@ -31,6 +34,7 @@ type LocalCacheSupplier struct { schemeCache *utils.Cache metrics einterfaces.MetricsInterface cluster einterfaces.ClusterInterface + groupCache *utils.Cache } // Caching Interface @@ -50,6 +54,7 @@ func NewLocalCacheSupplier(metrics einterfaces.MetricsInterface, cluster einterf reactionCache: utils.NewLruWithParams(REACTION_CACHE_SIZE, "Reaction", REACTION_CACHE_SEC, model.CLUSTER_EVENT_INVALIDATE_CACHE_FOR_REACTIONS), roleCache: utils.NewLruWithParams(ROLE_CACHE_SIZE, "Role", ROLE_CACHE_SEC, model.CLUSTER_EVENT_INVALIDATE_CACHE_FOR_ROLES), schemeCache: utils.NewLruWithParams(SCHEME_CACHE_SIZE, "Scheme", SCHEME_CACHE_SEC, model.CLUSTER_EVENT_INVALIDATE_CACHE_FOR_SCHEMES), + groupCache: utils.NewLruWithParams(GROUP_CACHE_SIZE, "Group", GROUP_CACHE_SEC, model.CLUSTER_EVENT_INVALIDATE_CACHE_FOR_GROUPS), metrics: metrics, cluster: cluster, } @@ -57,6 +62,7 @@ func NewLocalCacheSupplier(metrics einterfaces.MetricsInterface, cluster einterf if cluster != nil { cluster.RegisterClusterMessageHandler(model.CLUSTER_EVENT_INVALIDATE_CACHE_FOR_REACTIONS, supplier.handleClusterInvalidateReaction) cluster.RegisterClusterMessageHandler(model.CLUSTER_EVENT_INVALIDATE_CACHE_FOR_ROLES, supplier.handleClusterInvalidateRole) + cluster.RegisterClusterMessageHandler(model.CLUSTER_EVENT_INVALIDATE_CACHE_FOR_GROUPS, supplier.handleClusterInvalidateGroup) } return supplier diff --git a/store/local_cache_supplier_groups.go b/store/local_cache_supplier_groups.go new file mode 100644 index 0000000000..4bedbb7cca --- /dev/null +++ b/store/local_cache_supplier_groups.go @@ -0,0 +1,102 @@ +// Copyright 
(c) 2018-present Mattermost, Inc. All Rights Reserved. +// See License.txt for license information. + +package store + +import ( + "context" + + "github.com/mattermost/mattermost-server/model" +) + +func (s *LocalCacheSupplier) handleClusterInvalidateGroup(msg *model.ClusterMessage) { + if msg.Data == CLEAR_CACHE_MESSAGE_DATA { + s.groupCache.Purge() + } else { + s.groupCache.Remove(msg.Data) + } +} + +func (s *LocalCacheSupplier) GroupCreate(ctx context.Context, group *model.Group, hints ...LayeredStoreHint) *LayeredStoreSupplierResult { + return s.Next().GroupCreate(ctx, group, hints...) +} + +func (s *LocalCacheSupplier) GroupGet(ctx context.Context, groupID string, hints ...LayeredStoreHint) *LayeredStoreSupplierResult { + if result := s.doStandardReadCache(ctx, s.groupCache, groupID, hints...); result != nil { + return result + } + + result := s.Next().GroupGet(ctx, groupID, hints...) + + s.doStandardAddToCache(ctx, s.groupCache, groupID, result, hints...) + + return result +} + +func (s *LocalCacheSupplier) GroupGetByRemoteID(ctx context.Context, remoteID string, groupSource model.GroupSource, hints ...LayeredStoreHint) *LayeredStoreSupplierResult { + return s.Next().GroupGetByRemoteID(ctx, remoteID, groupSource, hints...) +} + +func (s *LocalCacheSupplier) GroupGetAllBySource(ctx context.Context, groupSource model.GroupSource, hints ...LayeredStoreHint) *LayeredStoreSupplierResult { + return s.Next().GroupGetAllBySource(ctx, groupSource, hints...) +} + +func (s *LocalCacheSupplier) GroupUpdate(ctx context.Context, group *model.Group, hints ...LayeredStoreHint) *LayeredStoreSupplierResult { + defer s.doInvalidateCacheCluster(s.groupCache, group.Id) + return s.Next().GroupUpdate(ctx, group, hints...) 
+} + +func (s *LocalCacheSupplier) GroupDelete(ctx context.Context, groupID string, hints ...LayeredStoreHint) *LayeredStoreSupplierResult { + defer s.doInvalidateCacheCluster(s.groupCache, groupID) + defer s.doClearCacheCluster(s.groupCache) + + return s.Next().GroupDelete(ctx, groupID, hints...) +} + +func (s *LocalCacheSupplier) GroupGetMemberUsers(ctx context.Context, groupID string, hints ...LayeredStoreHint) *LayeredStoreSupplierResult { + return s.Next().GroupGetMemberUsers(ctx, groupID, hints...) +} + +func (s *LocalCacheSupplier) GroupGetMemberUsersPage(ctx context.Context, groupID string, offset int, limit int, hints ...LayeredStoreHint) *LayeredStoreSupplierResult { + return s.Next().GroupGetMemberUsersPage(ctx, groupID, offset, limit, hints...) +} + +func (s *LocalCacheSupplier) GroupGetMemberCount(ctx context.Context, groupID string, hints ...LayeredStoreHint) *LayeredStoreSupplierResult { + return s.Next().GroupGetMemberCount(ctx, groupID, hints...) +} + +func (s *LocalCacheSupplier) GroupCreateOrRestoreMember(ctx context.Context, groupID string, userID string, hints ...LayeredStoreHint) *LayeredStoreSupplierResult { + return s.Next().GroupCreateOrRestoreMember(ctx, groupID, userID, hints...) +} + +func (s *LocalCacheSupplier) GroupDeleteMember(ctx context.Context, groupID string, userID string, hints ...LayeredStoreHint) *LayeredStoreSupplierResult { + return s.Next().GroupDeleteMember(ctx, groupID, userID, hints...) +} + +func (s *LocalCacheSupplier) GroupCreateGroupSyncable(ctx context.Context, groupSyncable *model.GroupSyncable, hints ...LayeredStoreHint) *LayeredStoreSupplierResult { + return s.Next().GroupCreateGroupSyncable(ctx, groupSyncable, hints...) 
+} + +func (s *LocalCacheSupplier) GroupGetGroupSyncable(ctx context.Context, groupID string, syncableID string, syncableType model.GroupSyncableType, hints ...LayeredStoreHint) *LayeredStoreSupplierResult { + return s.Next().GroupGetGroupSyncable(ctx, groupID, syncableID, syncableType, hints...) +} + +func (s *LocalCacheSupplier) GroupGetAllGroupSyncablesByGroup(ctx context.Context, groupID string, syncableType model.GroupSyncableType, hints ...LayeredStoreHint) *LayeredStoreSupplierResult { + return s.Next().GroupGetAllGroupSyncablesByGroup(ctx, groupID, syncableType, hints...) +} + +func (s *LocalCacheSupplier) GroupUpdateGroupSyncable(ctx context.Context, groupSyncable *model.GroupSyncable, hints ...LayeredStoreHint) *LayeredStoreSupplierResult { + return s.Next().GroupUpdateGroupSyncable(ctx, groupSyncable, hints...) +} + +func (s *LocalCacheSupplier) GroupDeleteGroupSyncable(ctx context.Context, groupID string, syncableID string, syncableType model.GroupSyncableType, hints ...LayeredStoreHint) *LayeredStoreSupplierResult { + return s.Next().GroupDeleteGroupSyncable(ctx, groupID, syncableID, syncableType, hints...) +} + +func (s *LocalCacheSupplier) PendingAutoAddTeamMembers(ctx context.Context, minGroupMembersCreateAt int64, hints ...LayeredStoreHint) *LayeredStoreSupplierResult { + return s.Next().PendingAutoAddTeamMembers(ctx, minGroupMembersCreateAt, hints...) +} + +func (s *LocalCacheSupplier) PendingAutoAddChannelMembers(ctx context.Context, minGroupMembersCreateAt int64, hints ...LayeredStoreHint) *LayeredStoreSupplierResult { + return s.Next().PendingAutoAddChannelMembers(ctx, minGroupMembersCreateAt, hints...) 
+} diff --git a/store/local_cache_supplier_roles.go b/store/local_cache_supplier_roles.go index 41f88a216e..6d8c61a5df 100644 --- a/store/local_cache_supplier_roles.go +++ b/store/local_cache_supplier_roles.go @@ -18,7 +18,7 @@ func (s *LocalCacheSupplier) handleClusterInvalidateRole(msg *model.ClusterMessa } func (s *LocalCacheSupplier) RoleSave(ctx context.Context, role *model.Role, hints ...LayeredStoreHint) *LayeredStoreSupplierResult { - if len(role.Id) != 0 { + if len(role.Name) != 0 { defer s.doInvalidateCacheCluster(s.roleCache, role.Name) } return s.Next().RoleSave(ctx, role, hints...) diff --git a/store/redis_supplier_groups.go b/store/redis_supplier_groups.go new file mode 100644 index 0000000000..ee8bcb04ae --- /dev/null +++ b/store/redis_supplier_groups.go @@ -0,0 +1,100 @@ +// Copyright (c) 2018-present Mattermost, Inc. All Rights Reserved. +// See License.txt for license information. + +package store + +import ( + "context" + + "github.com/mattermost/mattermost-server/model" +) + +func (s *RedisSupplier) GroupCreate(ctx context.Context, group *model.Group, hints ...LayeredStoreHint) *LayeredStoreSupplierResult { + // TODO: Redis caching. + return s.Next().GroupCreate(ctx, group, hints...) +} + +func (s *RedisSupplier) GroupGet(ctx context.Context, groupID string, hints ...LayeredStoreHint) *LayeredStoreSupplierResult { + // TODO: Redis caching. + return s.Next().GroupGet(ctx, groupID, hints...) +} + +func (s *RedisSupplier) GroupGetByRemoteID(ctx context.Context, remoteID string, groupSource model.GroupSource, hints ...LayeredStoreHint) *LayeredStoreSupplierResult { + // TODO: Redis caching. + return s.Next().GroupGetByRemoteID(ctx, remoteID, groupSource, hints...) +} + +func (s *RedisSupplier) GroupGetAllBySource(ctx context.Context, groupSource model.GroupSource, hints ...LayeredStoreHint) *LayeredStoreSupplierResult { + // TODO: Redis caching. + return s.Next().GroupGetAllBySource(ctx, groupSource, hints...) 
+} + +func (s *RedisSupplier) GroupUpdate(ctx context.Context, group *model.Group, hints ...LayeredStoreHint) *LayeredStoreSupplierResult { + // TODO: Redis caching. + return s.Next().GroupUpdate(ctx, group, hints...) +} + +func (s *RedisSupplier) GroupDelete(ctx context.Context, groupID string, hints ...LayeredStoreHint) *LayeredStoreSupplierResult { + // TODO: Redis caching. + return s.Next().GroupDelete(ctx, groupID, hints...) +} + +func (s *RedisSupplier) GroupGetMemberUsers(ctx context.Context, groupID string, hints ...LayeredStoreHint) *LayeredStoreSupplierResult { + // TODO: Redis caching. + return s.Next().GroupGetMemberUsers(ctx, groupID, hints...) +} + +func (s *RedisSupplier) GroupGetMemberUsersPage(ctx context.Context, groupID string, offset int, limit int, hints ...LayeredStoreHint) *LayeredStoreSupplierResult { + // TODO: Redis caching. + return s.Next().GroupGetMemberUsersPage(ctx, groupID, offset, limit, hints...) +} + +func (s *RedisSupplier) GroupGetMemberCount(ctx context.Context, groupID string, hints ...LayeredStoreHint) *LayeredStoreSupplierResult { + // TODO: Redis caching. + return s.Next().GroupGetMemberCount(ctx, groupID, hints...) +} + +func (s *RedisSupplier) GroupCreateOrRestoreMember(ctx context.Context, groupID string, userID string, hints ...LayeredStoreHint) *LayeredStoreSupplierResult { + // TODO: Redis caching. + return s.Next().GroupCreateOrRestoreMember(ctx, groupID, userID, hints...) +} + +func (s *RedisSupplier) GroupDeleteMember(ctx context.Context, groupID string, userID string, hints ...LayeredStoreHint) *LayeredStoreSupplierResult { + // TODO: Redis caching. + return s.Next().GroupDeleteMember(ctx, groupID, userID, hints...) +} + +func (s *RedisSupplier) GroupCreateGroupSyncable(ctx context.Context, groupSyncable *model.GroupSyncable, hints ...LayeredStoreHint) *LayeredStoreSupplierResult { + // TODO: Redis caching. + return s.Next().GroupCreateGroupSyncable(ctx, groupSyncable, hints...) 
+} + +func (s *RedisSupplier) GroupGetGroupSyncable(ctx context.Context, groupID string, syncableID string, syncableType model.GroupSyncableType, hints ...LayeredStoreHint) *LayeredStoreSupplierResult { + // TODO: Redis caching. + return s.Next().GroupGetGroupSyncable(ctx, groupID, syncableID, syncableType, hints...) +} + +func (s *RedisSupplier) GroupGetAllGroupSyncablesByGroup(ctx context.Context, groupID string, syncableType model.GroupSyncableType, hints ...LayeredStoreHint) *LayeredStoreSupplierResult { + // TODO: Redis caching. + return s.Next().GroupGetAllGroupSyncablesByGroup(ctx, groupID, syncableType, hints...) +} + +func (s *RedisSupplier) GroupUpdateGroupSyncable(ctx context.Context, groupSyncable *model.GroupSyncable, hints ...LayeredStoreHint) *LayeredStoreSupplierResult { + // TODO: Redis caching. + return s.Next().GroupUpdateGroupSyncable(ctx, groupSyncable, hints...) +} + +func (s *RedisSupplier) GroupDeleteGroupSyncable(ctx context.Context, groupID string, syncableID string, syncableType model.GroupSyncableType, hints ...LayeredStoreHint) *LayeredStoreSupplierResult { + // TODO: Redis caching. + return s.Next().GroupDeleteGroupSyncable(ctx, groupID, syncableID, syncableType, hints...) +} + +func (s *RedisSupplier) PendingAutoAddTeamMembers(ctx context.Context, minGroupMembersCreateAt int64, hints ...LayeredStoreHint) *LayeredStoreSupplierResult { + // TODO: Redis caching. + return s.Next().PendingAutoAddTeamMembers(ctx, minGroupMembersCreateAt, hints...) +} + +func (s *RedisSupplier) PendingAutoAddChannelMembers(ctx context.Context, minGroupMembersCreateAt int64, hints ...LayeredStoreHint) *LayeredStoreSupplierResult { + // TODO: Redis caching. + return s.Next().PendingAutoAddChannelMembers(ctx, minGroupMembersCreateAt, hints...) 
+} diff --git a/store/sqlstore/channel_store.go b/store/sqlstore/channel_store.go index f934169f32..e82f0bd277 100644 --- a/store/sqlstore/channel_store.go +++ b/store/sqlstore/channel_store.go @@ -955,6 +955,27 @@ func (s SqlChannelStore) GetChannels(teamId string, userId string, includeDelete }) } +func (s SqlChannelStore) GetAllChannels(offset int, limit int, includeDeleted bool) store.StoreChannel { + return store.Do(func(result *store.StoreResult) { + deleteFilter := "AND c.DeleteAt = 0" + if includeDeleted { + deleteFilter = "" + } + + query := "SELECT c.*, Teams.DisplayName AS TeamDisplayName, Teams.Name AS TeamName, Teams.UpdateAt as TeamUpdateAt FROM Channels AS c JOIN Teams ON Teams.Id = c.TeamId WHERE (c.Type = 'P' OR c.Type = 'O') " + deleteFilter + " ORDER BY c.DisplayName, Teams.DisplayName LIMIT :Limit OFFSET :Offset" + + data := &model.ChannelListWithTeamData{} + _, err := s.GetReplica().Select(data, query, map[string]interface{}{"Limit": limit, "Offset": offset}) + + if err != nil { + result.Err = model.NewAppError("SqlChannelStore.GetAllChannels", "store.sql_channel.get_all_channels.get.app_error", nil, err.Error(), http.StatusInternalServerError) + return + } + + result.Data = data + }) +} + func (s SqlChannelStore) GetMoreChannels(teamId string, userId string, offset int, limit int) store.StoreChannel { return store.Do(func(result *store.StoreResult) { data := &model.ChannelList{} @@ -2072,6 +2093,36 @@ func (s SqlChannelStore) SearchInTeam(teamId string, term string, includeDeleted }) } +func (s SqlChannelStore) SearchAllChannels(term string, includeDeleted bool) store.StoreChannel { + return store.Do(func(result *store.StoreResult) { + parameters := map[string]interface{}{} + deleteFilter := "AND c.DeleteAt = 0" + if includeDeleted { + deleteFilter = "" + } + searchQuery := `SELECT c.*, t.DisplayName AS TeamDisplayName, t.Name AS TeamName, t.UpdateAt as TeamUpdateAt FROM Channels AS c JOIN Teams AS t ON t.Id = c.TeamId WHERE (c.Type = 'P' OR 
c.Type = 'O') ` + deleteFilter + ` SEARCH_CLAUSE ORDER BY c.DisplayName, t.DisplayName LIMIT 100` + + likeClause, likeTerm := s.buildLIKEClause(term, "c.Name, c.DisplayName, c.Purpose") + if likeTerm == "" { + // If the likeTerm is empty after preparing, then don't bother searching. + searchQuery = strings.Replace(searchQuery, "SEARCH_CLAUSE", "", 1) + } else { + parameters["LikeTerm"] = likeTerm + fulltextClause, fulltextTerm := s.buildFulltextClause(term, "c.Name, c.DisplayName, c.Purpose") + parameters["FulltextTerm"] = fulltextTerm + searchQuery = strings.Replace(searchQuery, "SEARCH_CLAUSE", "AND ("+likeClause+" OR "+fulltextClause+")", 1) + } + + var channels model.ChannelListWithTeamData + + if _, err := s.GetReplica().Select(&channels, searchQuery, parameters); err != nil { + result.Err = model.NewAppError("SqlChannelStore.Search", "store.sql_channel.search.app_error", nil, "term="+term+", "+", "+err.Error(), http.StatusInternalServerError) + } + + result.Data = &channels + }) +} + func (s SqlChannelStore) SearchMore(userId string, teamId string, term string) store.StoreChannel { return store.Do(func(result *store.StoreResult) { *result = s.performSearch(` diff --git a/store/sqlstore/group_store_test.go b/store/sqlstore/group_store_test.go new file mode 100644 index 0000000000..117f4c5bad --- /dev/null +++ b/store/sqlstore/group_store_test.go @@ -0,0 +1,14 @@ +// Copyright (c) 2018-present Mattermost, Inc. All Rights Reserved. +// See License.txt for license information. + +package sqlstore + +import ( + "testing" + + "github.com/mattermost/mattermost-server/store/storetest" +) + +func TestGroupStore(t *testing.T) { + StoreTest(t, storetest.TestGroupStore) +} diff --git a/store/sqlstore/group_supplier.go b/store/sqlstore/group_supplier.go new file mode 100644 index 0000000000..db85187a60 --- /dev/null +++ b/store/sqlstore/group_supplier.go @@ -0,0 +1,771 @@ +// Copyright (c) 2018-present Mattermost, Inc. All Rights Reserved. 
+// See License.txt for license information. + +package sqlstore + +import ( + "context" + "database/sql" + "fmt" + "net/http" + + "github.com/mattermost/mattermost-server/model" + "github.com/mattermost/mattermost-server/store" +) + +type groupTeam struct { + model.GroupSyncable + TeamId string `db:"TeamId"` +} + +type groupChannel struct { + model.GroupSyncable + ChannelId string `db:"ChannelId"` +} + +type groupTeamJoin struct { + groupTeam + TeamDisplayName string `db:"TeamDisplayName"` + TeamType string `db:"TeamType"` +} + +type groupChannelJoin struct { + groupChannel + ChannelDisplayName string `db:"ChannelDisplayName"` + TeamDisplayName string `db:"TeamDisplayName"` + TeamType string `db:"TeamType"` + ChannelType string `db:"ChannelType"` + TeamID string `db:"TeamId"` +} + +func initSqlSupplierGroups(sqlStore SqlStore) { + for _, db := range sqlStore.GetAllConns() { + groups := db.AddTableWithName(model.Group{}, "UserGroups").SetKeys(false, "Id") + groups.ColMap("Id").SetMaxSize(26) + groups.ColMap("Name").SetMaxSize(model.GroupNameMaxLength).SetUnique(true) + groups.ColMap("DisplayName").SetMaxSize(model.GroupDisplayNameMaxLength) + groups.ColMap("Description").SetMaxSize(model.GroupDescriptionMaxLength) + groups.ColMap("Source").SetMaxSize(model.GroupSourceMaxLength) + groups.ColMap("RemoteId").SetMaxSize(model.GroupRemoteIDMaxLength) + groups.SetUniqueTogether("Source", "RemoteId") + + groupMembers := db.AddTableWithName(model.GroupMember{}, "GroupMembers").SetKeys(false, "GroupId", "UserId") + groupMembers.ColMap("GroupId").SetMaxSize(26) + groupMembers.ColMap("UserId").SetMaxSize(26) + + groupTeams := db.AddTableWithName(groupTeam{}, "GroupTeams").SetKeys(false, "GroupId", "TeamId") + groupTeams.ColMap("GroupId").SetMaxSize(26) + groupTeams.ColMap("TeamId").SetMaxSize(26) + + groupChannels := db.AddTableWithName(groupChannel{}, "GroupChannels").SetKeys(false, "GroupId", "ChannelId") + groupChannels.ColMap("GroupId").SetMaxSize(26) + 
groupChannels.ColMap("ChannelId").SetMaxSize(26) + } +} + +func (s *SqlSupplier) CreateIndexesIfNotExistsGroups() { + s.CreateIndexIfNotExists("idx_groupmembers_create_at", "GroupMembers", "CreateAt") + s.CreateIndexIfNotExists("idx_usergroups_remote_id", "UserGroups", "RemoteId") + s.CreateIndexIfNotExists("idx_usergroups_delete_at", "UserGroups", "DeleteAt") +} + +func (s *SqlSupplier) GroupCreate(ctx context.Context, group *model.Group, hints ...store.LayeredStoreHint) *store.LayeredStoreSupplierResult { + result := store.NewSupplierResult() + + if len(group.Id) != 0 { + result.Err = model.NewAppError("SqlGroupStore.GroupCreate", "model.group.id.app_error", nil, "", http.StatusBadRequest) + return result + } + + if err := group.IsValidForCreate(); err != nil { + result.Err = err + return result + } + + group.Id = model.NewId() + group.CreateAt = model.GetMillis() + group.UpdateAt = group.CreateAt + + if err := s.GetMaster().Insert(group); err != nil { + if IsUniqueConstraintError(err, []string{"Name", "groups_name_key"}) { + result.Err = model.NewAppError("SqlGroupStore.GroupCreate", "store.sql_group.unique_constraint", nil, err.Error(), http.StatusInternalServerError) + } else { + result.Err = model.NewAppError("SqlGroupStore.GroupCreate", "store.insert_error", nil, err.Error(), http.StatusInternalServerError) + } + return result + } + + result.Data = group + return result +} + +func (s *SqlSupplier) GroupGet(ctx context.Context, groupId string, hints ...store.LayeredStoreHint) *store.LayeredStoreSupplierResult { + result := store.NewSupplierResult() + + var group *model.Group + if err := s.GetReplica().SelectOne(&group, "SELECT * from UserGroups WHERE Id = :Id", map[string]interface{}{"Id": groupId}); err != nil { + if err == sql.ErrNoRows { + result.Err = model.NewAppError("SqlGroupStore.GroupGet", "store.sql_group.no_rows", nil, err.Error(), http.StatusNotFound) + } else { + result.Err = model.NewAppError("SqlGroupStore.GroupGet", "store.select_error", nil, 
err.Error(), http.StatusInternalServerError) + } + return result + } + + result.Data = group + return result +} + +func (s *SqlSupplier) GroupGetByRemoteID(ctx context.Context, remoteID string, groupSource model.GroupSource, hints ...store.LayeredStoreHint) *store.LayeredStoreSupplierResult { + result := store.NewSupplierResult() + + var group *model.Group + if err := s.GetReplica().SelectOne(&group, "SELECT * from UserGroups WHERE RemoteId = :RemoteId AND Source = :Source", map[string]interface{}{"RemoteId": remoteID, "Source": groupSource}); err != nil { + if err == sql.ErrNoRows { + result.Err = model.NewAppError("SqlGroupStore.GroupGetByRemoteID", "store.sql_group.no_rows", nil, err.Error(), http.StatusNotFound) + } else { + result.Err = model.NewAppError("SqlGroupStore.GroupGetByRemoteID", "store.select_error", nil, err.Error(), http.StatusInternalServerError) + } + return result + } + + result.Data = group + return result +} + +func (s *SqlSupplier) GroupGetAllBySource(ctx context.Context, groupSource model.GroupSource, hints ...store.LayeredStoreHint) *store.LayeredStoreSupplierResult { + result := store.NewSupplierResult() + + var groups []*model.Group + + if _, err := s.GetReplica().Select(&groups, "SELECT * from UserGroups WHERE DeleteAt = 0 AND Source = :Source", map[string]interface{}{"Source": groupSource}); err != nil { + result.Err = model.NewAppError("SqlGroupStore.GroupGetAllBySource", "store.select_error", nil, err.Error(), http.StatusInternalServerError) + return result + } + + result.Data = groups + + return result +} + +func (s *SqlSupplier) GroupUpdate(ctx context.Context, group *model.Group, hints ...store.LayeredStoreHint) *store.LayeredStoreSupplierResult { + result := store.NewSupplierResult() + + var retrievedGroup *model.Group + if err := s.GetMaster().SelectOne(&retrievedGroup, "SELECT * FROM UserGroups WHERE Id = :Id", map[string]interface{}{"Id": group.Id}); err != nil { + if err == sql.ErrNoRows { + result.Err = 
model.NewAppError("SqlGroupStore.GroupUpdate", "store.sql_group.no_rows", nil, "id="+group.Id+","+err.Error(), http.StatusNotFound) + } else { + result.Err = model.NewAppError("SqlGroupStore.GroupUpdate", "store.select_error", nil, "id="+group.Id+","+err.Error(), http.StatusInternalServerError) + } + return result + } + + // If updating DeleteAt it can only be to 0 + if group.DeleteAt != retrievedGroup.DeleteAt && group.DeleteAt != 0 { + result.Err = model.NewAppError("SqlGroupStore.GroupUpdate", "model.group.delete_at.app_error", nil, "", http.StatusInternalServerError) + return result + } + + // Reset these properties, don't update them based on input + group.CreateAt = retrievedGroup.CreateAt + group.UpdateAt = model.GetMillis() + + if err := group.IsValidForUpdate(); err != nil { + result.Err = err + return result + } + + rowsChanged, err := s.GetMaster().Update(group) + if err != nil { + result.Err = model.NewAppError("SqlGroupStore.GroupUpdate", "store.update_error", nil, err.Error(), http.StatusInternalServerError) + return result + } + if rowsChanged != 1 { + result.Err = model.NewAppError("SqlGroupStore.GroupUpdate", "store.sql_group.no_rows_changed", nil, "", http.StatusInternalServerError) + return result + } + + result.Data = group + return result +} + +func (s *SqlSupplier) GroupDelete(ctx context.Context, groupID string, hints ...store.LayeredStoreHint) *store.LayeredStoreSupplierResult { + result := store.NewSupplierResult() + + var group *model.Group + if err := s.GetReplica().SelectOne(&group, "SELECT * from UserGroups WHERE Id = :Id AND DeleteAt = 0", map[string]interface{}{"Id": groupID}); err != nil { + if err == sql.ErrNoRows { + result.Err = model.NewAppError("SqlGroupStore.GroupDelete", "store.sql_group.no_rows", nil, "Id="+groupID+", "+err.Error(), http.StatusNotFound) + } else { + result.Err = model.NewAppError("SqlGroupStore.GroupDelete", "store.select_error", nil, err.Error(), http.StatusInternalServerError) + } + + return result + } + + 
time := model.GetMillis() + group.DeleteAt = time + group.UpdateAt = time + + if _, err := s.GetMaster().Update(group); err != nil { + result.Err = model.NewAppError("SqlGroupStore.GroupDelete", "store.update_error", nil, err.Error(), http.StatusInternalServerError) + } + + result.Data = group + return result +} + +func (s *SqlSupplier) GroupGetMemberUsers(stc context.Context, groupID string, hints ...store.LayeredStoreHint) *store.LayeredStoreSupplierResult { + result := store.NewSupplierResult() + + var groupMembers []*model.User + + query := ` + SELECT + Users.* + FROM + GroupMembers + JOIN Users ON Users.Id = GroupMembers.UserId + WHERE + GroupMembers.DeleteAt = 0 + AND Users.DeleteAt = 0 + AND GroupId = :GroupId` + + if _, err := s.GetReplica().Select(&groupMembers, query, map[string]interface{}{"GroupId": groupID}); err != nil { + result.Err = model.NewAppError("SqlGroupStore.GroupGetAllBySource", "store.select_error", nil, err.Error(), http.StatusInternalServerError) + return result + } + + result.Data = groupMembers + + return result +} + +func (s *SqlSupplier) GroupGetMemberUsersPage(stc context.Context, groupID string, offset int, limit int, hints ...store.LayeredStoreHint) *store.LayeredStoreSupplierResult { + result := store.NewSupplierResult() + + var groupMembers []*model.User + + query := ` + SELECT + Users.* + FROM + GroupMembers + JOIN Users ON Users.Id = GroupMembers.UserId + WHERE + GroupMembers.DeleteAt = 0 + AND Users.DeleteAt = 0 + AND GroupId = :GroupId + ORDER BY + GroupMembers.CreateAt DESC + LIMIT + :Limit + OFFSET + :Offset` + + if _, err := s.GetReplica().Select(&groupMembers, query, map[string]interface{}{"GroupId": groupID, "Limit": limit, "Offset": offset}); err != nil { + result.Err = model.NewAppError("SqlGroupStore.GroupGetMemberUsersPage", "store.select_error", nil, err.Error(), http.StatusInternalServerError) + return result + } + + result.Data = groupMembers + + return result +} + +func (s *SqlSupplier) GroupGetMemberCount(stc 
context.Context, groupID string, hints ...store.LayeredStoreHint) *store.LayeredStoreSupplierResult { + result := store.NewSupplierResult() + + var count int64 + var err error + + query := ` + SELECT + count(*) + FROM + GroupMembers + WHERE + GroupMembers.GroupId = :GroupId` + + if count, err = s.GetReplica().SelectInt(query, map[string]interface{}{"GroupId": groupID}); err != nil { + result.Err = model.NewAppError("SqlGroupStore.GroupGetMemberUsersPage", "store.select_error", nil, err.Error(), http.StatusInternalServerError) + return result + } + + result.Data = count + + return result +} + +func (s *SqlSupplier) GroupCreateOrRestoreMember(ctx context.Context, groupID string, userID string, hints ...store.LayeredStoreHint) *store.LayeredStoreSupplierResult { + result := store.NewSupplierResult() + + member := &model.GroupMember{ + GroupId: groupID, + UserId: userID, + CreateAt: model.GetMillis(), + } + + if result.Err = member.IsValid(); result.Err != nil { + return result + } + + var retrievedGroup *model.Group + if err := s.GetMaster().SelectOne(&retrievedGroup, "SELECT * FROM UserGroups WHERE Id = :Id", map[string]interface{}{"Id": groupID}); err != nil { + result.Err = model.NewAppError("SqlGroupStore.GroupCreateOrRestoreMember", "store.insert_error", nil, "group_id="+member.GroupId+"user_id="+member.UserId+","+err.Error(), http.StatusInternalServerError) + return result + } + + var retrievedMember *model.GroupMember + if err := s.GetMaster().SelectOne(&retrievedMember, "SELECT * FROM GroupMembers WHERE GroupId = :GroupId AND UserId = :UserId", map[string]interface{}{"GroupId": member.GroupId, "UserId": member.UserId}); err != nil { + if err != sql.ErrNoRows { + result.Err = model.NewAppError("SqlGroupStore.GroupCreateOrRestoreMember", "store.select_error", nil, "group_id="+member.GroupId+"user_id="+member.UserId+","+err.Error(), http.StatusInternalServerError) + return result + } + } + + if retrievedMember != nil && retrievedMember.DeleteAt == 0 { + 
result.Err = model.NewAppError("SqlGroupStore.GroupCreateOrRestoreMember", "store.sql_group.uniqueness_error", nil, "group_id="+member.GroupId+", user_id="+member.UserId, http.StatusBadRequest) + return result + } + + if retrievedMember == nil { + if err := s.GetMaster().Insert(member); err != nil { + if IsUniqueConstraintError(err, []string{"GroupId", "UserId", "groupmembers_pkey", "PRIMARY"}) { + result.Err = model.NewAppError("SqlGroupStore.GroupCreateOrRestoreMember", "store.sql_group.uniqueness_error", nil, "group_id="+member.GroupId+", user_id="+member.UserId+", "+err.Error(), http.StatusBadRequest) + return result + } + result.Err = model.NewAppError("SqlGroupStore.GroupCreateOrRestoreMember", "store.insert_error", nil, "group_id="+member.GroupId+", user_id="+member.UserId+", "+err.Error(), http.StatusInternalServerError) + return result + } + } else { + member.DeleteAt = 0 + var rowsChanged int64 + var err error + if rowsChanged, err = s.GetMaster().Update(member); err != nil { + result.Err = model.NewAppError("SqlGroupStore.GroupCreateOrRestoreMember", "store.update_error", nil, "group_id="+member.GroupId+", user_id="+member.UserId+", "+err.Error(), http.StatusInternalServerError) + return result + } + if rowsChanged != 1 { + result.Err = model.NewAppError("SqlGroupStore.GroupCreateOrRestoreMember", "store.sql_group.no_rows_changed", nil, "", http.StatusInternalServerError) + return result + } + } + + result.Data = member + return result +} + +func (s *SqlSupplier) GroupDeleteMember(ctx context.Context, groupID string, userID string, hints ...store.LayeredStoreHint) *store.LayeredStoreSupplierResult { + result := store.NewSupplierResult() + + var retrievedMember *model.GroupMember + if err := s.GetMaster().SelectOne(&retrievedMember, "SELECT * FROM GroupMembers WHERE GroupId = :GroupId AND UserId = :UserId AND DeleteAt = 0", map[string]interface{}{"GroupId": groupID, "UserId": userID}); err != nil { + if err == sql.ErrNoRows { + result.Err = 
model.NewAppError("SqlGroupStore.GroupDeleteMember", "store.sql_group.no_rows", nil, "group_id="+groupID+"user_id="+userID+","+err.Error(), http.StatusNotFound) + return result + } + result.Err = model.NewAppError("SqlGroupStore.GroupDeleteMember", "store.select_error", nil, "group_id="+groupID+"user_id="+userID+","+err.Error(), http.StatusInternalServerError) + return result + } + + retrievedMember.DeleteAt = model.GetMillis() + + if _, err := s.GetMaster().Update(retrievedMember); err != nil { + result.Err = model.NewAppError("SqlGroupStore.GroupDeleteMember", "store.update_error", nil, err.Error(), http.StatusInternalServerError) + return result + } + + result.Data = retrievedMember + return result +} + +func (s *SqlSupplier) GroupCreateGroupSyncable(ctx context.Context, groupSyncable *model.GroupSyncable, hints ...store.LayeredStoreHint) *store.LayeredStoreSupplierResult { + result := store.NewSupplierResult() + + if err := groupSyncable.IsValid(); err != nil { + result.Err = err + return result + } + + // Reset values that shouldn't be updatable by parameter + groupSyncable.DeleteAt = 0 + groupSyncable.CreateAt = model.GetMillis() + groupSyncable.UpdateAt = groupSyncable.CreateAt + + var err error + + switch groupSyncable.Type { + case model.GroupSyncableTypeTeam: + teamResult := <-s.Team().Get(groupSyncable.SyncableId) + if teamResult.Err != nil { + result.Err = teamResult.Err + return result + } + + err = s.GetMaster().Insert(groupSyncableToGroupTeam(groupSyncable)) + case model.GroupSyncableTypeChannel: + channelResult := <-s.Channel().Get(groupSyncable.SyncableId, false) + if channelResult.Err != nil { + result.Err = channelResult.Err + return result + } + + err = s.GetMaster().Insert(groupSyncableToGroupChannel(groupSyncable)) + default: + result.Err = model.NewAppError("SqlGroupStore.GroupCreateGroupSyncable", "model.group_syncable.type.app_error", nil, "group_id="+groupSyncable.GroupId+", syncable_id="+groupSyncable.SyncableId+", "+err.Error(), 
http.StatusInternalServerError) + return result + } + + if err != nil { + result.Err = model.NewAppError("SqlGroupStore.GroupCreateGroupSyncable", "store.insert_error", nil, "group_id="+groupSyncable.GroupId+", syncable_id="+groupSyncable.SyncableId+", "+err.Error(), http.StatusInternalServerError) + return result + } + + result.Data = groupSyncable + return result +} + +func (s *SqlSupplier) GroupGetGroupSyncable(ctx context.Context, groupID string, syncableID string, syncableType model.GroupSyncableType, hints ...store.LayeredStoreHint) *store.LayeredStoreSupplierResult { + result := store.NewSupplierResult() + + groupSyncable, err := s.getGroupSyncable(groupID, syncableID, syncableType) + if err != nil { + if err == sql.ErrNoRows { + result.Err = model.NewAppError("SqlGroupStore.GroupGetGroupSyncable", "store.sql_group.no_rows", nil, err.Error(), http.StatusNotFound) + } else { + result.Err = model.NewAppError("SqlGroupStore.GroupGetGroupSyncable", "store.select_error", nil, err.Error(), http.StatusInternalServerError) + } + return result + } + + result.Data = groupSyncable + + return result +} + +func (s *SqlSupplier) getGroupSyncable(groupID string, syncableID string, syncableType model.GroupSyncableType) (*model.GroupSyncable, error) { + var err error + var result interface{} + + switch syncableType { + case model.GroupSyncableTypeTeam: + result, err = s.GetMaster().Get(groupTeam{}, groupID, syncableID) + case model.GroupSyncableTypeChannel: + result, err = s.GetMaster().Get(groupChannel{}, groupID, syncableID) + } + + if err != nil { + return nil, err + } + + if result == nil { + return nil, sql.ErrNoRows + } + + groupSyncable := model.GroupSyncable{} + switch syncableType { + case model.GroupSyncableTypeTeam: + groupTeam := result.(*groupTeam) + groupSyncable.SyncableId = groupTeam.TeamId + groupSyncable.GroupId = groupTeam.GroupId + groupSyncable.CanLeave = groupTeam.CanLeave + groupSyncable.AutoAdd = groupTeam.AutoAdd + groupSyncable.CreateAt = 
groupTeam.CreateAt + groupSyncable.DeleteAt = groupTeam.DeleteAt + groupSyncable.UpdateAt = groupTeam.UpdateAt + groupSyncable.Type = syncableType + case model.GroupSyncableTypeChannel: + groupChannel := result.(*groupChannel) + groupSyncable.SyncableId = groupChannel.ChannelId + groupSyncable.GroupId = groupChannel.GroupId + groupSyncable.CanLeave = groupChannel.CanLeave + groupSyncable.AutoAdd = groupChannel.AutoAdd + groupSyncable.CreateAt = groupChannel.CreateAt + groupSyncable.DeleteAt = groupChannel.DeleteAt + groupSyncable.UpdateAt = groupChannel.UpdateAt + groupSyncable.Type = syncableType + default: + return nil, fmt.Errorf("unable to convert syncableType: %s", syncableType.String()) + } + + return &groupSyncable, nil +} + +func (s *SqlSupplier) GroupGetAllGroupSyncablesByGroup(ctx context.Context, groupID string, syncableType model.GroupSyncableType, hints ...store.LayeredStoreHint) *store.LayeredStoreSupplierResult { + result := store.NewSupplierResult() + + args := map[string]interface{}{"GroupId": groupID} + + appErrF := func(msg string) *model.AppError { + return model.NewAppError("SqlGroupStore.GroupGetAllGroupSyncablesByGroup", "store.select_error", nil, msg, http.StatusInternalServerError) + } + + groupSyncables := []*model.GroupSyncable{} + + switch syncableType { + case model.GroupSyncableTypeTeam: + sqlQuery := ` + SELECT + GroupTeams.*, + Teams.DisplayName AS TeamDisplayName, + Teams.Type AS TeamType + FROM + GroupTeams + JOIN Teams ON Teams.Id = GroupTeams.TeamId + WHERE + GroupId = :GroupId AND GroupTeams.DeleteAt = 0` + + results := []*groupTeamJoin{} + _, err := s.GetMaster().Select(&results, sqlQuery, args) + if err != nil { + result.Err = appErrF(err.Error()) + return result + } + for _, result := range results { + groupSyncable := &model.GroupSyncable{ + SyncableId: result.TeamId, + GroupId: result.GroupId, + CanLeave: result.CanLeave, + AutoAdd: result.AutoAdd, + CreateAt: result.CreateAt, + DeleteAt: result.DeleteAt, + UpdateAt: 
result.UpdateAt, + Type: syncableType, + TeamDisplayName: result.TeamDisplayName, + TeamType: result.TeamType, + } + groupSyncables = append(groupSyncables, groupSyncable) + } + case model.GroupSyncableTypeChannel: + sqlQuery := ` + SELECT + GroupChannels.*, + Channels.DisplayName AS ChannelDisplayName, + Teams.DisplayName AS TeamDisplayName, + Channels.Type As ChannelType, + Teams.Type As TeamType, + Teams.Id AS TeamId + FROM + GroupChannels + JOIN Channels ON Channels.Id = GroupChannels.ChannelId + JOIN Teams ON Teams.Id = Channels.TeamId + WHERE + GroupId = :GroupId AND GroupChannels.DeleteAt = 0` + + results := []*groupChannelJoin{} + _, err := s.GetMaster().Select(&results, sqlQuery, args) + if err != nil { + result.Err = appErrF(err.Error()) + return result + } + for _, result := range results { + groupSyncable := &model.GroupSyncable{ + SyncableId: result.ChannelId, + GroupId: result.GroupId, + CanLeave: result.CanLeave, + AutoAdd: result.AutoAdd, + CreateAt: result.CreateAt, + DeleteAt: result.DeleteAt, + UpdateAt: result.UpdateAt, + Type: syncableType, + ChannelDisplayName: result.ChannelDisplayName, + ChannelType: result.ChannelType, + TeamDisplayName: result.TeamDisplayName, + TeamType: result.TeamType, + TeamID: result.TeamID, + } + groupSyncables = append(groupSyncables, groupSyncable) + } + } + + result.Data = groupSyncables + return result +} + +func (s *SqlSupplier) GroupUpdateGroupSyncable(ctx context.Context, groupSyncable *model.GroupSyncable, hints ...store.LayeredStoreHint) *store.LayeredStoreSupplierResult { + result := store.NewSupplierResult() + + retrievedGroupSyncable, err := s.getGroupSyncable(groupSyncable.GroupId, groupSyncable.SyncableId, groupSyncable.Type) + if err != nil { + if err == sql.ErrNoRows { + result.Err = model.NewAppError("SqlGroupStore.GroupUpdateGroupSyncable", "store.sql_group.no_rows", nil, err.Error(), http.StatusInternalServerError) + return result + } + result.Err = 
model.NewAppError("SqlGroupStore.GroupUpdateGroupSyncable", "store.select_error", nil, "GroupId="+groupSyncable.GroupId+", SyncableId="+groupSyncable.SyncableId+", SyncableType="+groupSyncable.Type.String()+", "+err.Error(), http.StatusInternalServerError)
		return result
	}

	if err := groupSyncable.IsValid(); err != nil {
		result.Err = err
		return result
	}

	// If updating DeleteAt it can only be to 0 (i.e. an un-delete); any other change is rejected.
	if groupSyncable.DeleteAt != retrievedGroupSyncable.DeleteAt && groupSyncable.DeleteAt != 0 {
		result.Err = model.NewAppError("SqlGroupStore.GroupUpdateGroupSyncable", "model.group.delete_at.app_error", nil, "", http.StatusInternalServerError)
		return result
	}

	// Reset these properties, don't update them based on input
	groupSyncable.CreateAt = retrievedGroupSyncable.CreateAt
	groupSyncable.UpdateAt = model.GetMillis()

	switch groupSyncable.Type {
	case model.GroupSyncableTypeTeam:
		_, err = s.GetMaster().Update(groupSyncableToGroupTeam(groupSyncable))
	case model.GroupSyncableTypeChannel:
		_, err = s.GetMaster().Update(groupSyncableToGroupChannel(groupSyncable))
	default:
		// BUG FIX: the AppError was previously discarded (never assigned to result.Err), so an
		// unknown syncable type fell through and returned a success result. It also interpolated
		// err.Error() even though err is necessarily nil here, which would panic.
		result.Err = model.NewAppError("SqlGroupStore.GroupUpdateGroupSyncable", "model.group_syncable.type.app_error", nil, "group_id="+groupSyncable.GroupId+", syncable_id="+groupSyncable.SyncableId, http.StatusInternalServerError)
		return result
	}

	if err != nil {
		result.Err = model.NewAppError("SqlGroupStore.GroupUpdateGroupSyncable", "store.update_error", nil, err.Error(), http.StatusInternalServerError)
		return result
	}

	result.Data = groupSyncable
	return result
}

// GroupDeleteGroupSyncable soft-deletes the group-team or group-channel association identified
// by (groupID, syncableID, syncableType) by stamping DeleteAt/UpdateAt. Returns
// store.sql_group.no_rows (404) when the association does not exist and
// store.sql_group.group_syncable_already_deleted (400) when it was already deleted.
func (s *SqlSupplier) GroupDeleteGroupSyncable(ctx context.Context, groupID string, syncableID string, syncableType model.GroupSyncableType, hints ...store.LayeredStoreHint) *store.LayeredStoreSupplierResult {
	result := store.NewSupplierResult()

	groupSyncable, err := s.getGroupSyncable(groupID, syncableID, syncableType)
	if err != nil {
		if err == sql.ErrNoRows {
			result.Err = model.NewAppError("SqlGroupStore.GroupDeleteGroupSyncable", "store.sql_group.no_rows", nil, "Id="+groupID+", "+err.Error(), http.StatusNotFound)
		} else {
			result.Err = model.NewAppError("SqlGroupStore.GroupDeleteGroupSyncable", "store.select_error", nil, err.Error(), http.StatusInternalServerError)
		}
		return result
	}

	if groupSyncable.DeleteAt != 0 {
		result.Err = model.NewAppError("SqlGroupStore.GroupDeleteGroupSyncable", "store.sql_group.group_syncable_already_deleted", nil, "group_id="+groupID+"syncable_id="+syncableID, http.StatusBadRequest)
		return result
	}

	// Stamp both DeleteAt and UpdateAt with the same instant so "deleted at" and "last updated"
	// agree. (Renamed from `time` to avoid shadowing the standard library package name.)
	now := model.GetMillis()
	groupSyncable.DeleteAt = now
	groupSyncable.UpdateAt = now

	switch groupSyncable.Type {
	case model.GroupSyncableTypeTeam:
		_, err = s.GetMaster().Update(groupSyncableToGroupTeam(groupSyncable))
	case model.GroupSyncableTypeChannel:
		_, err = s.GetMaster().Update(groupSyncableToGroupChannel(groupSyncable))
	default:
		// BUG FIX: as in GroupUpdateGroupSyncable, the AppError was discarded and err.Error()
		// was called on a nil error (panic). Assign the error and return.
		result.Err = model.NewAppError("SqlGroupStore.GroupDeleteGroupSyncable", "model.group_syncable.type.app_error", nil, "group_id="+groupSyncable.GroupId+", syncable_id="+groupSyncable.SyncableId, http.StatusInternalServerError)
		return result
	}

	if err != nil {
		result.Err = model.NewAppError("SqlGroupStore.GroupDeleteGroupSyncable", "store.update_error", nil, err.Error(), http.StatusInternalServerError)
		return result
	}

	result.Data = groupSyncable
	return result
}

// PendingAutoAddTeamMembers returns a slice of UserTeamIDPair that need newly created memberships
// based on the groups configurations.
//
// Typically since will be the last successful group sync time.
+func (s *SqlSupplier) PendingAutoAddTeamMembers(ctx context.Context, since int64, hints ...store.LayeredStoreHint) *store.LayeredStoreSupplierResult { + result := store.NewSupplierResult() + + sql := ` + SELECT + GroupMembers.UserId, GroupTeams.TeamId + FROM + GroupMembers + JOIN GroupTeams + ON GroupTeams.GroupId = GroupMembers.GroupId + JOIN UserGroups ON UserGroups.Id = GroupMembers.GroupId + JOIN Teams ON Teams.Id = GroupTeams.TeamId + LEFT OUTER JOIN TeamMembers + ON + TeamMembers.TeamId = GroupTeams.TeamId + AND TeamMembers.UserId = GroupMembers.UserId + WHERE + TeamMembers.UserId IS NULL + AND UserGroups.DeleteAt = 0 + AND GroupTeams.DeleteAt = 0 + AND GroupTeams.AutoAdd = true + AND GroupMembers.DeleteAt = 0 + AND Teams.DeleteAt = 0 + AND (GroupMembers.CreateAt >= :Since + OR GroupTeams.UpdateAt >= :Since)` + + var userTeamIDs []*model.UserTeamIDPair + + _, err := s.GetMaster().Select(&userTeamIDs, sql, map[string]interface{}{"Since": since}) + if err != nil { + result.Err = model.NewAppError("SqlGroupStore.PendingAutoAddTeamMembers", "store.select_error", nil, err.Error(), http.StatusInternalServerError) + } + + result.Data = userTeamIDs + + return result +} + +// PendingAutoAddChannelMembers returns a slice of UserChannelIDPair that need newly created memberships +// based on the groups configurations. +// +// Typically since will be the last successful group sync time. 
+func (s *SqlSupplier) PendingAutoAddChannelMembers(ctx context.Context, since int64, hints ...store.LayeredStoreHint) *store.LayeredStoreSupplierResult { + result := store.NewSupplierResult() + + sql := ` + SELECT + GroupMembers.UserId, GroupChannels.ChannelId + FROM + GroupMembers + JOIN GroupChannels ON GroupChannels.GroupId = GroupMembers.GroupId + JOIN UserGroups ON UserGroups.Id = GroupMembers.GroupId + JOIN Channels ON Channels.Id = GroupChannels.ChannelId + LEFT OUTER JOIN ChannelMemberHistory + ON + ChannelMemberHistory.ChannelId = GroupChannels.ChannelId + AND ChannelMemberHistory.UserId = GroupMembers.UserId + WHERE + ChannelMemberHistory.UserId IS NULL + AND ChannelMemberHistory.LeaveTime IS NULL + AND UserGroups.DeleteAt = 0 + AND GroupChannels.DeleteAt = 0 + AND GroupChannels.AutoAdd = true + AND GroupMembers.DeleteAt = 0 + AND Channels.DeleteAt = 0 + AND (GroupMembers.CreateAt >= :Since + OR GroupChannels.UpdateAt >= :Since)` + + var userChannelIDs []*model.UserChannelIDPair + + _, err := s.GetMaster().Select(&userChannelIDs, sql, map[string]interface{}{"Since": since}) + if err != nil { + result.Err = model.NewAppError("SqlGroupStore.PendingAutoAddChannelMembers", "store.select_error", nil, "", http.StatusInternalServerError) + } + + result.Data = userChannelIDs + + return result +} + +func groupSyncableToGroupTeam(groupSyncable *model.GroupSyncable) *groupTeam { + return &groupTeam{ + GroupSyncable: *groupSyncable, + TeamId: groupSyncable.SyncableId, + } +} + +func groupSyncableToGroupChannel(groupSyncable *model.GroupSyncable) *groupChannel { + return &groupChannel{ + GroupSyncable: *groupSyncable, + ChannelId: groupSyncable.SyncableId, + } +} diff --git a/store/sqlstore/supplier.go b/store/sqlstore/supplier.go index 2ea5ab030c..663e644e12 100644 --- a/store/sqlstore/supplier.go +++ b/store/sqlstore/supplier.go @@ -93,6 +93,7 @@ type SqlSupplierOldStores struct { role store.RoleStore scheme store.SchemeStore TermsOfService 
store.TermsOfServiceStore + group store.GroupStore UserTermsOfService store.UserTermsOfServiceStore } @@ -148,6 +149,7 @@ func NewSqlSupplier(settings model.SqlSettings, metrics einterfaces.MetricsInter initSqlSupplierReactions(supplier) initSqlSupplierRoles(supplier) initSqlSupplierSchemes(supplier) + initSqlSupplierGroups(supplier) err := supplier.GetMaster().CreateTablesIfNotExists() if err != nil { @@ -182,6 +184,8 @@ func NewSqlSupplier(settings model.SqlSettings, metrics einterfaces.MetricsInter supplier.oldStores.TermsOfService.(SqlTermsOfServiceStore).CreateIndexesIfNotExists() supplier.oldStores.UserTermsOfService.(SqlUserTermsOfServiceStore).CreateIndexesIfNotExists() + supplier.CreateIndexesIfNotExistsGroups() + supplier.oldStores.preference.(*SqlPreferenceStore).DeleteUnusedFeatures() return supplier @@ -1024,6 +1028,10 @@ func (ss *SqlSupplier) Scheme() store.SchemeStore { return ss.oldStores.scheme } +func (ss *SqlSupplier) Group() store.GroupStore { + return ss.oldStores.group +} + func (ss *SqlSupplier) DropAllTables() { ss.master.TruncateTables() } diff --git a/store/sqlstore/upgrade.go b/store/sqlstore/upgrade.go index 30fcd2de3e..183b77a2d1 100644 --- a/store/sqlstore/upgrade.go +++ b/store/sqlstore/upgrade.go @@ -532,6 +532,7 @@ func UpgradeDatabaseToVersion56(sqlStore SqlStore) { sqlStore.RemoveIndexIfExists("idx_users_firstname_lower", "lower(FirstName)") sqlStore.RemoveIndexIfExists("idx_users_lastname_lower", "lower(LastName)") } + saveSchemaVersion(sqlStore, VERSION_5_6_0) } diff --git a/store/store.go b/store/store.go index 7eb080cd6b..4124e48885 100644 --- a/store/store.go +++ b/store/store.go @@ -66,6 +66,7 @@ type Store interface { ChannelMemberHistory() ChannelMemberHistoryStore Plugin() PluginStore TermsOfService() TermsOfServiceStore + Group() GroupStore UserTermsOfService() UserTermsOfServiceStore MarkSystemRanUnitTests() Close() @@ -137,6 +138,7 @@ type ChannelStore interface { GetDeletedByName(team_id string, name string) 
StoreChannel GetDeleted(team_id string, offset int, limit int) StoreChannel GetChannels(teamId string, userId string, includeDeleted bool) StoreChannel + GetAllChannels(page, perPage int, includeDeleted bool) StoreChannel GetMoreChannels(teamId string, userId string, offset int, limit int) StoreChannel GetPublicChannelsForTeam(teamId string, offset int, limit int) StoreChannel GetPublicChannelsByIdsForTeam(teamId string, channelIds []string) StoreChannel @@ -168,6 +170,7 @@ type ChannelStore interface { GetMembersForUser(teamId string, userId string) StoreChannel AutocompleteInTeam(teamId string, term string, includeDeleted bool) StoreChannel AutocompleteInTeamForSearch(teamId string, userId string, term string, includeDeleted bool) StoreChannel + SearchAllChannels(term string, includeDeleted bool) StoreChannel SearchInTeam(teamId string, term string, includeDeleted bool) StoreChannel SearchMore(userId string, teamId string, term string) StoreChannel GetMembersByIds(channelId string, userIds []string) StoreChannel @@ -536,3 +539,27 @@ type UserTermsOfServiceStore interface { Save(userTermsOfService *model.UserTermsOfService) StoreChannel Delete(userId, termsOfServiceId string) StoreChannel } + +type GroupStore interface { + Create(group *model.Group) StoreChannel + Get(groupID string) StoreChannel + GetByRemoteID(remoteID string, groupSource model.GroupSource) StoreChannel + GetAllBySource(groupSource model.GroupSource) StoreChannel + Update(group *model.Group) StoreChannel + Delete(groupID string) StoreChannel + + GetMemberUsers(groupID string) StoreChannel + GetMemberUsersPage(groupID string, offset int, limit int) StoreChannel + GetMemberCount(groupID string) StoreChannel + CreateOrRestoreMember(groupID string, userID string) StoreChannel + DeleteMember(groupID string, userID string) StoreChannel + + CreateGroupSyncable(groupSyncable *model.GroupSyncable) StoreChannel + GetGroupSyncable(groupID string, syncableID string, syncableType model.GroupSyncableType) 
StoreChannel + GetAllGroupSyncablesByGroupId(groupID string, syncableType model.GroupSyncableType) StoreChannel + UpdateGroupSyncable(groupSyncable *model.GroupSyncable) StoreChannel + DeleteGroupSyncable(groupID string, syncableID string, syncableType model.GroupSyncableType) StoreChannel + + PendingAutoAddTeamMembers(minGroupMembersCreateAt int64) StoreChannel + PendingAutoAddChannelMembers(minGroupMembersCreateAt int64) StoreChannel +} diff --git a/store/storetest/channel_store.go b/store/storetest/channel_store.go index 7fc4190610..cebf6cdf42 100644 --- a/store/storetest/channel_store.go +++ b/store/storetest/channel_store.go @@ -21,6 +21,17 @@ type SqlSupplier interface { GetMaster() *gorp.DbMap } +func cleanupChannels(t *testing.T, ss store.Store) { + result := <-ss.Channel().GetAllChannels(0, 100000, true) + if result.Err != nil { + t.Fatal("error cleaning all channels") + } + list := result.Data.(*model.ChannelListWithTeamData) + for _, channel := range *list { + ss.Channel().PermanentDelete(channel.Id) + } +} + func TestChannelStore(t *testing.T, ss store.Store, s SqlSupplier) { createDefaultRoles(t, ss) @@ -40,6 +51,7 @@ func TestChannelStore(t *testing.T, ss store.Store, s SqlSupplier) { t.Run("ChannelMemberStore", func(t *testing.T) { testChannelMemberStore(t, ss) }) t.Run("ChannelDeleteMemberStore", func(t *testing.T) { testChannelDeleteMemberStore(t, ss) }) t.Run("GetChannels", func(t *testing.T) { testChannelStoreGetChannels(t, ss) }) + t.Run("GetAllChannels", func(t *testing.T) { testChannelStoreGetAllChannels(t, ss) }) t.Run("GetMoreChannels", func(t *testing.T) { testChannelStoreGetMoreChannels(t, ss) }) t.Run("GetPublicChannelsForTeam", func(t *testing.T) { testChannelStoreGetPublicChannelsForTeam(t, ss) }) t.Run("GetPublicChannelsByIdsForTeam", func(t *testing.T) { testChannelStoreGetPublicChannelsByIdsForTeam(t, ss) }) @@ -53,6 +65,7 @@ func TestChannelStore(t *testing.T, ss store.Store, s SqlSupplier) { t.Run("GetMemberCount", func(t 
*testing.T) { testGetMemberCount(t, ss) }) t.Run("SearchMore", func(t *testing.T) { testChannelStoreSearchMore(t, ss) }) t.Run("SearchInTeam", func(t *testing.T) { testChannelStoreSearchInTeam(t, ss) }) + t.Run("SearchAllChannels", func(t *testing.T) { testChannelStoreSearchAllChannels(t, ss) }) t.Run("AutocompleteInTeamForSearch", func(t *testing.T) { testChannelStoreAutocompleteInTeamForSearch(t, ss) }) t.Run("GetMembersByIds", func(t *testing.T) { testChannelStoreGetMembersByIds(t, ss) }) t.Run("AnalyticsDeletedTypeCount", func(t *testing.T) { testChannelStoreAnalyticsDeletedTypeCount(t, ss) }) @@ -959,6 +972,81 @@ func testChannelStoreGetChannels(t *testing.T, ss store.Store) { ss.Channel().InvalidateAllChannelMembersForUser(m1.UserId) } +func testChannelStoreGetAllChannels(t *testing.T, ss store.Store) { + cleanupChannels(t, ss) + + t1 := model.Team{} + t1.DisplayName = "Name" + t1.Name = model.NewId() + t1.Email = MakeEmail() + t1.Type = model.TEAM_OPEN + store.Must(ss.Team().Save(&t1)) + + t2 := model.Team{} + t2.DisplayName = "Name2" + t2.Name = model.NewId() + t2.Email = MakeEmail() + t2.Type = model.TEAM_OPEN + store.Must(ss.Team().Save(&t2)) + + c1 := model.Channel{} + c1.TeamId = t1.Id + c1.DisplayName = "Channel1" + model.NewId() + c1.Name = "zz" + model.NewId() + "b" + c1.Type = model.CHANNEL_OPEN + store.Must(ss.Channel().Save(&c1, -1)) + + c2 := model.Channel{} + c2.TeamId = t1.Id + c2.DisplayName = "Channel2" + model.NewId() + c2.Name = "zz" + model.NewId() + "b" + c2.Type = model.CHANNEL_OPEN + store.Must(ss.Channel().Save(&c2, -1)) + c2.DeleteAt = model.GetMillis() + c2.UpdateAt = c2.DeleteAt + store.Must(ss.Channel().Delete(c2.Id, c2.DeleteAt)) + + c3 := model.Channel{} + c3.TeamId = t2.Id + c3.DisplayName = "Channel3" + model.NewId() + c3.Name = "zz" + model.NewId() + "b" + c3.Type = model.CHANNEL_PRIVATE + store.Must(ss.Channel().Save(&c3, -1)) + + store.Must(ss.Channel().CreateDirectChannel(model.NewId(), model.NewId())) + + userIds := 
[]string{model.NewId(), model.NewId(), model.NewId()} + + c5 := model.Channel{} + c5.Name = model.GetGroupNameFromUserIds(userIds) + c5.DisplayName = "GroupChannel" + model.NewId() + c5.Name = "zz" + model.NewId() + "b" + c5.Type = model.CHANNEL_GROUP + store.Must(ss.Channel().Save(&c5, -1)) + + cresult := <-ss.Channel().GetAllChannels(0, 10, false) + list := cresult.Data.(*model.ChannelListWithTeamData) + assert.Len(t, *list, 2) + assert.Equal(t, (*list)[0].Id, c1.Id) + assert.Equal(t, (*list)[0].TeamDisplayName, "Name") + assert.Equal(t, (*list)[1].Id, c3.Id) + assert.Equal(t, (*list)[1].TeamDisplayName, "Name2") + + cresult = <-ss.Channel().GetAllChannels(0, 10, true) + list = cresult.Data.(*model.ChannelListWithTeamData) + assert.Len(t, *list, 3) + assert.Equal(t, (*list)[0].Id, c1.Id) + assert.Equal(t, (*list)[0].TeamDisplayName, "Name") + assert.Equal(t, (*list)[1].Id, c2.Id) + assert.Equal(t, (*list)[2].Id, c3.Id) + + cresult = <-ss.Channel().GetAllChannels(0, 1, true) + list = cresult.Data.(*model.ChannelListWithTeamData) + assert.Len(t, *list, 1) + assert.Equal(t, (*list)[0].Id, c1.Id) + assert.Equal(t, (*list)[0].TeamDisplayName, "Name") +} + func testChannelStoreGetMoreChannels(t *testing.T, ss store.Store) { teamId := model.NewId() otherTeamId := model.NewId() @@ -2006,6 +2094,184 @@ func testChannelStoreSearchInTeam(t *testing.T, ss store.Store) { } } +func testChannelStoreSearchAllChannels(t *testing.T, ss store.Store) { + cleanupChannels(t, ss) + + t1 := model.Team{} + t1.DisplayName = "Name" + t1.Name = model.NewId() + t1.Email = MakeEmail() + t1.Type = model.TEAM_OPEN + store.Must(ss.Team().Save(&t1)) + + t2 := model.Team{} + t2.DisplayName = "Name2" + t2.Name = model.NewId() + t2.Email = MakeEmail() + t2.Type = model.TEAM_OPEN + store.Must(ss.Team().Save(&t2)) + + o1 := model.Channel{ + TeamId: t1.Id, + DisplayName: "ChannelA", + Name: "zz" + model.NewId() + "b", + Type: model.CHANNEL_OPEN, + } + store.Must(ss.Channel().Save(&o1, -1)) + + o2 := 
model.Channel{ + TeamId: t2.Id, + DisplayName: "ChannelA", + Name: "zz" + model.NewId() + "b", + Type: model.CHANNEL_OPEN, + } + store.Must(ss.Channel().Save(&o2, -1)) + + m1 := model.ChannelMember{ + ChannelId: o1.Id, + UserId: model.NewId(), + NotifyProps: model.GetDefaultChannelNotifyProps(), + } + store.Must(ss.Channel().SaveMember(&m1)) + + m2 := model.ChannelMember{ + ChannelId: o1.Id, + UserId: model.NewId(), + NotifyProps: model.GetDefaultChannelNotifyProps(), + } + store.Must(ss.Channel().SaveMember(&m2)) + + m3 := model.ChannelMember{ + ChannelId: o2.Id, + UserId: model.NewId(), + NotifyProps: model.GetDefaultChannelNotifyProps(), + } + store.Must(ss.Channel().SaveMember(&m3)) + + o3 := model.Channel{ + TeamId: t1.Id, + DisplayName: "ChannelA (alternate)", + Name: "zz" + model.NewId() + "b", + Type: model.CHANNEL_OPEN, + } + store.Must(ss.Channel().Save(&o3, -1)) + + o4 := model.Channel{ + TeamId: t1.Id, + DisplayName: "ChannelB", + Name: "zz" + model.NewId() + "b", + Type: model.CHANNEL_PRIVATE, + } + store.Must(ss.Channel().Save(&o4, -1)) + + o5 := model.Channel{ + TeamId: t1.Id, + DisplayName: "ChannelC", + Name: "zz" + model.NewId() + "b", + Type: model.CHANNEL_PRIVATE, + } + store.Must(ss.Channel().Save(&o5, -1)) + + o6 := model.Channel{ + TeamId: t1.Id, + DisplayName: "Off-Topic", + Name: "off-topic", + Type: model.CHANNEL_OPEN, + } + store.Must(ss.Channel().Save(&o6, -1)) + + o7 := model.Channel{ + TeamId: t1.Id, + DisplayName: "Off-Set", + Name: "off-set", + Type: model.CHANNEL_OPEN, + } + store.Must(ss.Channel().Save(&o7, -1)) + + o8 := model.Channel{ + TeamId: t1.Id, + DisplayName: "Off-Limit", + Name: "off-limit", + Type: model.CHANNEL_PRIVATE, + } + store.Must(ss.Channel().Save(&o8, -1)) + + o9 := model.Channel{ + TeamId: t1.Id, + DisplayName: "Town Square", + Name: "town-square", + Type: model.CHANNEL_OPEN, + } + store.Must(ss.Channel().Save(&o9, -1)) + + o10 := model.Channel{ + TeamId: t1.Id, + DisplayName: "The", + Name: "the", + Type: 
model.CHANNEL_OPEN, + } + store.Must(ss.Channel().Save(&o10, -1)) + + o11 := model.Channel{ + TeamId: t1.Id, + DisplayName: "Native Mobile Apps", + Name: "native-mobile-apps", + Type: model.CHANNEL_OPEN, + } + store.Must(ss.Channel().Save(&o11, -1)) + + o12 := model.Channel{ + TeamId: t1.Id, + DisplayName: "ChannelZ", + Purpose: "This can now be searchable!", + Name: "with-purpose", + Type: model.CHANNEL_OPEN, + } + store.Must(ss.Channel().Save(&o12, -1)) + + o13 := model.Channel{ + TeamId: t1.Id, + DisplayName: "ChannelA (deleted)", + Name: model.NewId(), + Type: model.CHANNEL_OPEN, + } + store.Must(ss.Channel().Save(&o13, -1)) + o13.DeleteAt = model.GetMillis() + o13.UpdateAt = o13.DeleteAt + store.Must(ss.Channel().Delete(o13.Id, o13.DeleteAt)) + + testCases := []struct { + Description string + Term string + IncludeDeleted bool + ExpectedResults *model.ChannelList + }{ + {"ChannelA", "ChannelA", false, &model.ChannelList{&o1, &o2, &o3}}, + {"ChannelA, include deleted", "ChannelA", true, &model.ChannelList{&o1, &o2, &o3, &o13}}, + {"empty string", "", false, &model.ChannelList{&o1, &o2, &o3, &o4, &o5, &o12, &o11, &o8, &o7, &o6, &o10, &o9}}, + {"no matches", "blargh", false, &model.ChannelList{}}, + {"prefix", "off-", false, &model.ChannelList{&o8, &o7, &o6}}, + {"full match with dash", "off-topic", false, &model.ChannelList{&o6}}, + {"town square", "town square", false, &model.ChannelList{&o9}}, + {"the in name", "the", false, &model.ChannelList{&o10}}, + {"Mobile", "Mobile", false, &model.ChannelList{&o11}}, + {"search purpose", "now searchable", false, &model.ChannelList{&o12}}, + {"pipe ignored", "town square |", false, &model.ChannelList{&o9}}, + } + + for _, testCase := range testCases { + t.Run(testCase.Description, func(t *testing.T) { + result := <-ss.Channel().SearchAllChannels(testCase.Term, testCase.IncludeDeleted) + require.Nil(t, result.Err) + channels := result.Data.(*model.ChannelListWithTeamData) + require.Equal(t, len(*channels), 
len(*testCase.ExpectedResults)) + for i, expected := range *testCase.ExpectedResults { + require.Equal(t, (*channels)[i].Id, expected.Id) + } + }) + } +} + func testChannelStoreAutocompleteInTeamForSearch(t *testing.T, ss store.Store) { u1 := &model.User{} u1.Email = MakeEmail() diff --git a/store/storetest/group_supplier.go b/store/storetest/group_supplier.go new file mode 100644 index 0000000000..9cee84e945 --- /dev/null +++ b/store/storetest/group_supplier.go @@ -0,0 +1,1318 @@ +// Copyright (c) 2018-present Mattermost, Inc. All Rights Reserved. +// See License.txt for license information. + +package storetest + +import ( + "strings" + "testing" + + "github.com/mattermost/mattermost-server/model" + "github.com/mattermost/mattermost-server/store" + "github.com/stretchr/testify/assert" +) + +func TestGroupStore(t *testing.T, ss store.Store) { + t.Run("Create", func(t *testing.T) { testGroupStoreCreate(t, ss) }) + t.Run("Get", func(t *testing.T) { testGroupStoreGet(t, ss) }) + t.Run("GetByRemoteID", func(t *testing.T) { testGroupStoreGetByRemoteID(t, ss) }) + t.Run("GetAllBySource", func(t *testing.T) { testGroupStoreGetAllByType(t, ss) }) + t.Run("Update", func(t *testing.T) { testGroupStoreUpdate(t, ss) }) + t.Run("Delete", func(t *testing.T) { testGroupStoreDelete(t, ss) }) + + t.Run("GetMemberUsers", func(t *testing.T) { testGroupGetMemberUsers(t, ss) }) + t.Run("GetMemberUsersPage", func(t *testing.T) { testGroupGetMemberUsersPage(t, ss) }) + t.Run("CreateOrRestoreMember", func(t *testing.T) { testGroupCreateOrRestoreMember(t, ss) }) + t.Run("DeleteMember", func(t *testing.T) { testGroupDeleteMember(t, ss) }) + + t.Run("CreateGroupSyncable", func(t *testing.T) { testCreateGroupSyncable(t, ss) }) + t.Run("GetGroupSyncable", func(t *testing.T) { testGetGroupSyncable(t, ss) }) + t.Run("GetAllGroupSyncablesByGroupId", func(t *testing.T) { testGetAllGroupSyncablesByGroup(t, ss) }) + t.Run("UpdateGroupSyncable", func(t *testing.T) { testUpdateGroupSyncable(t, ss) }) 
+ t.Run("DeleteGroupSyncable", func(t *testing.T) { testDeleteGroupSyncable(t, ss) }) + + t.Run("PendingAutoAddTeamMembers", func(t *testing.T) { testPendingAutoAddTeamMembers(t, ss) }) + t.Run("PendingAutoAddChannelMembers", func(t *testing.T) { testPendingAutoAddChannelMembers(t, ss) }) +} + +func testGroupStoreCreate(t *testing.T, ss store.Store) { + // Save a new group + g1 := &model.Group{ + Name: model.NewId(), + DisplayName: model.NewId(), + Source: model.GroupSourceLdap, + Description: model.NewId(), + RemoteId: model.NewId(), + } + + // Happy path + res1 := <-ss.Group().Create(g1) + assert.Nil(t, res1.Err) + d1 := res1.Data.(*model.Group) + assert.Len(t, d1.Id, 26) + assert.Equal(t, g1.Name, d1.Name) + assert.Equal(t, g1.DisplayName, d1.DisplayName) + assert.Equal(t, g1.Description, d1.Description) + assert.Equal(t, g1.RemoteId, d1.RemoteId) + assert.NotZero(t, d1.CreateAt) + assert.NotZero(t, d1.UpdateAt) + assert.Zero(t, d1.DeleteAt) + + // Requires name and display name + g2 := &model.Group{ + Name: "", + DisplayName: model.NewId(), + Source: model.GroupSourceLdap, + RemoteId: model.NewId(), + } + res2 := <-ss.Group().Create(g2) + assert.Nil(t, res2.Data) + assert.NotNil(t, res2.Err) + assert.Equal(t, res2.Err.Id, "model.group.name.app_error") + + g2.Name = model.NewId() + g2.DisplayName = "" + res3 := <-ss.Group().Create(g2) + assert.Nil(t, res3.Data) + assert.NotNil(t, res3.Err) + assert.Equal(t, res3.Err.Id, "model.group.display_name.app_error") + + // Won't accept a duplicate name + g4 := &model.Group{ + Name: model.NewId(), + DisplayName: model.NewId(), + Source: model.GroupSourceLdap, + RemoteId: model.NewId(), + } + res5 := <-ss.Group().Create(g4) + assert.Nil(t, res5.Err) + g4b := &model.Group{ + Name: g4.Name, + DisplayName: model.NewId(), + Source: model.GroupSourceLdap, + RemoteId: model.NewId(), + } + res5b := <-ss.Group().Create(g4b) + assert.Nil(t, res5b.Data) + assert.Equal(t, res5b.Err.Id, "store.sql_group.unique_constraint") + + // 
Fields cannot be greater than max values + g5 := &model.Group{ + Name: strings.Repeat("x", model.GroupNameMaxLength), + DisplayName: strings.Repeat("x", model.GroupDisplayNameMaxLength), + Description: strings.Repeat("x", model.GroupDescriptionMaxLength), + Source: model.GroupSourceLdap, + RemoteId: model.NewId(), + } + assert.Nil(t, g5.IsValidForCreate()) + + g5.Name = g5.Name + "x" + assert.Equal(t, g5.IsValidForCreate().Id, "model.group.name.app_error") + g5.Name = model.NewId() + assert.Nil(t, g5.IsValidForCreate()) + + g5.DisplayName = g5.DisplayName + "x" + assert.Equal(t, g5.IsValidForCreate().Id, "model.group.display_name.app_error") + g5.DisplayName = model.NewId() + assert.Nil(t, g5.IsValidForCreate()) + + g5.Description = g5.Description + "x" + assert.Equal(t, g5.IsValidForCreate().Id, "model.group.description.app_error") + g5.Description = model.NewId() + assert.Nil(t, g5.IsValidForCreate()) + + // Must use a valid type + g6 := &model.Group{ + Name: model.NewId(), + DisplayName: model.NewId(), + Description: model.NewId(), + Source: model.GroupSource("fake"), + RemoteId: model.NewId(), + } + assert.Equal(t, g6.IsValidForCreate().Id, "model.group.source.app_error") +} + +func testGroupStoreGet(t *testing.T, ss store.Store) { + // Create a group + g1 := &model.Group{ + Name: model.NewId(), + DisplayName: model.NewId(), + Description: model.NewId(), + Source: model.GroupSourceLdap, + RemoteId: model.NewId(), + } + res1 := <-ss.Group().Create(g1) + assert.Nil(t, res1.Err) + d1 := res1.Data.(*model.Group) + assert.Len(t, d1.Id, 26) + + // Get the group + res2 := <-ss.Group().Get(d1.Id) + assert.Nil(t, res2.Err) + d2 := res2.Data.(*model.Group) + assert.Equal(t, d1.Id, d2.Id) + assert.Equal(t, d1.Name, d2.Name) + assert.Equal(t, d1.DisplayName, d2.DisplayName) + assert.Equal(t, d1.Description, d2.Description) + assert.Equal(t, d1.RemoteId, d2.RemoteId) + assert.Equal(t, d1.CreateAt, d2.CreateAt) + assert.Equal(t, d1.UpdateAt, d2.UpdateAt) + assert.Equal(t, 
d1.DeleteAt, d2.DeleteAt) + + // Get an invalid group + res3 := <-ss.Group().Get(model.NewId()) + assert.NotNil(t, res3.Err) + assert.Equal(t, res3.Err.Id, "store.sql_group.no_rows") +} + +func testGroupStoreGetByRemoteID(t *testing.T, ss store.Store) { + // Create a group + g1 := &model.Group{ + Name: model.NewId(), + DisplayName: model.NewId(), + Description: model.NewId(), + Source: model.GroupSourceLdap, + RemoteId: model.NewId(), + } + res1 := <-ss.Group().Create(g1) + assert.Nil(t, res1.Err) + d1 := res1.Data.(*model.Group) + assert.Len(t, d1.Id, 26) + + // Get the group + res2 := <-ss.Group().GetByRemoteID(d1.RemoteId, model.GroupSourceLdap) + assert.Nil(t, res2.Err) + d2 := res2.Data.(*model.Group) + assert.Equal(t, d1.Id, d2.Id) + assert.Equal(t, d1.Name, d2.Name) + assert.Equal(t, d1.DisplayName, d2.DisplayName) + assert.Equal(t, d1.Description, d2.Description) + assert.Equal(t, d1.RemoteId, d2.RemoteId) + assert.Equal(t, d1.CreateAt, d2.CreateAt) + assert.Equal(t, d1.UpdateAt, d2.UpdateAt) + assert.Equal(t, d1.DeleteAt, d2.DeleteAt) + + // Get an invalid group + res3 := <-ss.Group().GetByRemoteID(model.NewId(), model.GroupSource("fake")) + assert.NotNil(t, res3.Err) + assert.Equal(t, res3.Err.Id, "store.sql_group.no_rows") +} + +func testGroupStoreGetAllByType(t *testing.T, ss store.Store) { + numGroups := 10 + + groups := []*model.Group{} + + // Create groups + for i := 0; i < numGroups; i++ { + g := &model.Group{ + Name: model.NewId(), + DisplayName: model.NewId(), + Description: model.NewId(), + Source: model.GroupSourceLdap, + RemoteId: model.NewId(), + } + groups = append(groups, g) + res := <-ss.Group().Create(g) + assert.Nil(t, res.Err) + } + + // Returns all the groups + res1 := <-ss.Group().GetAllBySource(model.GroupSourceLdap) + d1 := res1.Data.([]*model.Group) + assert.Condition(t, func() bool { return len(d1) >= numGroups }) + for _, expectedGroup := range groups { + present := false + for _, dbGroup := range d1 { + if dbGroup.Id == 
expectedGroup.Id { + present = true + break + } + } + assert.True(t, present) + } +} + +func testGroupStoreUpdate(t *testing.T, ss store.Store) { + // Save a new group + g1 := &model.Group{ + Name: "g1-test", + DisplayName: model.NewId(), + Source: model.GroupSourceLdap, + Description: model.NewId(), + RemoteId: model.NewId(), + } + + // Create a group + res := <-ss.Group().Create(g1) + assert.Nil(t, res.Err) + d1 := res.Data.(*model.Group) + + // Update happy path + g1Update := &model.Group{} + *g1Update = *g1 + g1Update.Name = model.NewId() + g1Update.DisplayName = model.NewId() + g1Update.Description = model.NewId() + g1Update.RemoteId = model.NewId() + + res2 := <-ss.Group().Update(g1Update) + assert.Nil(t, res2.Err) + ud1 := res2.Data.(*model.Group) + // Not changed... + assert.Equal(t, d1.Id, ud1.Id) + assert.Equal(t, d1.CreateAt, ud1.CreateAt) + assert.Equal(t, d1.Source, ud1.Source) + // Still zero... + assert.Zero(t, ud1.DeleteAt) + // Updated... + assert.Equal(t, g1Update.Name, ud1.Name) + assert.Equal(t, g1Update.DisplayName, ud1.DisplayName) + assert.Equal(t, g1Update.Description, ud1.Description) + assert.Equal(t, g1Update.RemoteId, ud1.RemoteId) + + // Requires name and display name + res3 := <-ss.Group().Update(&model.Group{ + Id: d1.Id, + Name: "", + DisplayName: model.NewId(), + Source: model.GroupSourceLdap, + RemoteId: model.NewId(), + Description: model.NewId(), + }) + assert.Nil(t, res3.Data) + assert.NotNil(t, res3.Err) + assert.Equal(t, res3.Err.Id, "model.group.name.app_error") + + res4 := <-ss.Group().Update(&model.Group{ + Id: d1.Id, + Name: model.NewId(), + DisplayName: "", + Source: model.GroupSourceLdap, + RemoteId: model.NewId(), + }) + assert.Nil(t, res4.Data) + assert.NotNil(t, res4.Err) + assert.Equal(t, res4.Err.Id, "model.group.display_name.app_error") + + // Create another Group + g2 := &model.Group{ + Name: model.NewId(), + DisplayName: model.NewId(), + Source: model.GroupSourceLdap, + Description: model.NewId(), + RemoteId: 
model.NewId(), + } + res5 := <-ss.Group().Create(g2) + assert.Nil(t, res5.Err) + d2 := res5.Data.(*model.Group) + + // Can't update the name to be a duplicate of an existing group's name + res6 := <-ss.Group().Update(&model.Group{ + Id: d2.Id, + Name: g1Update.Name, + DisplayName: model.NewId(), + Source: model.GroupSourceLdap, + Description: model.NewId(), + RemoteId: model.NewId(), + }) + assert.Equal(t, res6.Err.Id, "store.update_error") + + // Cannot update CreateAt + someVal := model.GetMillis() + d1.CreateAt = someVal + res7 := <-ss.Group().Update(d1) + d3 := res7.Data.(*model.Group) + assert.NotEqual(t, someVal, d3.CreateAt) + + // Cannot update DeleteAt to non-zero + d1.DeleteAt = 1 + res9 := <-ss.Group().Update(d1) + assert.Equal(t, "model.group.delete_at.app_error", res9.Err.Id) + + //...except for 0 for DeleteAt + d1.DeleteAt = 0 + res8 := <-ss.Group().Update(d1) + assert.Nil(t, res8.Err) + d4 := res8.Data.(*model.Group) + assert.Zero(t, d4.DeleteAt) +} + +func testGroupStoreDelete(t *testing.T, ss store.Store) { + // Save a group + g1 := &model.Group{ + Name: model.NewId(), + DisplayName: model.NewId(), + Description: model.NewId(), + Source: model.GroupSourceLdap, + RemoteId: model.NewId(), + } + + res1 := <-ss.Group().Create(g1) + assert.Nil(t, res1.Err) + d1 := res1.Data.(*model.Group) + assert.Len(t, d1.Id, 26) + + // Check the group is retrievable + res2 := <-ss.Group().Get(d1.Id) + assert.Nil(t, res2.Err) + + // Get the before count + res7 := <-ss.Group().GetAllBySource(model.GroupSourceLdap) + d7 := res7.Data.([]*model.Group) + beforeCount := len(d7) + + // Delete the group + res3 := <-ss.Group().Delete(d1.Id) + assert.Nil(t, res3.Err) + + // Check the group is deleted + res4 := <-ss.Group().Get(d1.Id) + d4 := res4.Data.(*model.Group) + assert.NotZero(t, d4.DeleteAt) + + // Check the after count + res5 := <-ss.Group().GetAllBySource(model.GroupSourceLdap) + d5 := res5.Data.([]*model.Group) + afterCount := len(d5) + assert.Condition(t, func() bool 
{ return beforeCount == afterCount+1 }) + + // Try and delete a nonexistent group + res6 := <-ss.Group().Delete(model.NewId()) + assert.NotNil(t, res6.Err) + assert.Equal(t, res6.Err.Id, "store.sql_group.no_rows") + + // Cannot delete again + res8 := <-ss.Group().Delete(d1.Id) + assert.Equal(t, res8.Err.Id, "store.sql_group.no_rows") +} + +func testGroupGetMemberUsers(t *testing.T, ss store.Store) { + // Save a group + g1 := &model.Group{ + Name: model.NewId(), + DisplayName: model.NewId(), + Description: model.NewId(), + Source: model.GroupSourceLdap, + RemoteId: model.NewId(), + } + res := <-ss.Group().Create(g1) + assert.Nil(t, res.Err) + group := res.Data.(*model.Group) + + u1 := &model.User{ + Email: MakeEmail(), + Username: model.NewId(), + } + res = <-ss.User().Save(u1) + assert.Nil(t, res.Err) + user1 := res.Data.(*model.User) + + res = <-ss.Group().CreateOrRestoreMember(group.Id, user1.Id) + assert.Nil(t, res.Err) + + u2 := &model.User{ + Email: MakeEmail(), + Username: model.NewId(), + } + res = <-ss.User().Save(u2) + assert.Nil(t, res.Err) + user2 := res.Data.(*model.User) + + res = <-ss.Group().CreateOrRestoreMember(group.Id, user2.Id) + assert.Nil(t, res.Err) + + // Check returns members + res = <-ss.Group().GetMemberUsers(group.Id) + assert.Nil(t, res.Err) + groupMembers := res.Data.([]*model.User) + assert.Equal(t, 2, len(groupMembers)) + + // Check madeup id + res = <-ss.Group().GetMemberUsers(model.NewId()) + assert.Equal(t, 0, len(res.Data.([]*model.User))) + + // Delete a member + <-ss.Group().DeleteMember(group.Id, user1.Id) + + // Should not return deleted members + res = <-ss.Group().GetMemberUsers(group.Id) + groupMembers = res.Data.([]*model.User) + assert.Equal(t, 1, len(groupMembers)) +} + +func testGroupGetMemberUsersPage(t *testing.T, ss store.Store) { + // Save a group + g1 := &model.Group{ + Name: model.NewId(), + DisplayName: model.NewId(), + Description: model.NewId(), + Source: model.GroupSourceLdap, + RemoteId: model.NewId(), + } + 
res := <-ss.Group().Create(g1) + assert.Nil(t, res.Err) + group := res.Data.(*model.Group) + + u1 := &model.User{ + Email: MakeEmail(), + Username: model.NewId(), + } + res = <-ss.User().Save(u1) + assert.Nil(t, res.Err) + user1 := res.Data.(*model.User) + + res = <-ss.Group().CreateOrRestoreMember(group.Id, user1.Id) + assert.Nil(t, res.Err) + + u2 := &model.User{ + Email: MakeEmail(), + Username: model.NewId(), + } + res = <-ss.User().Save(u2) + assert.Nil(t, res.Err) + user2 := res.Data.(*model.User) + + res = <-ss.Group().CreateOrRestoreMember(group.Id, user2.Id) + assert.Nil(t, res.Err) + + // Check returns members + res = <-ss.Group().GetMemberUsersPage(group.Id, 0, 100) + assert.Nil(t, res.Err) + groupMembers := res.Data.([]*model.User) + assert.Equal(t, 2, len(groupMembers)) + + // Check page 1 + res = <-ss.Group().GetMemberUsersPage(group.Id, 0, 1) + assert.Nil(t, res.Err) + groupMembers = res.Data.([]*model.User) + assert.Equal(t, 1, len(groupMembers)) + assert.Equal(t, user2.Id, groupMembers[0].Id) + + // Check page 2 + res = <-ss.Group().GetMemberUsersPage(group.Id, 1, 1) + assert.Nil(t, res.Err) + groupMembers = res.Data.([]*model.User) + assert.Equal(t, 1, len(groupMembers)) + assert.Equal(t, user1.Id, groupMembers[0].Id) + + // Check madeup id + res = <-ss.Group().GetMemberUsersPage(model.NewId(), 0, 100) + assert.Equal(t, 0, len(res.Data.([]*model.User))) + + // Delete a member + <-ss.Group().DeleteMember(group.Id, user1.Id) + + // Should not return deleted members + res = <-ss.Group().GetMemberUsersPage(group.Id, 0, 100) + groupMembers = res.Data.([]*model.User) + assert.Equal(t, 1, len(groupMembers)) +} + +func testGroupCreateOrRestoreMember(t *testing.T, ss store.Store) { + // Create group + g1 := &model.Group{ + Name: model.NewId(), + DisplayName: model.NewId(), + Source: model.GroupSourceLdap, + RemoteId: model.NewId(), + } + res1 := <-ss.Group().Create(g1) + assert.Nil(t, res1.Err) + group := res1.Data.(*model.Group) + + // Create user + u1 := 
&model.User{ + Email: MakeEmail(), + Username: model.NewId(), + } + res2 := <-ss.User().Save(u1) + assert.Nil(t, res2.Err) + user := res2.Data.(*model.User) + + // Happy path + res3 := <-ss.Group().CreateOrRestoreMember(group.Id, user.Id) + assert.Nil(t, res3.Err) + d2 := res3.Data.(*model.GroupMember) + assert.Equal(t, d2.GroupId, group.Id) + assert.Equal(t, d2.UserId, user.Id) + assert.NotZero(t, d2.CreateAt) + assert.Zero(t, d2.DeleteAt) + + // Duplicate composite key (GroupId, UserId) + res4 := <-ss.Group().CreateOrRestoreMember(group.Id, user.Id) + assert.Equal(t, res4.Err.Id, "store.sql_group.uniqueness_error") + + // Invalid GroupId + res6 := <-ss.Group().CreateOrRestoreMember(model.NewId(), user.Id) + assert.Equal(t, res6.Err.Id, "store.insert_error") + + // Restores a deleted member + res := <-ss.Group().CreateOrRestoreMember(group.Id, user.Id) + assert.NotNil(t, res.Err) + + res = <-ss.Group().DeleteMember(group.Id, user.Id) + assert.Nil(t, res.Err) + + res = <-ss.Group().GetMemberUsers(group.Id) + beforeRestoreCount := len(res.Data.([]*model.User)) + + res = <-ss.Group().CreateOrRestoreMember(group.Id, user.Id) + assert.Nil(t, res.Err) + + res = <-ss.Group().GetMemberUsers(group.Id) + afterRestoreCount := len(res.Data.([]*model.User)) + + assert.Equal(t, beforeRestoreCount+1, afterRestoreCount) +} + +func testGroupDeleteMember(t *testing.T, ss store.Store) { + // Create group + g1 := &model.Group{ + Name: model.NewId(), + DisplayName: model.NewId(), + Source: model.GroupSourceLdap, + RemoteId: model.NewId(), + } + res1 := <-ss.Group().Create(g1) + assert.Nil(t, res1.Err) + group := res1.Data.(*model.Group) + + // Create user + u1 := &model.User{ + Email: MakeEmail(), + Username: model.NewId(), + } + res2 := <-ss.User().Save(u1) + assert.Nil(t, res2.Err) + user := res2.Data.(*model.User) + + // Create member + res3 := <-ss.Group().CreateOrRestoreMember(group.Id, user.Id) + assert.Nil(t, res3.Err) + d1 := res3.Data.(*model.GroupMember) + + // Happy path + 
res4 := <-ss.Group().DeleteMember(group.Id, user.Id) + assert.Nil(t, res4.Err) + d2 := res4.Data.(*model.GroupMember) + assert.Equal(t, d2.GroupId, group.Id) + assert.Equal(t, d2.UserId, user.Id) + assert.Equal(t, d2.CreateAt, d1.CreateAt) + assert.NotZero(t, d2.DeleteAt) + + // Delete an already deleted member + res5 := <-ss.Group().DeleteMember(group.Id, user.Id) + assert.Equal(t, res5.Err.Id, "store.sql_group.no_rows") + + // Delete with non-existent User + res8 := <-ss.Group().DeleteMember(group.Id, model.NewId()) + assert.Equal(t, res8.Err.Id, "store.sql_group.no_rows") + + // Delete non-existent Group + res9 := <-ss.Group().DeleteMember(model.NewId(), user.Id) + assert.Equal(t, res9.Err.Id, "store.sql_group.no_rows") + } + + func testCreateGroupSyncable(t *testing.T, ss store.Store) { + // Invalid GroupID + res2 := <-ss.Group().CreateGroupSyncable(&model.GroupSyncable{ + GroupId: "x", + CanLeave: true, + SyncableId: string(model.NewId()), + Type: model.GroupSyncableTypeTeam, + }) + assert.Equal(t, res2.Err.Id, "model.group_syncable.group_id.app_error") + + // TODO: Add this validation test in phase 2 of LDAP groups sync. 
+ // Invalid CanLeave/AutoAdd combo (both false) + // res3 := <-ss.Group().CreateGroupSyncable(&model.GroupSyncable{ + // GroupId: model.NewId(), + // CanLeave: false, + // AutoAdd: false, + // SyncableId: string(model.NewId()), + // Type: model.GroupSyncableTypeTeam, + // }) + // assert.Equal(t, res3.Err.Id, "model.group_syncable.invalid_state") + + // Create Group + g1 := &model.Group{ + Name: model.NewId(), + DisplayName: model.NewId(), + Source: model.GroupSourceLdap, + RemoteId: model.NewId(), + } + res4 := <-ss.Group().Create(g1) + assert.Nil(t, res4.Err) + group := res4.Data.(*model.Group) + + // Create Team + t1 := &model.Team{ + DisplayName: "Name", + Description: "Some description", + CompanyName: "Some company name", + AllowOpenInvite: false, + InviteId: "inviteid0", + Name: "z-z-" + model.NewId() + "a", + Email: "success+" + model.NewId() + "@simulator.amazonses.com", + Type: model.TEAM_OPEN, + } + res5 := <-ss.Team().Save(t1) + assert.Nil(t, res5.Err) + team := res5.Data.(*model.Team) + + // New GroupSyncable, happy path + gt1 := &model.GroupSyncable{ + GroupId: group.Id, + CanLeave: true, + AutoAdd: false, + SyncableId: string(team.Id), + Type: model.GroupSyncableTypeTeam, + } + res6 := <-ss.Group().CreateGroupSyncable(gt1) + assert.Nil(t, res6.Err) + d1 := res6.Data.(*model.GroupSyncable) + assert.Equal(t, gt1.SyncableId, d1.SyncableId) + assert.Equal(t, gt1.GroupId, d1.GroupId) + assert.Equal(t, gt1.CanLeave, d1.CanLeave) + assert.Equal(t, gt1.AutoAdd, d1.AutoAdd) + assert.NotZero(t, d1.CreateAt) + assert.Zero(t, d1.DeleteAt) +} + +func testGetGroupSyncable(t *testing.T, ss store.Store) { + // Create a group + g1 := &model.Group{ + Name: model.NewId(), + DisplayName: model.NewId(), + Description: model.NewId(), + Source: model.GroupSourceLdap, + RemoteId: model.NewId(), + } + res1 := <-ss.Group().Create(g1) + assert.Nil(t, res1.Err) + group := res1.Data.(*model.Group) + + // Create Team + t1 := &model.Team{ + DisplayName: "Name", + Description: 
"Some description", + CompanyName: "Some company name", + AllowOpenInvite: false, + InviteId: "inviteid0", + Name: "z-z-" + model.NewId() + "a", + Email: "success+" + model.NewId() + "@simulator.amazonses.com", + Type: model.TEAM_OPEN, + } + res2 := <-ss.Team().Save(t1) + assert.Nil(t, res2.Err) + team := res2.Data.(*model.Team) + + // Create GroupSyncable + gt1 := &model.GroupSyncable{ + GroupId: group.Id, + CanLeave: true, + AutoAdd: false, + SyncableId: string(team.Id), + Type: model.GroupSyncableTypeTeam, + } + res3 := <-ss.Group().CreateGroupSyncable(gt1) + assert.Nil(t, res3.Err) + groupTeam := res3.Data.(*model.GroupSyncable) + + // Get GroupSyncable + res4 := <-ss.Group().GetGroupSyncable(groupTeam.GroupId, groupTeam.SyncableId, model.GroupSyncableTypeTeam) + assert.Nil(t, res4.Err) + dgt := res4.Data.(*model.GroupSyncable) + assert.Equal(t, gt1.GroupId, dgt.GroupId) + assert.Equal(t, gt1.SyncableId, dgt.SyncableId) + // assert.Equal(t, gt1.CanLeave, dgt.CanLeave) // TODO: Re-add this test in phase 2 of LDAP groups sync. 
+ assert.Equal(t, gt1.AutoAdd, dgt.AutoAdd) + assert.NotZero(t, dgt.CreateAt) + assert.NotZero(t, dgt.UpdateAt) + assert.Zero(t, dgt.DeleteAt) + } + + func testGetAllGroupSyncablesByGroup(t *testing.T, ss store.Store) { + numGroupSyncables := 10 + + // Create group + g := &model.Group{ + Name: model.NewId(), + DisplayName: model.NewId(), + Description: model.NewId(), + Source: model.GroupSourceLdap, + RemoteId: model.NewId(), + } + res1 := <-ss.Group().Create(g) + assert.Nil(t, res1.Err) + group := res1.Data.(*model.Group) + + groupTeams := []*model.GroupSyncable{} + + // Create groupTeams + for i := 0; i < numGroupSyncables; i++ { + // Create Team + t1 := &model.Team{ + DisplayName: "Name", + Description: "Some description", + CompanyName: "Some company name", + AllowOpenInvite: false, + InviteId: "inviteid0", + Name: "z-z-" + model.NewId() + "a", + Email: "success+" + model.NewId() + "@simulator.amazonses.com", + Type: model.TEAM_OPEN, + } + res2 := <-ss.Team().Save(t1) + assert.Nil(t, res2.Err) + team := res2.Data.(*model.Team) + + // create groupteam + res3 := <-ss.Group().CreateGroupSyncable(&model.GroupSyncable{ + GroupId: group.Id, + CanLeave: true, + SyncableId: string(team.Id), + Type: model.GroupSyncableTypeTeam, + }) + assert.Nil(t, res3.Err) + groupTeam := res3.Data.(*model.GroupSyncable) + groupTeams = append(groupTeams, groupTeam) + } + + // Returns all the group teams + res4 := <-ss.Group().GetAllGroupSyncablesByGroupId(group.Id, model.GroupSyncableTypeTeam) + d1 := res4.Data.([]*model.GroupSyncable) + assert.Condition(t, func() bool { return len(d1) >= numGroupSyncables }) + for _, expectedGroupTeam := range groupTeams { + present := false + for _, dbGroupTeam := range d1 { + if dbGroupTeam.GroupId == expectedGroupTeam.GroupId && dbGroupTeam.SyncableId == expectedGroupTeam.SyncableId { + present = true + break + } + } + assert.True(t, present) + } + } + + func testUpdateGroupSyncable(t *testing.T, ss store.Store) { + // Create Group + g1 := 
&model.Group{ + Name: model.NewId(), + DisplayName: model.NewId(), + Source: model.GroupSourceLdap, + RemoteId: model.NewId(), + } + res4 := <-ss.Group().Create(g1) + assert.Nil(t, res4.Err) + group := res4.Data.(*model.Group) + + // Create Team + t1 := &model.Team{ + DisplayName: "Name", + Description: "Some description", + CompanyName: "Some company name", + AllowOpenInvite: false, + InviteId: "inviteid0", + Name: "z-z-" + model.NewId() + "a", + Email: "success+" + model.NewId() + "@simulator.amazonses.com", + Type: model.TEAM_OPEN, + } + res5 := <-ss.Team().Save(t1) + assert.Nil(t, res5.Err) + team := res5.Data.(*model.Team) + + // New GroupSyncable, happy path + gt1 := &model.GroupSyncable{ + GroupId: group.Id, + CanLeave: true, + AutoAdd: false, + SyncableId: string(team.Id), + Type: model.GroupSyncableTypeTeam, + } + res6 := <-ss.Group().CreateGroupSyncable(gt1) + assert.Nil(t, res6.Err) + d1 := res6.Data.(*model.GroupSyncable) + + // Update existing group team + gt1.CanLeave = false + gt1.AutoAdd = true + res7 := <-ss.Group().UpdateGroupSyncable(gt1) + assert.Nil(t, res7.Err) + d2 := res7.Data.(*model.GroupSyncable) + assert.False(t, d2.CanLeave) + assert.True(t, d2.AutoAdd) + + // TODO: Add this validation check test in phase 2 of LDAP groups sync. 
+ // Update to invalid state + // gt1.AutoAdd = false + // gt1.CanLeave = false + // res8 := <-ss.Group().UpdateGroupSyncable(gt1) + // assert.Equal(t, res8.Err.Id, "model.group_syncable.invalid_state") + + // Non-existent Group + gt2 := &model.GroupSyncable{ + GroupId: model.NewId(), + CanLeave: true, + AutoAdd: false, + SyncableId: string(team.Id), + Type: model.GroupSyncableTypeTeam, + } + res9 := <-ss.Group().UpdateGroupSyncable(gt2) + assert.Equal(t, res9.Err.Id, "store.sql_group.no_rows") + + // Non-existent Team + gt3 := &model.GroupSyncable{ + GroupId: group.Id, + CanLeave: true, + AutoAdd: false, + SyncableId: string(model.NewId()), + Type: model.GroupSyncableTypeTeam, + } + res10 := <-ss.Group().UpdateGroupSyncable(gt3) + assert.Equal(t, res10.Err.Id, "store.sql_group.no_rows") + + // Cannot update CreateAt or DeleteAt + origCreateAt := d1.CreateAt + d1.CreateAt = model.GetMillis() + d1.AutoAdd = true + d1.CanLeave = true + res11 := <-ss.Group().UpdateGroupSyncable(d1) + assert.Nil(t, res11.Err) + d3 := res11.Data.(*model.GroupSyncable) + assert.Equal(t, origCreateAt, d3.CreateAt) + + // Cannot update DeleteAt to arbitrary value + d1.DeleteAt = 1 + res12 := <-ss.Group().UpdateGroupSyncable(d1) + assert.Equal(t, "model.group.delete_at.app_error", res12.Err.Id) + + // Can update DeleteAt to 0 + d1.DeleteAt = 0 + res13 := <-ss.Group().UpdateGroupSyncable(d1) + assert.Nil(t, res13.Err) + d4 := res13.Data.(*model.GroupSyncable) + assert.Zero(t, d4.DeleteAt) +} + +func testDeleteGroupSyncable(t *testing.T, ss store.Store) { + // Create Group + g1 := &model.Group{ + Name: model.NewId(), + DisplayName: model.NewId(), + Source: model.GroupSourceLdap, + RemoteId: model.NewId(), + } + res1 := <-ss.Group().Create(g1) + assert.Nil(t, res1.Err) + group := res1.Data.(*model.Group) + + // Create Team + t1 := &model.Team{ + DisplayName: "Name", + Description: "Some description", + CompanyName: "Some company name", + AllowOpenInvite: false, + InviteId: "inviteid0", + Name: 
"z-z-" + model.NewId() + "a", + Email: "success+" + model.NewId() + "@simulator.amazonses.com", + Type: model.TEAM_OPEN, + } + res2 := <-ss.Team().Save(t1) + assert.Nil(t, res2.Err) + team := res2.Data.(*model.Team) + + // Create GroupSyncable + gt1 := &model.GroupSyncable{ + GroupId: group.Id, + CanLeave: true, + AutoAdd: false, + SyncableId: string(team.Id), + Type: model.GroupSyncableTypeTeam, + } + res7 := <-ss.Group().CreateGroupSyncable(gt1) + assert.Nil(t, res7.Err) + groupTeam := res7.Data.(*model.GroupSyncable) + + // Non-existent Group + res5 := <-ss.Group().DeleteGroupSyncable(model.NewId(), groupTeam.SyncableId, model.GroupSyncableTypeTeam) + assert.Equal(t, res5.Err.Id, "store.sql_group.no_rows") + + // Non-existent Team + res6 := <-ss.Group().DeleteGroupSyncable(groupTeam.GroupId, string(model.NewId()), model.GroupSyncableTypeTeam) + assert.Equal(t, res6.Err.Id, "store.sql_group.no_rows") + + // Happy path... + res8 := <-ss.Group().DeleteGroupSyncable(groupTeam.GroupId, groupTeam.SyncableId, model.GroupSyncableTypeTeam) + assert.Nil(t, res8.Err) + d1 := res8.Data.(*model.GroupSyncable) + assert.NotZero(t, d1.DeleteAt) + assert.Equal(t, d1.GroupId, groupTeam.GroupId) + assert.Equal(t, d1.SyncableId, groupTeam.SyncableId) + // assert.Equal(t, d1.CanLeave, groupTeam.CanLeave) // TODO: Re-add this test in phase 2 of LDAP groups sync. 
+ assert.Equal(t, d1.AutoAdd, groupTeam.AutoAdd) + assert.Equal(t, d1.CreateAt, groupTeam.CreateAt) + assert.Condition(t, func() bool { return d1.UpdateAt > groupTeam.UpdateAt }) + + // Record already deleted + res9 := <-ss.Group().DeleteGroupSyncable(d1.GroupId, d1.SyncableId, d1.Type) + assert.NotNil(t, res9.Err) + assert.Equal(t, res9.Err.Id, "store.sql_group.group_syncable_already_deleted") +} + +func testPendingAutoAddTeamMembers(t *testing.T, ss store.Store) { + // Create Group + res := <-ss.Group().Create(&model.Group{ + Name: model.NewId(), + DisplayName: "PendingAutoAddTeamMembers Test Group", + RemoteId: model.NewId(), + Source: model.GroupSourceLdap, + }) + assert.Nil(t, res.Err) + group := res.Data.(*model.Group) + + // Create User + user := &model.User{ + Email: MakeEmail(), + Username: model.NewId(), + } + res = <-ss.User().Save(user) + assert.Nil(t, res.Err) + user = res.Data.(*model.User) + + // Create GroupMember + res = <-ss.Group().CreateOrRestoreMember(group.Id, user.Id) + assert.Nil(t, res.Err) + + // Create Team + team := &model.Team{ + DisplayName: "Name", + Description: "Some description", + CompanyName: "Some company name", + AllowOpenInvite: false, + InviteId: "inviteid0", + Name: "z-z-" + model.NewId() + "a", + Email: "success+" + model.NewId() + "@simulator.amazonses.com", + Type: model.TEAM_OPEN, + } + res = <-ss.Team().Save(team) + assert.Nil(t, res.Err) + team = res.Data.(*model.Team) + + // Create GroupTeam + res = <-ss.Group().CreateGroupSyncable(&model.GroupSyncable{ + AutoAdd: true, + CanLeave: true, + SyncableId: team.Id, + Type: model.GroupSyncableTypeTeam, + GroupId: group.Id, + }) + assert.Nil(t, res.Err) + syncable := res.Data.(*model.GroupSyncable) + + // Time before syncable was created + res = <-ss.Group().PendingAutoAddTeamMembers(syncable.CreateAt - 1) + assert.Nil(t, res.Err) + userTeamIDs := res.Data.([]*model.UserTeamIDPair) + assert.Len(t, userTeamIDs, 1) + assert.Equal(t, user.Id, userTeamIDs[0].UserID) + 
assert.Equal(t, team.Id, userTeamIDs[0].TeamID) + + // Time after syncable was created + res = <-ss.Group().PendingAutoAddTeamMembers(syncable.CreateAt + 1) + assert.Nil(t, res.Err) + assert.Len(t, res.Data, 0) + + // Delete and restore GroupMember should return result + res = <-ss.Group().DeleteMember(group.Id, user.Id) + assert.Nil(t, res.Err) + res = <-ss.Group().CreateOrRestoreMember(group.Id, user.Id) + assert.Nil(t, res.Err) + res = <-ss.Group().PendingAutoAddTeamMembers(syncable.CreateAt + 1) + assert.Nil(t, res.Err) + assert.Len(t, res.Data, 1) + + pristineSyncable := *syncable + + syncable.CanLeave = false + res = <-ss.Group().UpdateGroupSyncable(syncable) + assert.Nil(t, res.Err) + + // Time before syncable was updated + res = <-ss.Group().PendingAutoAddTeamMembers(syncable.UpdateAt - 1) + assert.Nil(t, res.Err) + userTeamIDs = res.Data.([]*model.UserTeamIDPair) + assert.Len(t, userTeamIDs, 1) + assert.Equal(t, user.Id, userTeamIDs[0].UserID) + assert.Equal(t, team.Id, userTeamIDs[0].TeamID) + + // Time after syncable was updated + res = <-ss.Group().PendingAutoAddTeamMembers(syncable.UpdateAt + 1) + assert.Nil(t, res.Err) + assert.Len(t, res.Data, 0) + + // Only includes if auto-add + syncable.AutoAdd = false + syncable.CanLeave = true // have to update this or the model isn't valid + res = <-ss.Group().UpdateGroupSyncable(syncable) + assert.Nil(t, res.Err) + res = <-ss.Group().PendingAutoAddTeamMembers(0) + assert.Nil(t, res.Err) + assert.Len(t, res.Data, 0) + + // reset state of syncable and verify + res = <-ss.Group().UpdateGroupSyncable(&pristineSyncable) + assert.Nil(t, res.Err) + res = <-ss.Group().PendingAutoAddTeamMembers(0) + assert.Nil(t, res.Err) + assert.Len(t, res.Data, 1) + + // No result if Group deleted + res = <-ss.Group().Delete(group.Id) + assert.Nil(t, res.Err) + res = <-ss.Group().PendingAutoAddTeamMembers(0) + assert.Nil(t, res.Err) + assert.Len(t, res.Data, 0) + + // reset state of group and verify + group.DeleteAt = 0 + res = 
<-ss.Group().Update(group) + res = <-ss.Group().PendingAutoAddTeamMembers(0) + assert.Nil(t, res.Err) + assert.Len(t, res.Data, 1) + + // No result if Team deleted + team.DeleteAt = model.GetMillis() + res = <-ss.Team().Update(team) + assert.Nil(t, res.Err) + res = <-ss.Group().PendingAutoAddTeamMembers(0) + assert.Nil(t, res.Err) + assert.Len(t, res.Data, 0) + + // reset state of team and verify + team.DeleteAt = 0 + res = <-ss.Team().Update(team) + assert.Nil(t, res.Err) + res = <-ss.Group().PendingAutoAddTeamMembers(0) + assert.Nil(t, res.Err) + assert.Len(t, res.Data, 1) + + // No result if GroupTeam deleted + res = <-ss.Group().DeleteGroupSyncable(group.Id, team.Id, model.GroupSyncableTypeTeam) + assert.Nil(t, res.Err) + res = <-ss.Group().PendingAutoAddTeamMembers(0) + assert.Nil(t, res.Err) + assert.Len(t, res.Data, 0) + + // reset GroupTeam and verify + res = <-ss.Group().UpdateGroupSyncable(&pristineSyncable) + assert.Nil(t, res.Err) + res = <-ss.Group().PendingAutoAddTeamMembers(0) + assert.Nil(t, res.Err) + assert.Len(t, res.Data, 1) + + // No result if GroupMember deleted + res = <-ss.Group().DeleteMember(group.Id, user.Id) + assert.Nil(t, res.Err) + res = <-ss.Group().PendingAutoAddTeamMembers(0) + assert.Nil(t, res.Err) + assert.Len(t, res.Data, 0) + + // restore group member and verify + res = <-ss.Group().CreateOrRestoreMember(group.Id, user.Id) + res = <-ss.Group().PendingAutoAddTeamMembers(0) + assert.Nil(t, res.Err) + assert.Len(t, res.Data, 1) + + // adding team membership stops returning result + res = <-ss.Team().SaveMember(&model.TeamMember{ + TeamId: team.Id, + UserId: user.Id, + }, 999) + assert.Nil(t, res.Err) + res = <-ss.Group().PendingAutoAddTeamMembers(0) + assert.Nil(t, res.Err) + assert.Len(t, res.Data, 0) +} + +func testPendingAutoAddChannelMembers(t *testing.T, ss store.Store) { + // Create Group + res := <-ss.Group().Create(&model.Group{ + Name: model.NewId(), + DisplayName: "PendingAutoAddChannelMembers Test Group", + RemoteId: 
model.NewId(), + Source: model.GroupSourceLdap, + }) + assert.Nil(t, res.Err) + group := res.Data.(*model.Group) + + // Create User + user := &model.User{ + Email: MakeEmail(), + Username: model.NewId(), + } + res = <-ss.User().Save(user) + assert.Nil(t, res.Err) + user = res.Data.(*model.User) + + // Create GroupMember + res = <-ss.Group().CreateOrRestoreMember(group.Id, user.Id) + assert.Nil(t, res.Err) + + // Create Channel + channel := &model.Channel{ + TeamId: model.NewId(), + DisplayName: "A Name", + Name: model.NewId(), + Type: model.CHANNEL_OPEN, // Query does not look at type so this shouldn't matter. + } + res = <-ss.Channel().Save(channel, 9999) + assert.Nil(t, res.Err) + channel = res.Data.(*model.Channel) + + // Create GroupChannel + res = <-ss.Group().CreateGroupSyncable(&model.GroupSyncable{ + AutoAdd: true, + CanLeave: true, + SyncableId: channel.Id, + Type: model.GroupSyncableTypeChannel, + GroupId: group.Id, + }) + assert.Nil(t, res.Err) + syncable := res.Data.(*model.GroupSyncable) + + // Time before syncable was created + res = <-ss.Group().PendingAutoAddChannelMembers(syncable.CreateAt - 1) + assert.Nil(t, res.Err) + userChannelIDs := res.Data.([]*model.UserChannelIDPair) + assert.Len(t, userChannelIDs, 1) + assert.Equal(t, user.Id, userChannelIDs[0].UserID) + assert.Equal(t, channel.Id, userChannelIDs[0].ChannelID) + + // Time after syncable was created + res = <-ss.Group().PendingAutoAddChannelMembers(syncable.CreateAt + 1) + assert.Nil(t, res.Err) + assert.Len(t, res.Data, 0) + + // Delete and restore GroupMember should return result + res = <-ss.Group().DeleteMember(group.Id, user.Id) + assert.Nil(t, res.Err) + res = <-ss.Group().CreateOrRestoreMember(group.Id, user.Id) + assert.Nil(t, res.Err) + res = <-ss.Group().PendingAutoAddChannelMembers(syncable.CreateAt + 1) + assert.Nil(t, res.Err) + assert.Len(t, res.Data, 1) + + pristineSyncable := *syncable + + syncable.CanLeave = false + res = <-ss.Group().UpdateGroupSyncable(syncable) + 
assert.Nil(t, res.Err) + + // Time before syncable was updated + res = <-ss.Group().PendingAutoAddChannelMembers(syncable.UpdateAt - 1) + assert.Nil(t, res.Err) + userChannelIDs = res.Data.([]*model.UserChannelIDPair) + assert.Len(t, userChannelIDs, 1) + assert.Equal(t, user.Id, userChannelIDs[0].UserID) + assert.Equal(t, channel.Id, userChannelIDs[0].ChannelID) + + // Time after syncable was updated + res = <-ss.Group().PendingAutoAddChannelMembers(syncable.UpdateAt + 1) + assert.Nil(t, res.Err) + assert.Len(t, res.Data, 0) + + // Only includes if auto-add + syncable.AutoAdd = false + syncable.CanLeave = true // have to update this or the model isn't valid + res = <-ss.Group().UpdateGroupSyncable(syncable) + assert.Nil(t, res.Err) + res = <-ss.Group().PendingAutoAddChannelMembers(0) + assert.Nil(t, res.Err) + assert.Len(t, res.Data, 0) + + // reset state of syncable and verify + res = <-ss.Group().UpdateGroupSyncable(&pristineSyncable) + assert.Nil(t, res.Err) + res = <-ss.Group().PendingAutoAddChannelMembers(0) + assert.Nil(t, res.Err) + assert.Len(t, res.Data, 1) + + // No result if Group deleted + res = <-ss.Group().Delete(group.Id) + assert.Nil(t, res.Err) + res = <-ss.Group().PendingAutoAddChannelMembers(0) + assert.Nil(t, res.Err) + assert.Len(t, res.Data, 0) + + // reset state of group and verify + group.DeleteAt = 0 + res = <-ss.Group().Update(group) + res = <-ss.Group().PendingAutoAddChannelMembers(0) + assert.Nil(t, res.Err) + assert.Len(t, res.Data, 1) + + // No result if Channel deleted + res = <-ss.Channel().Delete(channel.Id, model.GetMillis()) + assert.Nil(t, res.Err) + res = <-ss.Group().PendingAutoAddChannelMembers(0) + assert.Nil(t, res.Err) + assert.Len(t, res.Data, 0) + + // reset state of channel and verify + channel.DeleteAt = 0 + res = <-ss.Channel().Update(channel) + assert.Nil(t, res.Err) + res = <-ss.Group().PendingAutoAddChannelMembers(0) + assert.Nil(t, res.Err) + assert.Len(t, res.Data, 1) + + // No result if GroupChannel deleted + res 
= <-ss.Group().DeleteGroupSyncable(group.Id, channel.Id, model.GroupSyncableTypeChannel) + assert.Nil(t, res.Err) + res = <-ss.Group().PendingAutoAddChannelMembers(0) + assert.Nil(t, res.Err) + assert.Len(t, res.Data, 0) + + // reset GroupChannel and verify + res = <-ss.Group().UpdateGroupSyncable(&pristineSyncable) + assert.Nil(t, res.Err) + res = <-ss.Group().PendingAutoAddChannelMembers(0) + assert.Nil(t, res.Err) + assert.Len(t, res.Data, 1) + + // No result if GroupMember deleted + res = <-ss.Group().DeleteMember(group.Id, user.Id) + assert.Nil(t, res.Err) + res = <-ss.Group().PendingAutoAddChannelMembers(0) + assert.Nil(t, res.Err) + assert.Len(t, res.Data, 0) + + // restore group member and verify + res = <-ss.Group().CreateOrRestoreMember(group.Id, user.Id) + assert.Nil(t, res.Err) + res = <-ss.Group().PendingAutoAddChannelMembers(0) + assert.Nil(t, res.Err) + assert.Len(t, res.Data, 1) + + // Adding Channel (ChannelMemberHistory) should stop returning result + res = <-ss.ChannelMemberHistory().LogJoinEvent(user.Id, channel.Id, model.GetMillis()) + assert.Nil(t, res.Err) + res = <-ss.Group().PendingAutoAddChannelMembers(0) + assert.Nil(t, res.Err) + assert.Len(t, res.Data, 0) + + // Leaving Channel (ChannelMemberHistory) should still not return result + res = <-ss.ChannelMemberHistory().LogLeaveEvent(user.Id, channel.Id, model.GetMillis()) + assert.Nil(t, res.Err) + res = <-ss.Group().PendingAutoAddChannelMembers(0) + assert.Nil(t, res.Err) + assert.Len(t, res.Data, 0) + + // Purging ChannelMemberHistory re-returns the result + res = <-ss.ChannelMemberHistory().PermanentDeleteBatch(model.GetMillis()+1, 100) + assert.Nil(t, res.Err) + res = <-ss.Group().PendingAutoAddChannelMembers(0) + assert.Nil(t, res.Err) + assert.Len(t, res.Data, 1) +} diff --git a/store/storetest/mocks/ChannelStore.go b/store/storetest/mocks/ChannelStore.go index b12d2a0009..a0094301be 100644 --- a/store/storetest/mocks/ChannelStore.go +++ b/store/storetest/mocks/ChannelStore.go @@ 
-194,6 +194,22 @@ func (_m *ChannelStore) GetAllChannelMembersNotifyPropsForChannel(channelId stri return r0 } +// GetAllChannels provides a mock function with given fields: page, perPage, includeDeleted +func (_m *ChannelStore) GetAllChannels(page int, perPage int, includeDeleted bool) store.StoreChannel { + ret := _m.Called(page, perPage, includeDeleted) + + var r0 store.StoreChannel + if rf, ok := ret.Get(0).(func(int, int, bool) store.StoreChannel); ok { + r0 = rf(page, perPage, includeDeleted) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(store.StoreChannel) + } + } + + return r0 +} + // GetAllChannelsForExportAfter provides a mock function with given fields: limit, afterId func (_m *ChannelStore) GetAllChannelsForExportAfter(limit int, afterId string) store.StoreChannel { ret := _m.Called(limit, afterId) @@ -885,6 +901,22 @@ func (_m *ChannelStore) SaveMember(member *model.ChannelMember) store.StoreChann return r0 } +// SearchAllChannels provides a mock function with given fields: term, includeDeleted +func (_m *ChannelStore) SearchAllChannels(term string, includeDeleted bool) store.StoreChannel { + ret := _m.Called(term, includeDeleted) + + var r0 store.StoreChannel + if rf, ok := ret.Get(0).(func(string, bool) store.StoreChannel); ok { + r0 = rf(term, includeDeleted) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(store.StoreChannel) + } + } + + return r0 +} + // SearchInTeam provides a mock function with given fields: teamId, term, includeDeleted func (_m *ChannelStore) SearchInTeam(teamId string, term string, includeDeleted bool) store.StoreChannel { ret := _m.Called(teamId, term, includeDeleted) diff --git a/store/storetest/mocks/GroupStore.go b/store/storetest/mocks/GroupStore.go new file mode 100644 index 0000000000..36f8f8787b --- /dev/null +++ b/store/storetest/mocks/GroupStore.go @@ -0,0 +1,302 @@ +// Code generated by mockery v1.0.0. DO NOT EDIT. + +// Regenerate this file using `make store-mocks`. 
+ +package mocks + +import mock "github.com/stretchr/testify/mock" +import model "github.com/mattermost/mattermost-server/model" +import store "github.com/mattermost/mattermost-server/store" + +// GroupStore is an autogenerated mock type for the GroupStore type +type GroupStore struct { + mock.Mock +} + +// Create provides a mock function with given fields: group +func (_m *GroupStore) Create(group *model.Group) store.StoreChannel { + ret := _m.Called(group) + + var r0 store.StoreChannel + if rf, ok := ret.Get(0).(func(*model.Group) store.StoreChannel); ok { + r0 = rf(group) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(store.StoreChannel) + } + } + + return r0 +} + +// CreateGroupSyncable provides a mock function with given fields: groupSyncable +func (_m *GroupStore) CreateGroupSyncable(groupSyncable *model.GroupSyncable) store.StoreChannel { + ret := _m.Called(groupSyncable) + + var r0 store.StoreChannel + if rf, ok := ret.Get(0).(func(*model.GroupSyncable) store.StoreChannel); ok { + r0 = rf(groupSyncable) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(store.StoreChannel) + } + } + + return r0 +} + +// CreateOrRestoreMember provides a mock function with given fields: groupID, userID +func (_m *GroupStore) CreateOrRestoreMember(groupID string, userID string) store.StoreChannel { + ret := _m.Called(groupID, userID) + + var r0 store.StoreChannel + if rf, ok := ret.Get(0).(func(string, string) store.StoreChannel); ok { + r0 = rf(groupID, userID) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(store.StoreChannel) + } + } + + return r0 +} + +// Delete provides a mock function with given fields: groupID +func (_m *GroupStore) Delete(groupID string) store.StoreChannel { + ret := _m.Called(groupID) + + var r0 store.StoreChannel + if rf, ok := ret.Get(0).(func(string) store.StoreChannel); ok { + r0 = rf(groupID) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(store.StoreChannel) + } + } + + return r0 +} + +// DeleteGroupSyncable provides a 
mock function with given fields: groupID, syncableID, syncableType +func (_m *GroupStore) DeleteGroupSyncable(groupID string, syncableID string, syncableType model.GroupSyncableType) store.StoreChannel { + ret := _m.Called(groupID, syncableID, syncableType) + + var r0 store.StoreChannel + if rf, ok := ret.Get(0).(func(string, string, model.GroupSyncableType) store.StoreChannel); ok { + r0 = rf(groupID, syncableID, syncableType) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(store.StoreChannel) + } + } + + return r0 +} + +// DeleteMember provides a mock function with given fields: groupID, userID +func (_m *GroupStore) DeleteMember(groupID string, userID string) store.StoreChannel { + ret := _m.Called(groupID, userID) + + var r0 store.StoreChannel + if rf, ok := ret.Get(0).(func(string, string) store.StoreChannel); ok { + r0 = rf(groupID, userID) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(store.StoreChannel) + } + } + + return r0 +} + +// Get provides a mock function with given fields: groupID +func (_m *GroupStore) Get(groupID string) store.StoreChannel { + ret := _m.Called(groupID) + + var r0 store.StoreChannel + if rf, ok := ret.Get(0).(func(string) store.StoreChannel); ok { + r0 = rf(groupID) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(store.StoreChannel) + } + } + + return r0 +} + +// GetAllBySource provides a mock function with given fields: groupSource +func (_m *GroupStore) GetAllBySource(groupSource model.GroupSource) store.StoreChannel { + ret := _m.Called(groupSource) + + var r0 store.StoreChannel + if rf, ok := ret.Get(0).(func(model.GroupSource) store.StoreChannel); ok { + r0 = rf(groupSource) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(store.StoreChannel) + } + } + + return r0 +} + +// GetAllGroupSyncablesByGroupId provides a mock function with given fields: groupID, syncableType +func (_m *GroupStore) GetAllGroupSyncablesByGroupId(groupID string, syncableType model.GroupSyncableType) store.StoreChannel { + ret 
:= _m.Called(groupID, syncableType) + + var r0 store.StoreChannel + if rf, ok := ret.Get(0).(func(string, model.GroupSyncableType) store.StoreChannel); ok { + r0 = rf(groupID, syncableType) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(store.StoreChannel) + } + } + + return r0 +} + +// GetByRemoteID provides a mock function with given fields: remoteID, groupSource +func (_m *GroupStore) GetByRemoteID(remoteID string, groupSource model.GroupSource) store.StoreChannel { + ret := _m.Called(remoteID, groupSource) + + var r0 store.StoreChannel + if rf, ok := ret.Get(0).(func(string, model.GroupSource) store.StoreChannel); ok { + r0 = rf(remoteID, groupSource) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(store.StoreChannel) + } + } + + return r0 +} + +// GetGroupSyncable provides a mock function with given fields: groupID, syncableID, syncableType +func (_m *GroupStore) GetGroupSyncable(groupID string, syncableID string, syncableType model.GroupSyncableType) store.StoreChannel { + ret := _m.Called(groupID, syncableID, syncableType) + + var r0 store.StoreChannel + if rf, ok := ret.Get(0).(func(string, string, model.GroupSyncableType) store.StoreChannel); ok { + r0 = rf(groupID, syncableID, syncableType) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(store.StoreChannel) + } + } + + return r0 +} + +// GetMemberCount provides a mock function with given fields: groupID +func (_m *GroupStore) GetMemberCount(groupID string) store.StoreChannel { + ret := _m.Called(groupID) + + var r0 store.StoreChannel + if rf, ok := ret.Get(0).(func(string) store.StoreChannel); ok { + r0 = rf(groupID) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(store.StoreChannel) + } + } + + return r0 +} + +// GetMemberUsers provides a mock function with given fields: groupID +func (_m *GroupStore) GetMemberUsers(groupID string) store.StoreChannel { + ret := _m.Called(groupID) + + var r0 store.StoreChannel + if rf, ok := ret.Get(0).(func(string) store.StoreChannel); ok { + 
r0 = rf(groupID) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(store.StoreChannel) + } + } + + return r0 +} + +// GetMemberUsersPage provides a mock function with given fields: groupID, offset, limit +func (_m *GroupStore) GetMemberUsersPage(groupID string, offset int, limit int) store.StoreChannel { + ret := _m.Called(groupID, offset, limit) + + var r0 store.StoreChannel + if rf, ok := ret.Get(0).(func(string, int, int) store.StoreChannel); ok { + r0 = rf(groupID, offset, limit) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(store.StoreChannel) + } + } + + return r0 +} + +// PendingAutoAddChannelMembers provides a mock function with given fields: minGroupMembersCreateAt +func (_m *GroupStore) PendingAutoAddChannelMembers(minGroupMembersCreateAt int64) store.StoreChannel { + ret := _m.Called(minGroupMembersCreateAt) + + var r0 store.StoreChannel + if rf, ok := ret.Get(0).(func(int64) store.StoreChannel); ok { + r0 = rf(minGroupMembersCreateAt) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(store.StoreChannel) + } + } + + return r0 +} + +// PendingAutoAddTeamMembers provides a mock function with given fields: minGroupMembersCreateAt +func (_m *GroupStore) PendingAutoAddTeamMembers(minGroupMembersCreateAt int64) store.StoreChannel { + ret := _m.Called(minGroupMembersCreateAt) + + var r0 store.StoreChannel + if rf, ok := ret.Get(0).(func(int64) store.StoreChannel); ok { + r0 = rf(minGroupMembersCreateAt) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(store.StoreChannel) + } + } + + return r0 +} + +// Update provides a mock function with given fields: group +func (_m *GroupStore) Update(group *model.Group) store.StoreChannel { + ret := _m.Called(group) + + var r0 store.StoreChannel + if rf, ok := ret.Get(0).(func(*model.Group) store.StoreChannel); ok { + r0 = rf(group) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(store.StoreChannel) + } + } + + return r0 +} + +// UpdateGroupSyncable provides a mock function with given fields: 
groupSyncable +func (_m *GroupStore) UpdateGroupSyncable(groupSyncable *model.GroupSyncable) store.StoreChannel { + ret := _m.Called(groupSyncable) + + var r0 store.StoreChannel + if rf, ok := ret.Get(0).(func(*model.GroupSyncable) store.StoreChannel); ok { + r0 = rf(groupSyncable) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(store.StoreChannel) + } + } + + return r0 +} diff --git a/store/storetest/mocks/LayeredStoreDatabaseLayer.go b/store/storetest/mocks/LayeredStoreDatabaseLayer.go index eb0881dd85..8234c0594d 100644 --- a/store/storetest/mocks/LayeredStoreDatabaseLayer.go +++ b/store/storetest/mocks/LayeredStoreDatabaseLayer.go @@ -168,6 +168,390 @@ func (_m *LayeredStoreDatabaseLayer) FileInfo() store.FileInfoStore { return r0 } +// Group provides a mock function with given fields: +func (_m *LayeredStoreDatabaseLayer) Group() store.GroupStore { + ret := _m.Called() + + var r0 store.GroupStore + if rf, ok := ret.Get(0).(func() store.GroupStore); ok { + r0 = rf() + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(store.GroupStore) + } + } + + return r0 +} + +// GroupCreate provides a mock function with given fields: ctx, group, hints +func (_m *LayeredStoreDatabaseLayer) GroupCreate(ctx context.Context, group *model.Group, hints ...store.LayeredStoreHint) *store.LayeredStoreSupplierResult { + _va := make([]interface{}, len(hints)) + for _i := range hints { + _va[_i] = hints[_i] + } + var _ca []interface{} + _ca = append(_ca, ctx, group) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) + + var r0 *store.LayeredStoreSupplierResult + if rf, ok := ret.Get(0).(func(context.Context, *model.Group, ...store.LayeredStoreHint) *store.LayeredStoreSupplierResult); ok { + r0 = rf(ctx, group, hints...) 
+ } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*store.LayeredStoreSupplierResult) + } + } + + return r0 +} + +// GroupCreateGroupSyncable provides a mock function with given fields: ctx, groupSyncable, hints +func (_m *LayeredStoreDatabaseLayer) GroupCreateGroupSyncable(ctx context.Context, groupSyncable *model.GroupSyncable, hints ...store.LayeredStoreHint) *store.LayeredStoreSupplierResult { + _va := make([]interface{}, len(hints)) + for _i := range hints { + _va[_i] = hints[_i] + } + var _ca []interface{} + _ca = append(_ca, ctx, groupSyncable) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) + + var r0 *store.LayeredStoreSupplierResult + if rf, ok := ret.Get(0).(func(context.Context, *model.GroupSyncable, ...store.LayeredStoreHint) *store.LayeredStoreSupplierResult); ok { + r0 = rf(ctx, groupSyncable, hints...) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*store.LayeredStoreSupplierResult) + } + } + + return r0 +} + +// GroupCreateOrRestoreMember provides a mock function with given fields: ctx, groupID, userID, hints +func (_m *LayeredStoreDatabaseLayer) GroupCreateOrRestoreMember(ctx context.Context, groupID string, userID string, hints ...store.LayeredStoreHint) *store.LayeredStoreSupplierResult { + _va := make([]interface{}, len(hints)) + for _i := range hints { + _va[_i] = hints[_i] + } + var _ca []interface{} + _ca = append(_ca, ctx, groupID, userID) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) + + var r0 *store.LayeredStoreSupplierResult + if rf, ok := ret.Get(0).(func(context.Context, string, string, ...store.LayeredStoreHint) *store.LayeredStoreSupplierResult); ok { + r0 = rf(ctx, groupID, userID, hints...) 
+ } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*store.LayeredStoreSupplierResult) + } + } + + return r0 +} + +// GroupDelete provides a mock function with given fields: ctx, groupID, hints +func (_m *LayeredStoreDatabaseLayer) GroupDelete(ctx context.Context, groupID string, hints ...store.LayeredStoreHint) *store.LayeredStoreSupplierResult { + _va := make([]interface{}, len(hints)) + for _i := range hints { + _va[_i] = hints[_i] + } + var _ca []interface{} + _ca = append(_ca, ctx, groupID) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) + + var r0 *store.LayeredStoreSupplierResult + if rf, ok := ret.Get(0).(func(context.Context, string, ...store.LayeredStoreHint) *store.LayeredStoreSupplierResult); ok { + r0 = rf(ctx, groupID, hints...) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*store.LayeredStoreSupplierResult) + } + } + + return r0 +} + +// GroupDeleteGroupSyncable provides a mock function with given fields: ctx, groupID, syncableID, syncableType, hints +func (_m *LayeredStoreDatabaseLayer) GroupDeleteGroupSyncable(ctx context.Context, groupID string, syncableID string, syncableType model.GroupSyncableType, hints ...store.LayeredStoreHint) *store.LayeredStoreSupplierResult { + _va := make([]interface{}, len(hints)) + for _i := range hints { + _va[_i] = hints[_i] + } + var _ca []interface{} + _ca = append(_ca, ctx, groupID, syncableID, syncableType) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) + + var r0 *store.LayeredStoreSupplierResult + if rf, ok := ret.Get(0).(func(context.Context, string, string, model.GroupSyncableType, ...store.LayeredStoreHint) *store.LayeredStoreSupplierResult); ok { + r0 = rf(ctx, groupID, syncableID, syncableType, hints...) 
+ } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*store.LayeredStoreSupplierResult) + } + } + + return r0 +} + +// GroupDeleteMember provides a mock function with given fields: ctx, groupID, userID, hints +func (_m *LayeredStoreDatabaseLayer) GroupDeleteMember(ctx context.Context, groupID string, userID string, hints ...store.LayeredStoreHint) *store.LayeredStoreSupplierResult { + _va := make([]interface{}, len(hints)) + for _i := range hints { + _va[_i] = hints[_i] + } + var _ca []interface{} + _ca = append(_ca, ctx, groupID, userID) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) + + var r0 *store.LayeredStoreSupplierResult + if rf, ok := ret.Get(0).(func(context.Context, string, string, ...store.LayeredStoreHint) *store.LayeredStoreSupplierResult); ok { + r0 = rf(ctx, groupID, userID, hints...) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*store.LayeredStoreSupplierResult) + } + } + + return r0 +} + +// GroupGet provides a mock function with given fields: ctx, groupID, hints +func (_m *LayeredStoreDatabaseLayer) GroupGet(ctx context.Context, groupID string, hints ...store.LayeredStoreHint) *store.LayeredStoreSupplierResult { + _va := make([]interface{}, len(hints)) + for _i := range hints { + _va[_i] = hints[_i] + } + var _ca []interface{} + _ca = append(_ca, ctx, groupID) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) + + var r0 *store.LayeredStoreSupplierResult + if rf, ok := ret.Get(0).(func(context.Context, string, ...store.LayeredStoreHint) *store.LayeredStoreSupplierResult); ok { + r0 = rf(ctx, groupID, hints...) 
+ } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*store.LayeredStoreSupplierResult) + } + } + + return r0 +} + +// GroupGetAllBySource provides a mock function with given fields: ctx, groupSource, hints +func (_m *LayeredStoreDatabaseLayer) GroupGetAllBySource(ctx context.Context, groupSource model.GroupSource, hints ...store.LayeredStoreHint) *store.LayeredStoreSupplierResult { + _va := make([]interface{}, len(hints)) + for _i := range hints { + _va[_i] = hints[_i] + } + var _ca []interface{} + _ca = append(_ca, ctx, groupSource) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) + + var r0 *store.LayeredStoreSupplierResult + if rf, ok := ret.Get(0).(func(context.Context, model.GroupSource, ...store.LayeredStoreHint) *store.LayeredStoreSupplierResult); ok { + r0 = rf(ctx, groupSource, hints...) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*store.LayeredStoreSupplierResult) + } + } + + return r0 +} + +// GroupGetAllGroupSyncablesByGroup provides a mock function with given fields: ctx, groupID, syncableType, hints +func (_m *LayeredStoreDatabaseLayer) GroupGetAllGroupSyncablesByGroup(ctx context.Context, groupID string, syncableType model.GroupSyncableType, hints ...store.LayeredStoreHint) *store.LayeredStoreSupplierResult { + _va := make([]interface{}, len(hints)) + for _i := range hints { + _va[_i] = hints[_i] + } + var _ca []interface{} + _ca = append(_ca, ctx, groupID, syncableType) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) + + var r0 *store.LayeredStoreSupplierResult + if rf, ok := ret.Get(0).(func(context.Context, string, model.GroupSyncableType, ...store.LayeredStoreHint) *store.LayeredStoreSupplierResult); ok { + r0 = rf(ctx, groupID, syncableType, hints...) 
+ } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*store.LayeredStoreSupplierResult) + } + } + + return r0 +} + +// GroupGetByRemoteID provides a mock function with given fields: ctx, remoteID, groupSource, hints +func (_m *LayeredStoreDatabaseLayer) GroupGetByRemoteID(ctx context.Context, remoteID string, groupSource model.GroupSource, hints ...store.LayeredStoreHint) *store.LayeredStoreSupplierResult { + _va := make([]interface{}, len(hints)) + for _i := range hints { + _va[_i] = hints[_i] + } + var _ca []interface{} + _ca = append(_ca, ctx, remoteID, groupSource) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) + + var r0 *store.LayeredStoreSupplierResult + if rf, ok := ret.Get(0).(func(context.Context, string, model.GroupSource, ...store.LayeredStoreHint) *store.LayeredStoreSupplierResult); ok { + r0 = rf(ctx, remoteID, groupSource, hints...) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*store.LayeredStoreSupplierResult) + } + } + + return r0 +} + +// GroupGetGroupSyncable provides a mock function with given fields: ctx, groupID, syncableID, syncableType, hints +func (_m *LayeredStoreDatabaseLayer) GroupGetGroupSyncable(ctx context.Context, groupID string, syncableID string, syncableType model.GroupSyncableType, hints ...store.LayeredStoreHint) *store.LayeredStoreSupplierResult { + _va := make([]interface{}, len(hints)) + for _i := range hints { + _va[_i] = hints[_i] + } + var _ca []interface{} + _ca = append(_ca, ctx, groupID, syncableID, syncableType) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) + + var r0 *store.LayeredStoreSupplierResult + if rf, ok := ret.Get(0).(func(context.Context, string, string, model.GroupSyncableType, ...store.LayeredStoreHint) *store.LayeredStoreSupplierResult); ok { + r0 = rf(ctx, groupID, syncableID, syncableType, hints...) 
+ } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*store.LayeredStoreSupplierResult) + } + } + + return r0 +} + +// GroupGetMemberCount provides a mock function with given fields: ctx, groupID, hints +func (_m *LayeredStoreDatabaseLayer) GroupGetMemberCount(ctx context.Context, groupID string, hints ...store.LayeredStoreHint) *store.LayeredStoreSupplierResult { + _va := make([]interface{}, len(hints)) + for _i := range hints { + _va[_i] = hints[_i] + } + var _ca []interface{} + _ca = append(_ca, ctx, groupID) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) + + var r0 *store.LayeredStoreSupplierResult + if rf, ok := ret.Get(0).(func(context.Context, string, ...store.LayeredStoreHint) *store.LayeredStoreSupplierResult); ok { + r0 = rf(ctx, groupID, hints...) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*store.LayeredStoreSupplierResult) + } + } + + return r0 +} + +// GroupGetMemberUsers provides a mock function with given fields: ctx, groupID, hints +func (_m *LayeredStoreDatabaseLayer) GroupGetMemberUsers(ctx context.Context, groupID string, hints ...store.LayeredStoreHint) *store.LayeredStoreSupplierResult { + _va := make([]interface{}, len(hints)) + for _i := range hints { + _va[_i] = hints[_i] + } + var _ca []interface{} + _ca = append(_ca, ctx, groupID) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) + + var r0 *store.LayeredStoreSupplierResult + if rf, ok := ret.Get(0).(func(context.Context, string, ...store.LayeredStoreHint) *store.LayeredStoreSupplierResult); ok { + r0 = rf(ctx, groupID, hints...) 
+ } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*store.LayeredStoreSupplierResult) + } + } + + return r0 +} + +// GroupGetMemberUsersPage provides a mock function with given fields: ctx, groupID, offset, limit, hints +func (_m *LayeredStoreDatabaseLayer) GroupGetMemberUsersPage(ctx context.Context, groupID string, offset int, limit int, hints ...store.LayeredStoreHint) *store.LayeredStoreSupplierResult { + _va := make([]interface{}, len(hints)) + for _i := range hints { + _va[_i] = hints[_i] + } + var _ca []interface{} + _ca = append(_ca, ctx, groupID, offset, limit) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) + + var r0 *store.LayeredStoreSupplierResult + if rf, ok := ret.Get(0).(func(context.Context, string, int, int, ...store.LayeredStoreHint) *store.LayeredStoreSupplierResult); ok { + r0 = rf(ctx, groupID, offset, limit, hints...) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*store.LayeredStoreSupplierResult) + } + } + + return r0 +} + +// GroupUpdate provides a mock function with given fields: ctx, group, hints +func (_m *LayeredStoreDatabaseLayer) GroupUpdate(ctx context.Context, group *model.Group, hints ...store.LayeredStoreHint) *store.LayeredStoreSupplierResult { + _va := make([]interface{}, len(hints)) + for _i := range hints { + _va[_i] = hints[_i] + } + var _ca []interface{} + _ca = append(_ca, ctx, group) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) + + var r0 *store.LayeredStoreSupplierResult + if rf, ok := ret.Get(0).(func(context.Context, *model.Group, ...store.LayeredStoreHint) *store.LayeredStoreSupplierResult); ok { + r0 = rf(ctx, group, hints...) 
+ } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*store.LayeredStoreSupplierResult) + } + } + + return r0 +} + +// GroupUpdateGroupSyncable provides a mock function with given fields: ctx, groupSyncable, hints +func (_m *LayeredStoreDatabaseLayer) GroupUpdateGroupSyncable(ctx context.Context, groupSyncable *model.GroupSyncable, hints ...store.LayeredStoreHint) *store.LayeredStoreSupplierResult { + _va := make([]interface{}, len(hints)) + for _i := range hints { + _va[_i] = hints[_i] + } + var _ca []interface{} + _ca = append(_ca, ctx, groupSyncable) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) + + var r0 *store.LayeredStoreSupplierResult + if rf, ok := ret.Get(0).(func(context.Context, *model.GroupSyncable, ...store.LayeredStoreHint) *store.LayeredStoreSupplierResult); ok { + r0 = rf(ctx, groupSyncable, hints...) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*store.LayeredStoreSupplierResult) + } + } + + return r0 +} + // Job provides a mock function with given fields: func (_m *LayeredStoreDatabaseLayer) Job() store.JobStore { ret := _m.Called() @@ -242,6 +626,52 @@ func (_m *LayeredStoreDatabaseLayer) OAuth() store.OAuthStore { return r0 } +// PendingAutoAddChannelMembers provides a mock function with given fields: ctx, minGroupMembersCreateAt, hints +func (_m *LayeredStoreDatabaseLayer) PendingAutoAddChannelMembers(ctx context.Context, minGroupMembersCreateAt int64, hints ...store.LayeredStoreHint) *store.LayeredStoreSupplierResult { + _va := make([]interface{}, len(hints)) + for _i := range hints { + _va[_i] = hints[_i] + } + var _ca []interface{} + _ca = append(_ca, ctx, minGroupMembersCreateAt) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) + + var r0 *store.LayeredStoreSupplierResult + if rf, ok := ret.Get(0).(func(context.Context, int64, ...store.LayeredStoreHint) *store.LayeredStoreSupplierResult); ok { + r0 = rf(ctx, minGroupMembersCreateAt, hints...) 
+ } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*store.LayeredStoreSupplierResult) + } + } + + return r0 +} + +// PendingAutoAddTeamMembers provides a mock function with given fields: ctx, minGroupMembersCreateAt, hints +func (_m *LayeredStoreDatabaseLayer) PendingAutoAddTeamMembers(ctx context.Context, minGroupMembersCreateAt int64, hints ...store.LayeredStoreHint) *store.LayeredStoreSupplierResult { + _va := make([]interface{}, len(hints)) + for _i := range hints { + _va[_i] = hints[_i] + } + var _ca []interface{} + _ca = append(_ca, ctx, minGroupMembersCreateAt) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) + + var r0 *store.LayeredStoreSupplierResult + if rf, ok := ret.Get(0).(func(context.Context, int64, ...store.LayeredStoreHint) *store.LayeredStoreSupplierResult); ok { + r0 = rf(ctx, minGroupMembersCreateAt, hints...) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*store.LayeredStoreSupplierResult) + } + } + + return r0 +} + // Plugin provides a mock function with given fields: func (_m *LayeredStoreDatabaseLayer) Plugin() store.PluginStore { ret := _m.Called() diff --git a/store/storetest/mocks/LayeredStoreSupplier.go b/store/storetest/mocks/LayeredStoreSupplier.go index 4b3da6efc7..efcb1dbdb3 100644 --- a/store/storetest/mocks/LayeredStoreSupplier.go +++ b/store/storetest/mocks/LayeredStoreSupplier.go @@ -14,6 +14,374 @@ type LayeredStoreSupplier struct { mock.Mock } +// GroupCreate provides a mock function with given fields: ctx, group, hints +func (_m *LayeredStoreSupplier) GroupCreate(ctx context.Context, group *model.Group, hints ...store.LayeredStoreHint) *store.LayeredStoreSupplierResult { + _va := make([]interface{}, len(hints)) + for _i := range hints { + _va[_i] = hints[_i] + } + var _ca []interface{} + _ca = append(_ca, ctx, group) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) 
+ + var r0 *store.LayeredStoreSupplierResult + if rf, ok := ret.Get(0).(func(context.Context, *model.Group, ...store.LayeredStoreHint) *store.LayeredStoreSupplierResult); ok { + r0 = rf(ctx, group, hints...) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*store.LayeredStoreSupplierResult) + } + } + + return r0 +} + +// GroupCreateGroupSyncable provides a mock function with given fields: ctx, groupSyncable, hints +func (_m *LayeredStoreSupplier) GroupCreateGroupSyncable(ctx context.Context, groupSyncable *model.GroupSyncable, hints ...store.LayeredStoreHint) *store.LayeredStoreSupplierResult { + _va := make([]interface{}, len(hints)) + for _i := range hints { + _va[_i] = hints[_i] + } + var _ca []interface{} + _ca = append(_ca, ctx, groupSyncable) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) + + var r0 *store.LayeredStoreSupplierResult + if rf, ok := ret.Get(0).(func(context.Context, *model.GroupSyncable, ...store.LayeredStoreHint) *store.LayeredStoreSupplierResult); ok { + r0 = rf(ctx, groupSyncable, hints...) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*store.LayeredStoreSupplierResult) + } + } + + return r0 +} + +// GroupCreateOrRestoreMember provides a mock function with given fields: ctx, groupID, userID, hints +func (_m *LayeredStoreSupplier) GroupCreateOrRestoreMember(ctx context.Context, groupID string, userID string, hints ...store.LayeredStoreHint) *store.LayeredStoreSupplierResult { + _va := make([]interface{}, len(hints)) + for _i := range hints { + _va[_i] = hints[_i] + } + var _ca []interface{} + _ca = append(_ca, ctx, groupID, userID) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) + + var r0 *store.LayeredStoreSupplierResult + if rf, ok := ret.Get(0).(func(context.Context, string, string, ...store.LayeredStoreHint) *store.LayeredStoreSupplierResult); ok { + r0 = rf(ctx, groupID, userID, hints...) 
+ } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*store.LayeredStoreSupplierResult) + } + } + + return r0 +} + +// GroupDelete provides a mock function with given fields: ctx, groupID, hints +func (_m *LayeredStoreSupplier) GroupDelete(ctx context.Context, groupID string, hints ...store.LayeredStoreHint) *store.LayeredStoreSupplierResult { + _va := make([]interface{}, len(hints)) + for _i := range hints { + _va[_i] = hints[_i] + } + var _ca []interface{} + _ca = append(_ca, ctx, groupID) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) + + var r0 *store.LayeredStoreSupplierResult + if rf, ok := ret.Get(0).(func(context.Context, string, ...store.LayeredStoreHint) *store.LayeredStoreSupplierResult); ok { + r0 = rf(ctx, groupID, hints...) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*store.LayeredStoreSupplierResult) + } + } + + return r0 +} + +// GroupDeleteGroupSyncable provides a mock function with given fields: ctx, groupID, syncableID, syncableType, hints +func (_m *LayeredStoreSupplier) GroupDeleteGroupSyncable(ctx context.Context, groupID string, syncableID string, syncableType model.GroupSyncableType, hints ...store.LayeredStoreHint) *store.LayeredStoreSupplierResult { + _va := make([]interface{}, len(hints)) + for _i := range hints { + _va[_i] = hints[_i] + } + var _ca []interface{} + _ca = append(_ca, ctx, groupID, syncableID, syncableType) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) + + var r0 *store.LayeredStoreSupplierResult + if rf, ok := ret.Get(0).(func(context.Context, string, string, model.GroupSyncableType, ...store.LayeredStoreHint) *store.LayeredStoreSupplierResult); ok { + r0 = rf(ctx, groupID, syncableID, syncableType, hints...) 
+ } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*store.LayeredStoreSupplierResult) + } + } + + return r0 +} + +// GroupDeleteMember provides a mock function with given fields: ctx, groupID, userID, hints +func (_m *LayeredStoreSupplier) GroupDeleteMember(ctx context.Context, groupID string, userID string, hints ...store.LayeredStoreHint) *store.LayeredStoreSupplierResult { + _va := make([]interface{}, len(hints)) + for _i := range hints { + _va[_i] = hints[_i] + } + var _ca []interface{} + _ca = append(_ca, ctx, groupID, userID) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) + + var r0 *store.LayeredStoreSupplierResult + if rf, ok := ret.Get(0).(func(context.Context, string, string, ...store.LayeredStoreHint) *store.LayeredStoreSupplierResult); ok { + r0 = rf(ctx, groupID, userID, hints...) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*store.LayeredStoreSupplierResult) + } + } + + return r0 +} + +// GroupGet provides a mock function with given fields: ctx, groupID, hints +func (_m *LayeredStoreSupplier) GroupGet(ctx context.Context, groupID string, hints ...store.LayeredStoreHint) *store.LayeredStoreSupplierResult { + _va := make([]interface{}, len(hints)) + for _i := range hints { + _va[_i] = hints[_i] + } + var _ca []interface{} + _ca = append(_ca, ctx, groupID) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) + + var r0 *store.LayeredStoreSupplierResult + if rf, ok := ret.Get(0).(func(context.Context, string, ...store.LayeredStoreHint) *store.LayeredStoreSupplierResult); ok { + r0 = rf(ctx, groupID, hints...) 
+ } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*store.LayeredStoreSupplierResult) + } + } + + return r0 +} + +// GroupGetAllBySource provides a mock function with given fields: ctx, groupSource, hints +func (_m *LayeredStoreSupplier) GroupGetAllBySource(ctx context.Context, groupSource model.GroupSource, hints ...store.LayeredStoreHint) *store.LayeredStoreSupplierResult { + _va := make([]interface{}, len(hints)) + for _i := range hints { + _va[_i] = hints[_i] + } + var _ca []interface{} + _ca = append(_ca, ctx, groupSource) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) + + var r0 *store.LayeredStoreSupplierResult + if rf, ok := ret.Get(0).(func(context.Context, model.GroupSource, ...store.LayeredStoreHint) *store.LayeredStoreSupplierResult); ok { + r0 = rf(ctx, groupSource, hints...) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*store.LayeredStoreSupplierResult) + } + } + + return r0 +} + +// GroupGetAllGroupSyncablesByGroup provides a mock function with given fields: ctx, groupID, syncableType, hints +func (_m *LayeredStoreSupplier) GroupGetAllGroupSyncablesByGroup(ctx context.Context, groupID string, syncableType model.GroupSyncableType, hints ...store.LayeredStoreHint) *store.LayeredStoreSupplierResult { + _va := make([]interface{}, len(hints)) + for _i := range hints { + _va[_i] = hints[_i] + } + var _ca []interface{} + _ca = append(_ca, ctx, groupID, syncableType) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) + + var r0 *store.LayeredStoreSupplierResult + if rf, ok := ret.Get(0).(func(context.Context, string, model.GroupSyncableType, ...store.LayeredStoreHint) *store.LayeredStoreSupplierResult); ok { + r0 = rf(ctx, groupID, syncableType, hints...) 
+ } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*store.LayeredStoreSupplierResult) + } + } + + return r0 +} + +// GroupGetByRemoteID provides a mock function with given fields: ctx, remoteID, groupSource, hints +func (_m *LayeredStoreSupplier) GroupGetByRemoteID(ctx context.Context, remoteID string, groupSource model.GroupSource, hints ...store.LayeredStoreHint) *store.LayeredStoreSupplierResult { + _va := make([]interface{}, len(hints)) + for _i := range hints { + _va[_i] = hints[_i] + } + var _ca []interface{} + _ca = append(_ca, ctx, remoteID, groupSource) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) + + var r0 *store.LayeredStoreSupplierResult + if rf, ok := ret.Get(0).(func(context.Context, string, model.GroupSource, ...store.LayeredStoreHint) *store.LayeredStoreSupplierResult); ok { + r0 = rf(ctx, remoteID, groupSource, hints...) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*store.LayeredStoreSupplierResult) + } + } + + return r0 +} + +// GroupGetGroupSyncable provides a mock function with given fields: ctx, groupID, syncableID, syncableType, hints +func (_m *LayeredStoreSupplier) GroupGetGroupSyncable(ctx context.Context, groupID string, syncableID string, syncableType model.GroupSyncableType, hints ...store.LayeredStoreHint) *store.LayeredStoreSupplierResult { + _va := make([]interface{}, len(hints)) + for _i := range hints { + _va[_i] = hints[_i] + } + var _ca []interface{} + _ca = append(_ca, ctx, groupID, syncableID, syncableType) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) + + var r0 *store.LayeredStoreSupplierResult + if rf, ok := ret.Get(0).(func(context.Context, string, string, model.GroupSyncableType, ...store.LayeredStoreHint) *store.LayeredStoreSupplierResult); ok { + r0 = rf(ctx, groupID, syncableID, syncableType, hints...) 
+ } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*store.LayeredStoreSupplierResult) + } + } + + return r0 +} + +// GroupGetMemberCount provides a mock function with given fields: ctx, groupID, hints +func (_m *LayeredStoreSupplier) GroupGetMemberCount(ctx context.Context, groupID string, hints ...store.LayeredStoreHint) *store.LayeredStoreSupplierResult { + _va := make([]interface{}, len(hints)) + for _i := range hints { + _va[_i] = hints[_i] + } + var _ca []interface{} + _ca = append(_ca, ctx, groupID) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) + + var r0 *store.LayeredStoreSupplierResult + if rf, ok := ret.Get(0).(func(context.Context, string, ...store.LayeredStoreHint) *store.LayeredStoreSupplierResult); ok { + r0 = rf(ctx, groupID, hints...) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*store.LayeredStoreSupplierResult) + } + } + + return r0 +} + +// GroupGetMemberUsers provides a mock function with given fields: ctx, groupID, hints +func (_m *LayeredStoreSupplier) GroupGetMemberUsers(ctx context.Context, groupID string, hints ...store.LayeredStoreHint) *store.LayeredStoreSupplierResult { + _va := make([]interface{}, len(hints)) + for _i := range hints { + _va[_i] = hints[_i] + } + var _ca []interface{} + _ca = append(_ca, ctx, groupID) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) + + var r0 *store.LayeredStoreSupplierResult + if rf, ok := ret.Get(0).(func(context.Context, string, ...store.LayeredStoreHint) *store.LayeredStoreSupplierResult); ok { + r0 = rf(ctx, groupID, hints...) 
+ } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*store.LayeredStoreSupplierResult) + } + } + + return r0 +} + +// GroupGetMemberUsersPage provides a mock function with given fields: ctx, groupID, offset, limit, hints +func (_m *LayeredStoreSupplier) GroupGetMemberUsersPage(ctx context.Context, groupID string, offset int, limit int, hints ...store.LayeredStoreHint) *store.LayeredStoreSupplierResult { + _va := make([]interface{}, len(hints)) + for _i := range hints { + _va[_i] = hints[_i] + } + var _ca []interface{} + _ca = append(_ca, ctx, groupID, offset, limit) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) + + var r0 *store.LayeredStoreSupplierResult + if rf, ok := ret.Get(0).(func(context.Context, string, int, int, ...store.LayeredStoreHint) *store.LayeredStoreSupplierResult); ok { + r0 = rf(ctx, groupID, offset, limit, hints...) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*store.LayeredStoreSupplierResult) + } + } + + return r0 +} + +// GroupUpdate provides a mock function with given fields: ctx, group, hints +func (_m *LayeredStoreSupplier) GroupUpdate(ctx context.Context, group *model.Group, hints ...store.LayeredStoreHint) *store.LayeredStoreSupplierResult { + _va := make([]interface{}, len(hints)) + for _i := range hints { + _va[_i] = hints[_i] + } + var _ca []interface{} + _ca = append(_ca, ctx, group) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) + + var r0 *store.LayeredStoreSupplierResult + if rf, ok := ret.Get(0).(func(context.Context, *model.Group, ...store.LayeredStoreHint) *store.LayeredStoreSupplierResult); ok { + r0 = rf(ctx, group, hints...) 
+ } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*store.LayeredStoreSupplierResult) + } + } + + return r0 +} + +// GroupUpdateGroupSyncable provides a mock function with given fields: ctx, groupSyncable, hints +func (_m *LayeredStoreSupplier) GroupUpdateGroupSyncable(ctx context.Context, groupSyncable *model.GroupSyncable, hints ...store.LayeredStoreHint) *store.LayeredStoreSupplierResult { + _va := make([]interface{}, len(hints)) + for _i := range hints { + _va[_i] = hints[_i] + } + var _ca []interface{} + _ca = append(_ca, ctx, groupSyncable) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) + + var r0 *store.LayeredStoreSupplierResult + if rf, ok := ret.Get(0).(func(context.Context, *model.GroupSyncable, ...store.LayeredStoreHint) *store.LayeredStoreSupplierResult); ok { + r0 = rf(ctx, groupSyncable, hints...) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*store.LayeredStoreSupplierResult) + } + } + + return r0 +} + // Next provides a mock function with given fields: func (_m *LayeredStoreSupplier) Next() store.LayeredStoreSupplier { ret := _m.Called() @@ -30,6 +398,52 @@ func (_m *LayeredStoreSupplier) Next() store.LayeredStoreSupplier { return r0 } +// PendingAutoAddChannelMembers provides a mock function with given fields: ctx, minGroupMembersCreateAt, hints +func (_m *LayeredStoreSupplier) PendingAutoAddChannelMembers(ctx context.Context, minGroupMembersCreateAt int64, hints ...store.LayeredStoreHint) *store.LayeredStoreSupplierResult { + _va := make([]interface{}, len(hints)) + for _i := range hints { + _va[_i] = hints[_i] + } + var _ca []interface{} + _ca = append(_ca, ctx, minGroupMembersCreateAt) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) + + var r0 *store.LayeredStoreSupplierResult + if rf, ok := ret.Get(0).(func(context.Context, int64, ...store.LayeredStoreHint) *store.LayeredStoreSupplierResult); ok { + r0 = rf(ctx, minGroupMembersCreateAt, hints...) 
+ } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*store.LayeredStoreSupplierResult) + } + } + + return r0 +} + +// PendingAutoAddTeamMembers provides a mock function with given fields: ctx, minGroupMembersCreateAt, hints +func (_m *LayeredStoreSupplier) PendingAutoAddTeamMembers(ctx context.Context, minGroupMembersCreateAt int64, hints ...store.LayeredStoreHint) *store.LayeredStoreSupplierResult { + _va := make([]interface{}, len(hints)) + for _i := range hints { + _va[_i] = hints[_i] + } + var _ca []interface{} + _ca = append(_ca, ctx, minGroupMembersCreateAt) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) + + var r0 *store.LayeredStoreSupplierResult + if rf, ok := ret.Get(0).(func(context.Context, int64, ...store.LayeredStoreHint) *store.LayeredStoreSupplierResult); ok { + r0 = rf(ctx, minGroupMembersCreateAt, hints...) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*store.LayeredStoreSupplierResult) + } + } + + return r0 +} + // ReactionDelete provides a mock function with given fields: ctx, reaction, hints func (_m *LayeredStoreSupplier) ReactionDelete(ctx context.Context, reaction *model.Reaction, hints ...store.LayeredStoreHint) *store.LayeredStoreSupplierResult { _va := make([]interface{}, len(hints)) diff --git a/store/storetest/mocks/Store.go b/store/storetest/mocks/Store.go index 1f52d98ecc..10282660ed 100644 --- a/store/storetest/mocks/Store.go +++ b/store/storetest/mocks/Store.go @@ -166,6 +166,22 @@ func (_m *Store) FileInfo() store.FileInfoStore { return r0 } +// Group provides a mock function with given fields: +func (_m *Store) Group() store.GroupStore { + ret := _m.Called() + + var r0 store.GroupStore + if rf, ok := ret.Get(0).(func() store.GroupStore); ok { + r0 = rf() + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(store.GroupStore) + } + } + + return r0 +} + // Job provides a mock function with given fields: func (_m *Store) Job() store.JobStore { ret := _m.Called() diff --git a/store/storetest/store.go 
b/store/storetest/store.go index 15971a53fa..4d5d5e0428 100644 --- a/store/storetest/store.go +++ b/store/storetest/store.go @@ -46,6 +46,7 @@ type Store struct { RoleStore mocks.RoleStore SchemeStore mocks.SchemeStore TermsOfServiceStore mocks.TermsOfServiceStore + GroupStore mocks.GroupStore UserTermsOfServiceStore mocks.UserTermsOfServiceStore } @@ -79,6 +80,7 @@ func (s *Store) UserTermsOfService() store.UserTermsOfServiceStore { return &s.U func (s *Store) ChannelMemberHistory() store.ChannelMemberHistoryStore { return &s.ChannelMemberHistoryStore } +func (s *Store) Group() store.GroupStore { return &s.GroupStore } func (s *Store) MarkSystemRanUnitTests() { /* do nothing */ } func (s *Store) Close() { /* do nothing */ } func (s *Store) LockToMaster() { /* do nothing */ } diff --git a/tests/add-groups.ldif b/tests/add-groups.ldif new file mode 100644 index 0000000000..e6202871bb --- /dev/null +++ b/tests/add-groups.ldif @@ -0,0 +1,81 @@ +dn: ou=testgroups,dc=mm,dc=test,dc=com +changetype: add +objectclass: organizationalunit + +# groupOfNames +dn: cn=outsiders,ou=testgroups,dc=mm,dc=test,dc=com +changetype: add +objectclass: groupOfNames +member: uid=board.three,ou=testusers,dc=mm,dc=test,dc=com + +dn: cn=board,ou=testgroups,dc=mm,dc=test,dc=com +changetype: add +objectclass: groupOfNames +member: uid=board.one,ou=testusers,dc=mm,dc=test,dc=com +member: uid=board.two,ou=testusers,dc=mm,dc=test,dc=com +member: cn=outsiders,ou=testgroups,dc=mm,dc=test,dc=com + +dn: cn=executive,ou=testgroups,dc=mm,dc=test,dc=com +changetype: add +objectclass: groupOfNames +member: uid=exec.one,ou=testusers,dc=mm,dc=test,dc=com +member: uid=exec.two,ou=testusers,dc=mm,dc=test,dc=com +member: cn=board,ou=testgroups,dc=mm,dc=test,dc=com + +dn: cn=tgroup-84,ou=testgroups,dc=mm,dc=test,dc=com +changetype: add +objectclass: groupOfNames +member: cn=tgroup-9,ou=testgroups,dc=mm,dc=test,dc=com +member: uid=test.five,ou=testusers,dc=mm,dc=test,dc=com + +dn: 
cn=tgroup-9,ou=testgroups,dc=mm,dc=test,dc=com +changetype: add +objectclass: groupOfNames +member: cn=tgroup-97,ou=testgroups,dc=mm,dc=test,dc=com + +dn: cn=tgroup-97,ou=testgroups,dc=mm,dc=test,dc=com +changetype: add +objectclass: groupOfNames +member: uid=test.four,ou=testusers,dc=mm,dc=test,dc=com + +# groupOfUniqueNames +dn: cn=tgroup,ou=testgroups,dc=mm,dc=test,dc=com +changetype: add +objectclass: groupOfUniqueNames +uniqueMember: uid=test.one,ou=testusers,dc=mm,dc=test,dc=com + +dn: cn=ugroup,cn=tgroup,ou=testgroups,dc=mm,dc=test,dc=com +changetype: add +objectclass: groupOfUniqueNames +uniqueMember: uid=test.two,ou=testusers,dc=mm,dc=test,dc=com + +dn: cn=vgroup,cn=tgroup,ou=testgroups,dc=mm,dc=test,dc=com +changetype: add +objectclass: groupOfUniqueNames +uniqueMember: uid=test.three,ou=testusers,dc=mm,dc=test,dc=com + +# Adds a group with a cycle +dn: cn=team-one-a,ou=testgroups,dc=mm,dc=test,dc=com +changetype: add +objectclass: groupOfUniqueNames +uniqueMember: uid=dev.four,ou=testusers,dc=mm,dc=test,dc=com +uniqueMember: cn=developers,ou=testgroups,dc=mm,dc=test,dc=com + +dn: cn=team-one,ou=testgroups,dc=mm,dc=test,dc=com +changetype: add +objectclass: groupOfUniqueNames +uniqueMember: uid=dev.one,ou=testusers,dc=mm,dc=test,dc=com +uniqueMember: uid=dev.three,ou=testusers,dc=mm,dc=test,dc=com +uniqueMember: cn=team-one-a,ou=testgroups,dc=mm,dc=test,dc=com + +dn: cn=team-two,ou=testgroups,dc=mm,dc=test,dc=com +changetype: add +objectclass: groupOfUniqueNames +uniqueMember: uid=dev.two,ou=testusers,dc=mm,dc=test,dc=com + +dn: cn=developers,ou=testgroups,dc=mm,dc=test,dc=com +changetype: add +objectclass: groupOfUniqueNames +uniqueMember: uid=dev-ops.one,ou=testusers,dc=mm,dc=test,dc=com +uniqueMember: cn=team-one,ou=testgroups,dc=mm,dc=test,dc=com +uniqueMember: cn=team-two,ou=testgroups,dc=mm,dc=test,dc=com \ No newline at end of file diff --git a/tests/add-users.ldif b/tests/add-users.ldif new file mode 100644 index 0000000000..d96d0e46c1 --- 
/dev/null +++ b/tests/add-users.ldif @@ -0,0 +1,130 @@ +dn: ou=testusers,dc=mm,dc=test,dc=com +changetype: add +objectclass: organizationalunit + +# generic test users +dn: uid=test.one,ou=testusers,dc=mm,dc=test,dc=com +changetype: add +objectclass: iNetOrgPerson +sn: User +cn: Test1 +mail: success+testone@simulator.amazonses.com +userPassword: Password1 + +dn: uid=test.two,ou=testusers,dc=mm,dc=test,dc=com +changetype: add +objectclass: iNetOrgPerson +sn: User +cn: Test2 +mail: success+testtwo@simulator.amazonses.com +userPassword: Password1 + +dn: uid=test.three,ou=testusers,dc=mm,dc=test,dc=com +changetype: add +objectclass: iNetOrgPerson +sn: User +cn: Test3 +mail: success+testthree@simulator.amazonses.com +userPassword: Password1 + +dn: uid=test.four,ou=testusers,dc=mm,dc=test,dc=com +changetype: add +objectclass: iNetOrgPerson +sn: User +cn: Test4 +mail: success+testfour@simulator.amazonses.com +userPassword: Password1 + +dn: uid=test.five,ou=testusers,dc=mm,dc=test,dc=com +changetype: add +objectclass: iNetOrgPerson +sn: User +cn: Test5 +mail: success+testfive@simulator.amazonses.com +userPassword: Password1 + + +# developers +dn: uid=dev-ops.one,ou=testusers,dc=mm,dc=test,dc=com +changetype: add +objectclass: iNetOrgPerson +sn: User +cn: Dev3 +mail: success+devopsone@simulator.amazonses.com +userPassword: Password1 + +dn: uid=dev.one,ou=testusers,dc=mm,dc=test,dc=com +changetype: add +objectclass: iNetOrgPerson +sn: User +cn: Dev1 +mail: success+devone@simulator.amazonses.com +userPassword: Password1 + +dn: uid=dev.two,ou=testusers,dc=mm,dc=test,dc=com +changetype: add +objectclass: iNetOrgPerson +sn: User +cn: Dev2 +mail: success+devtwo@simulator.amazonses.com +userPassword: Password1 + +dn: uid=dev.three,ou=testusers,dc=mm,dc=test,dc=com +changetype: add +objectclass: iNetOrgPerson +sn: User +cn: Dev3 +mail: success+devthree@simulator.amazonses.com +userPassword: Password1 + +dn: uid=dev.four,ou=testusers,dc=mm,dc=test,dc=com +changetype: add 
+objectclass: iNetOrgPerson +sn: User +cn: Dev4 +mail: success+devfour@simulator.amazonses.com +userPassword: Password1 + + +# executive +dn: uid=exec.one,ou=testusers,dc=mm,dc=test,dc=com +changetype: add +objectclass: iNetOrgPerson +sn: User +cn: Exec1 +mail: success+execone@simulator.amazonses.com +userPassword: Password1 + +dn: uid=exec.two,ou=testusers,dc=mm,dc=test,dc=com +changetype: add +objectclass: iNetOrgPerson +sn: User +cn: Exec2 +mail: success+exectwo@simulator.amazonses.com +userPassword: Password1 + + +# board of directors +dn: uid=board.one,ou=testusers,dc=mm,dc=test,dc=com +changetype: add +objectclass: iNetOrgPerson +sn: User +cn: Board1 +mail: success+boardone@simulator.amazonses.com +userPassword: Password1 + +dn: uid=board.two,ou=testusers,dc=mm,dc=test,dc=com +changetype: add +objectclass: iNetOrgPerson +sn: User +cn: Board2 +mail: success+boardtwo@simulator.amazonses.com +userPassword: Password1 + +dn: uid=board.three,ou=testusers,dc=mm,dc=test,dc=com +changetype: add +objectclass: iNetOrgPerson +sn: User +cn: Board3 +mail: success+boardthree@simulator.amazonses.com +userPassword: Password1 \ No newline at end of file diff --git a/tests/qa-data-graph.png b/tests/qa-data-graph.png new file mode 100644 index 0000000000..1d8fe16e13 Binary files /dev/null and b/tests/qa-data-graph.png differ diff --git a/tests/qa-data.ldif b/tests/qa-data.ldif new file mode 100644 index 0000000000..31842c91df --- /dev/null +++ b/tests/qa-data.ldif @@ -0,0 +1,103 @@ +dn: ou=groups,dc=mm,dc=test,dc=com +changetype: add +objectclass: organizationalunit + +dn: ou=users,dc=mm,dc=test,dc=com +changetype: add +objectclass: organizationalunit + +dn: uid=corey.test,ou=users,dc=mm,dc=test,dc=com +changetype: add +objectclass: iNetOrgPerson +cn: Corey +sn: Test +mail: corey.test@simulator.amazonses.com +userPassword: Password1 + +dn: uid=william.test,ou=users,dc=mm,dc=test,dc=com +changetype: add +objectclass: iNetOrgPerson +cn: William +sn: Test +mail: 
william.test@simulator.amazonses.com +userPassword: Password1 + +dn: uid=linda.test,ou=users,dc=mm,dc=test,dc=com +changetype: add +objectclass: iNetOrgPerson +cn: Linda +sn: Test +mail: linda.test@simulator.amazonses.com +userPassword: Password1 + +dn: uid=lindy.test,ou=users,dc=mm,dc=test,dc=com +changetype: add +objectclass: iNetOrgPerson +cn: Lindy +sn: Test +mail: lindy.test@simulator.amazonses.com +userPassword: Password1 + +dn: uid=george.test,ou=users,dc=mm,dc=test,dc=com +changetype: add +objectclass: iNetOrgPerson +cn: George +sn: Test +mail: george.test@simulator.amazonses.com +userPassword: Password1 + +dn: uid=jesus.test,ou=users,dc=mm,dc=test,dc=com +changetype: add +objectclass: iNetOrgPerson +cn: Jesus +sn: Test +mail: jesus.test@simulator.amazonses.com +userPassword: Password1 + +dn: uid=carlos.test,ou=users,dc=mm,dc=test,dc=com +changetype: add +objectclass: iNetOrgPerson +cn: Carlos +sn: Test +mail: carlos.test@simulator.amazonses.com +userPassword: Password1 + +dn: uid=jesse.test,ou=users,dc=mm,dc=test,dc=com +changetype: add +objectclass: iNetOrgPerson +cn: Jesse +sn: Test +mail: jesse.test@simulator.amazonses.com +userPassword: Password1 + +dn: cn=engineering,ou=groups,dc=mm,dc=test,dc=com +changetype: add +objectclass: groupOfNames +member: uid=corey.test,ou=users,dc=mm,dc=test,dc=com +member: cn=qa,ou=groups,dc=mm,dc=test,dc=com +member: cn=developers,ou=groups,dc=mm,dc=test,dc=com + +dn: cn=qa,ou=groups,dc=mm,dc=test,dc=com +changetype: add +objectclass: groupOfNames +member: uid=lindy.test,ou=users,dc=mm,dc=test,dc=com +member: uid=linda.test,ou=users,dc=mm,dc=test,dc=com + +dn: cn=developers,ou=groups,dc=mm,dc=test,dc=com +changetype: add +objectclass: groupOfNames +member: uid=william.test,ou=users,dc=mm,dc=test,dc=com +member: cn=xyz,ou=groups,dc=mm,dc=test,dc=com +member: cn=abc,ou=groups,dc=mm,dc=test,dc=com + +dn: cn=xyz,ou=groups,dc=mm,dc=test,dc=com +changetype: add +objectclass: groupOfNames +member: 
uid=jesus.test,ou=users,dc=mm,dc=test,dc=com +member: uid=george.test,ou=users,dc=mm,dc=test,dc=com + +dn: cn=abc,ou=groups,dc=mm,dc=test,dc=com +changetype: add +objectclass: groupOfNames +member: uid=carlos.test,ou=users,dc=mm,dc=test,dc=com +member: uid=jesse.test,ou=users,dc=mm,dc=test,dc=com diff --git a/tests/test-config.json b/tests/test-config.json new file mode 100644 index 0000000000..8574f5f131 --- /dev/null +++ b/tests/test-config.json @@ -0,0 +1,402 @@ +{ + "ServiceSettings": { + "SiteURL": "", + "WebsocketURL": "", + "LicenseFileLocation": "", + "ListenAddress": ":8065", + "ConnectionSecurity": "", + "TLSCertFile": "", + "TLSKeyFile": "", + "UseLetsEncrypt": false, + "LetsEncryptCertificateCacheFile": "./config/letsencrypt.cache", + "Forward80To443": false, + "ReadTimeout": 300, + "WriteTimeout": 300, + "MaximumLoginAttempts": 10, + "GoroutineHealthThreshold": -1, + "GoogleDeveloperKey": "", + "EnableOAuthServiceProvider": false, + "EnableIncomingWebhooks": true, + "EnableOutgoingWebhooks": true, + "EnableCommands": true, + "EnableOnlyAdminIntegrations": true, + "EnablePostUsernameOverride": false, + "EnablePostIconOverride": false, + "EnableLinkPreviews": false, + "EnableTesting": false, + "EnableDeveloper": false, + "EnableSecurityFixAlert": true, + "EnableInsecureOutgoingConnections": false, + "AllowedUntrustedInternalConnections": "", + "EnableMultifactorAuthentication": false, + "EnforceMultifactorAuthentication": false, + "EnableUserAccessTokens": false, + "AllowCorsFrom": "", + "AllowCookiesForSubdomains": false, + "SessionLengthWebInDays": 30, + "SessionLengthMobileInDays": 30, + "SessionLengthSSOInDays": 30, + "SessionCacheInMinutes": 10, + "SessionIdleTimeoutInMinutes": 0, + "WebsocketSecurePort": 443, + "WebsocketPort": 80, + "WebserverMode": "gzip", + "EnableCustomEmoji": false, + "EnableEmojiPicker": true, + "RestrictCustomEmojiCreation": "all", + "RestrictPostDelete": "all", + "AllowEditPost": "always", + "PostEditTimeLimit": -1, + 
"TimeBetweenUserTypingUpdatesMilliseconds": 5000, + "EnablePostSearch": true, + "EnableUserTypingMessages": true, + "EnableChannelViewedMessages": true, + "EnableUserStatuses": true, + "ExperimentalEnableAuthenticationTransfer": true, + "ClusterLogTimeoutMilliseconds": 2000, + "CloseUnusedDirectMessages": false, + "EnablePreviewFeatures": true, + "EnableTutorial": true, + "ExperimentalEnableDefaultChannelLeaveJoinMessages": true, + "ExperimentalGroupUnreadChannels": "disabled", + "ImageProxyType": "", + "ImageProxyURL": "", + "ImageProxyOptions": "", + "EnableAPITeamDeletion": false, + "ExperimentalEnableHardenedMode": false, + "ExperimentalLdapGroupSync": true + }, + "TeamSettings": { + "SiteName": "Mattermost", + "MaxUsersPerTeam": 50, + "EnableTeamCreation": true, + "EnableUserCreation": true, + "EnableOpenServer": false, + "EnableUserDeactivation": false, + "RestrictCreationToDomains": "", + "EnableCustomBrand": false, + "CustomBrandText": "", + "CustomDescriptionText": "", + "RestrictDirectMessage": "any", + "RestrictTeamInvite": "all", + "RestrictPublicChannelManagement": "all", + "RestrictPrivateChannelManagement": "all", + "RestrictPublicChannelCreation": "all", + "RestrictPrivateChannelCreation": "all", + "RestrictPublicChannelDeletion": "all", + "RestrictPrivateChannelDeletion": "all", + "RestrictPrivateChannelManageMembers": "all", + "EnableXToLeaveChannelsFromLHS": false, + "UserStatusAwayTimeout": 300, + "MaxChannelsPerTeam": 2000, + "MaxNotificationsPerChannel": 1000, + "EnableConfirmNotificationsToChannel": true, + "TeammateNameDisplay": "username", + "ExperimentalEnableAutomaticReplies": false, + "ExperimentalHideTownSquareinLHS": false, + "ExperimentalTownSquareIsReadOnly": false, + "ExperimentalPrimaryTeam": "" + }, + "ClientRequirements": { + "AndroidLatestVersion": "", + "AndroidMinVersion": "", + "DesktopLatestVersion": "", + "DesktopMinVersion": "", + "IosLatestVersion": "", + "IosMinVersion": "" + }, + "SqlSettings": { + "DriverName": 
"mysql", + "DataSource": "mmuser:mostest@tcp(dockerhost:3306)/mattermost_test?charset=utf8mb4,utf8\u0026readTimeout=30s\u0026writeTimeout=30s", + "DataSourceReplicas": [], + "DataSourceSearchReplicas": [], + "MaxIdleConns": 20, + "MaxOpenConns": 300, + "Trace": false, + "AtRestEncryptKey": "jdh9iergmse3w9mt53snasugmmi9r6it", + "QueryTimeout": 30 + }, + "LogSettings": { + "EnableConsole": true, + "ConsoleLevel": "DEBUG", + "ConsoleJson": true, + "EnableFile": true, + "FileLevel": "INFO", + "FileJson": true, + "FileLocation": "", + "EnableWebhookDebugging": true, + "EnableDiagnostics": true + }, + "PasswordSettings": { + "MinimumLength": 5, + "Lowercase": false, + "Number": false, + "Uppercase": false, + "Symbol": false + }, + "FileSettings": { + "EnableFileAttachments": true, + "EnableMobileUpload": true, + "EnableMobileDownload": true, + "MaxFileSize": 52428800, + "DriverName": "local", + "Directory": "./data/", + "EnablePublicLink": false, + "PublicLinkSalt": "3xh7ztscuezjp1jkdjybtejrtw59xjt1", + "InitialFont": "luximbi.ttf", + "AmazonS3AccessKeyId": "", + "AmazonS3SecretAccessKey": "", + "AmazonS3Bucket": "", + "AmazonS3Region": "", + "AmazonS3Endpoint": "s3.amazonaws.com", + "AmazonS3SSL": true, + "AmazonS3SignV2": false, + "AmazonS3SSE": false, + "AmazonS3Trace": false + }, + "EmailSettings": { + "EnableSignUpWithEmail": true, + "EnableSignInWithEmail": true, + "EnableSignInWithUsername": true, + "SendEmailNotifications": true, + "UseChannelInEmailNotifications": false, + "RequireEmailVerification": false, + "FeedbackName": "", + "FeedbackEmail": "test@example.com", + "FeedbackOrganization": "", + "EnableSMTPAuth": false, + "SMTPUsername": "", + "SMTPPassword": "", + "SMTPServer": "dockerhost", + "SMTPPort": "2500", + "ConnectionSecurity": "", + "InviteSalt": "n3mceqsek4j5ichs5hw9sudwx3cfbtqa", + "SendPushNotifications": false, + "PushNotificationServer": "", + "PushNotificationContents": "generic", + "EnableEmailBatching": false, + "EmailBatchingBufferSize": 
256, + "EmailBatchingInterval": 30, + "EnablePreviewModeBanner": true, + "SkipServerCertificateVerification": false, + "EmailNotificationContentsType": "full", + "LoginButtonColor": "", + "LoginButtonBorderColor": "", + "LoginButtonTextColor": "" + }, + "RateLimitSettings": { + "Enable": false, + "PerSec": 10, + "MaxBurst": 100, + "MemoryStoreSize": 10000, + "VaryByRemoteAddr": true, + "VaryByUser": false, + "VaryByHeader": "" + }, + "PrivacySettings": { + "ShowEmailAddress": true, + "ShowFullName": true + }, + "SupportSettings": { + "TermsOfServiceLink": "https://about.mattermost.com/default-terms/", + "PrivacyPolicyLink": "https://about.mattermost.com/default-privacy-policy/", + "AboutLink": "https://about.mattermost.com/default-about/", + "HelpLink": "https://about.mattermost.com/default-help/", + "ReportAProblemLink": "https://about.mattermost.com/default-report-a-problem/", + "SupportEmail": "feedback@mattermost.com" + }, + "AnnouncementSettings": { + "EnableBanner": false, + "BannerText": "", + "BannerColor": "#f2a93b", + "BannerTextColor": "#333333", + "AllowBannerDismissal": true + }, + "ThemeSettings": { + "EnableThemeSelection": true, + "DefaultTheme": "default", + "AllowCustomThemes": true, + "AllowedThemes": [] + }, + "GitLabSettings": { + "Enable": false, + "Secret": "", + "Id": "", + "Scope": "", + "AuthEndpoint": "", + "TokenEndpoint": "", + "UserApiEndpoint": "" + }, + "GoogleSettings": { + "Enable": false, + "Secret": "", + "Id": "", + "Scope": "profile email", + "AuthEndpoint": "https://accounts.google.com/o/oauth2/v2/auth", + "TokenEndpoint": "https://www.googleapis.com/oauth2/v4/token", + "UserApiEndpoint": "https://www.googleapis.com/plus/v1/people/me" + }, + "Office365Settings": { + "Enable": false, + "Secret": "", + "Id": "", + "Scope": "User.Read", + "AuthEndpoint": "https://login.microsoftonline.com/common/oauth2/v2.0/authorize", + "TokenEndpoint": "https://login.microsoftonline.com/common/oauth2/v2.0/token", + "UserApiEndpoint": 
"https://graph.microsoft.com/v1.0/me" + }, + "LdapSettings": { + "Enable": false, + "EnableSync": false, + "LdapServer": "", + "LdapPort": 389, + "ConnectionSecurity": "", + "BaseDN": "", + "BindUsername": "", + "BindPassword": "", + "UserFilter": "", + "FirstNameAttribute": "", + "LastNameAttribute": "", + "EmailAttribute": "", + "UsernameAttribute": "", + "NicknameAttribute": "", + "IdAttribute": "", + "PositionAttribute": "", + "LoginIdAttribute": "", + "SyncIntervalMinutes": 60, + "SkipCertificateVerification": false, + "QueryTimeout": 60, + "MaxPageSize": 0, + "LoginFieldName": "", + "LoginButtonColor": "", + "LoginButtonBorderColor": "", + "LoginButtonTextColor": "" + }, + "ComplianceSettings": { + "Enable": false, + "Directory": "./data/", + "EnableDaily": false + }, + "LocalizationSettings": { + "DefaultServerLocale": "en", + "DefaultClientLocale": "en", + "AvailableLocales": "" + }, + "SamlSettings": { + "Enable": false, + "EnableSyncWithLdap": false, + "Verify": true, + "Encrypt": true, + "IdpUrl": "", + "IdpDescriptorUrl": "", + "AssertionConsumerServiceURL": "", + "ScopingIDPProviderId": "", + "ScopingIDPName": "", + "IdpCertificateFile": "", + "PublicCertificateFile": "", + "PrivateKeyFile": "", + "FirstNameAttribute": "", + "LastNameAttribute": "", + "EmailAttribute": "", + "UsernameAttribute": "", + "NicknameAttribute": "", + "LocaleAttribute": "", + "PositionAttribute": "", + "LoginButtonText": "With SAML", + "LoginButtonColor": "", + "LoginButtonBorderColor": "", + "LoginButtonTextColor": "" + }, + "NativeAppSettings": { + "AppDownloadLink": "https://about.mattermost.com/downloads/", + "AndroidAppDownloadLink": "https://about.mattermost.com/mattermost-android-app/", + "IosAppDownloadLink": "https://about.mattermost.com/mattermost-ios-app/" + }, + "ClusterSettings": { + "Enable": false, + "ClusterName": "", + "OverrideHostname": "", + "UseIpAddress": true, + "UseExperimentalGossip": false, + "ReadOnlyConfig": true, + "GossipPort": 8074, + 
"StreamingPort": 8075, + "MaxIdleConns": 100, + "MaxIdleConnsPerHost": 128, + "IdleConnTimeoutMilliseconds": 90000 + }, + "MetricsSettings": { + "Enable": false, + "BlockProfileRate": 0, + "ListenAddress": ":8067" + }, + "ExperimentalSettings": { + "ClientSideCertEnable": false, + "ClientSideCertCheck": "secondary" + }, + "AnalyticsSettings": { + "MaxUsersForStatistics": 2500 + }, + "ElasticsearchSettings": { + "ConnectionUrl": "http://dockerhost:9200", + "Username": "elastic", + "Password": "changeme", + "EnableIndexing": false, + "EnableSearching": false, + "Sniff": true, + "PostIndexReplicas": 1, + "PostIndexShards": 1, + "AggregatePostsAfterDays": 365, + "PostsAggregatorJobStartTime": "03:00", + "IndexPrefix": "", + "LiveIndexingBatchSize": 1, + "BulkIndexingTimeWindowSeconds": 3600, + "RequestTimeoutSeconds": 30 + }, + "DataRetentionSettings": { + "EnableMessageDeletion": false, + "EnableFileDeletion": false, + "MessageRetentionDays": 365, + "FileRetentionDays": 365, + "DeletionJobStartTime": "02:00" + }, + "MessageExportSettings": { + "EnableExport": false, + "ExportFormat": "actiance", + "DailyRunTime": "01:00", + "ExportFromTimestamp": 0, + "BatchSize": 10000, + "GlobalRelaySettings": { + "CustomerType": "A9", + "SmtpUsername": "", + "SmtpPassword": "", + "EmailAddress": "" + } + }, + "JobSettings": { + "RunJobs": true, + "RunScheduler": true + }, + "PluginSettings": { + "Enable": true, + "EnableUploads": true, + "Directory": "./test-plugins", + "ClientDirectory": "./test-client-plugins", + "Plugins": {}, + "PluginStates": { + "jira": { + "Enable": true + }, + "testplugin": { + "Enable": false + } + } + }, + "DisplaySettings": { + "CustomUrlSchemes": [], + "ExperimentalTimezone": false + }, + "TimezoneSettings": { + "SupportedTimezonesPath": "timezones.json" + } +} \ No newline at end of file diff --git a/tests/test-data-graph.png b/tests/test-data-graph.png new file mode 100644 index 0000000000..57247a0f30 Binary files /dev/null and 
b/tests/test-data-graph.png differ diff --git a/utils/license.go b/utils/license.go index 27365d4477..338ad17586 100644 --- a/utils/license.go +++ b/utils/license.go @@ -133,6 +133,7 @@ func GetClientLicense(l *model.License) map[string]string { props["SkuShortName"] = l.SkuShortName props["Users"] = strconv.Itoa(*l.Features.Users) props["LDAP"] = strconv.FormatBool(*l.Features.LDAP) + props["LDAPGroups"] = strconv.FormatBool(*l.Features.LDAPGroups) props["MFA"] = strconv.FormatBool(*l.Features.MFA) props["SAML"] = strconv.FormatBool(*l.Features.SAML) props["Cluster"] = strconv.FormatBool(*l.Features.Cluster) diff --git a/web/context.go b/web/context.go index 1744ae3905..38557261ec 100644 --- a/web/context.go +++ b/web/context.go @@ -519,3 +519,47 @@ func (c *Context) RequireRoleName() *Context { return c } + +func (c *Context) RequireGroupId() *Context { + if c.Err != nil { + return c + } + + if len(c.Params.GroupId) != 26 { + c.SetInvalidUrlParam("group_id") + } + return c +} + +func (c *Context) RequireRemoteId() *Context { + if c.Err != nil { + return c + } + + if len(c.Params.RemoteId) == 0 { + c.SetInvalidUrlParam("remote_id") + } + return c +} + +func (c *Context) RequireSyncableId() *Context { + if c.Err != nil { + return c + } + + if len(c.Params.SyncableId) != 26 { + c.SetInvalidUrlParam("syncable_id") + } + return c +} + +func (c *Context) RequireSyncableType() *Context { + if c.Err != nil { + return c + } + + if c.Params.SyncableType != model.GroupSyncableTypeTeam && c.Params.SyncableType != model.GroupSyncableTypeChannel { + c.SetInvalidUrlParam("syncable_type") + } + return c +} diff --git a/web/params.go b/web/params.go index 2c30ba4c03..286286c4d4 100644 --- a/web/params.go +++ b/web/params.go @@ -9,6 +9,7 @@ import ( "strings" "github.com/gorilla/mux" + "github.com/mattermost/mattermost-server/model" ) const ( @@ -49,10 +50,14 @@ type Params struct { RoleName string SchemeId string Scope string + GroupId string Page int PerPage int LogsPerPage int 
Permanent bool + RemoteId string + SyncableId string + SyncableType model.GroupSyncableType } func ParamsFromRequest(r *http.Request) *Params { @@ -173,6 +178,14 @@ func ParamsFromRequest(r *http.Request) *Params { params.SchemeId = val } + if val, ok := props["group_id"]; ok { + params.GroupId = val + } + + if val, ok := props["remote_id"]; ok { + params.RemoteId = val + } + params.Scope = query.Get("scope") if val, err := strconv.Atoi(query.Get("page")); err != nil || val < 0 { @@ -201,5 +214,17 @@ func ParamsFromRequest(r *http.Request) *Params { params.LogsPerPage = val } + if val, ok := props["syncable_id"]; ok { + params.SyncableId = val + } + + if val, ok := props["syncable_type"]; ok { + switch val { + case "teams": + params.SyncableType = model.GroupSyncableTypeTeam + case "channels": + params.SyncableType = model.GroupSyncableTypeChannel + } + } return params }