[MM-22133] Allow exporting and importing archived channels (#23724)

This commit is contained in:
Julien Tant 2023-08-25 17:55:47 -07:00 committed by GitHub
parent f787fd6336
commit 9d569df9b4
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
22 changed files with 394 additions and 109 deletions

View File

@ -97,17 +97,17 @@ func (a *App) BulkExport(ctx request.CTX, writer io.Writer, outPath string, job
}
ctx.Logger().Info("Bulk export: exporting channels")
if err = a.exportAllChannels(ctx, job, writer, teamNames); err != nil {
if err = a.exportAllChannels(ctx, job, writer, teamNames, opts.IncludeArchivedChannels); err != nil {
return err
}
ctx.Logger().Info("Bulk export: exporting users")
if err = a.exportAllUsers(ctx, job, writer); err != nil {
if err = a.exportAllUsers(ctx, job, writer, opts.IncludeArchivedChannels); err != nil {
return err
}
ctx.Logger().Info("Bulk export: exporting posts")
attachments, err := a.exportAllPosts(ctx, job, writer, opts.IncludeAttachments)
attachments, err := a.exportAllPosts(ctx, job, writer, opts.IncludeAttachments, opts.IncludeArchivedChannels)
if err != nil {
return err
}
@ -219,7 +219,7 @@ func (a *App) exportAllTeams(ctx request.CTX, job *model.Job, writer io.Writer)
return teamNames, nil
}
func (a *App) exportAllChannels(ctx request.CTX, job *model.Job, writer io.Writer, teamNames map[string]bool) *model.AppError {
func (a *App) exportAllChannels(ctx request.CTX, job *model.Job, writer io.Writer, teamNames map[string]bool, withArchived bool) *model.AppError {
afterId := strings.Repeat("0", 26)
cnt := 0
for {
@ -239,7 +239,7 @@ func (a *App) exportAllChannels(ctx request.CTX, job *model.Job, writer io.Write
afterId = channel.Id
// Skip deleted.
if channel.DeleteAt != 0 {
if channel.DeleteAt != 0 && !withArchived {
continue
}
// Skip channels on deleted teams.
@ -257,7 +257,7 @@ func (a *App) exportAllChannels(ctx request.CTX, job *model.Job, writer io.Write
return nil
}
func (a *App) exportAllUsers(ctx request.CTX, job *model.Job, writer io.Writer) *model.AppError {
func (a *App) exportAllUsers(ctx request.CTX, job *model.Job, writer io.Writer, includeArchivedChannels bool) *model.AppError {
afterId := strings.Repeat("0", 26)
cnt := 0
for {
@ -319,7 +319,7 @@ func (a *App) exportAllUsers(ctx request.CTX, job *model.Job, writer io.Writer)
userLine.User.NotifyProps = a.buildUserNotifyProps(user.NotifyProps)
// Do the Team Memberships.
members, err := a.buildUserTeamAndChannelMemberships(user.Id)
members, err := a.buildUserTeamAndChannelMemberships(user.Id, includeArchivedChannels)
if err != nil {
return err
}
@ -335,7 +335,7 @@ func (a *App) exportAllUsers(ctx request.CTX, job *model.Job, writer io.Writer)
return nil
}
func (a *App) buildUserTeamAndChannelMemberships(userID string) (*[]imports.UserTeamImportData, *model.AppError) {
func (a *App) buildUserTeamAndChannelMemberships(userID string, includeArchivedChannels bool) (*[]imports.UserTeamImportData, *model.AppError) {
var memberships []imports.UserTeamImportData
members, err := a.Srv().Store().Team().GetTeamMembersForExport(userID)
@ -353,7 +353,7 @@ func (a *App) buildUserTeamAndChannelMemberships(userID string) (*[]imports.User
memberData := ImportUserTeamDataFromTeamMember(member)
// Do the Channel Memberships.
channelMembers, err := a.buildUserChannelMemberships(userID, member.TeamId)
channelMembers, err := a.buildUserChannelMemberships(userID, member.TeamId, includeArchivedChannels)
if err != nil {
return nil, err
}
@ -372,8 +372,8 @@ func (a *App) buildUserTeamAndChannelMemberships(userID string) (*[]imports.User
return &memberships, nil
}
func (a *App) buildUserChannelMemberships(userID string, teamID string) (*[]imports.UserChannelImportData, *model.AppError) {
members, nErr := a.Srv().Store().Channel().GetChannelMembersForExport(userID, teamID)
func (a *App) buildUserChannelMemberships(userID string, teamID string, includeArchivedChannels bool) (*[]imports.UserChannelImportData, *model.AppError) {
members, nErr := a.Srv().Store().Channel().GetChannelMembersForExport(userID, teamID, includeArchivedChannels)
if nErr != nil {
return nil, model.NewAppError("buildUserChannelMemberships", "app.channel.get_members.app_error", nil, "", http.StatusInternalServerError).Wrap(nErr)
}
@ -411,7 +411,7 @@ func (a *App) buildUserNotifyProps(notifyProps model.StringMap) *imports.UserNot
}
}
func (a *App) exportAllPosts(ctx request.CTX, job *model.Job, writer io.Writer, withAttachments bool) ([]imports.AttachmentImportData, *model.AppError) {
func (a *App) exportAllPosts(ctx request.CTX, job *model.Job, writer io.Writer, withAttachments bool, includeArchivedChannels bool) ([]imports.AttachmentImportData, *model.AppError) {
var attachments []imports.AttachmentImportData
afterId := strings.Repeat("0", 26)
var postProcessCount uint64
@ -424,7 +424,7 @@ func (a *App) exportAllPosts(ctx request.CTX, job *model.Job, writer io.Writer,
logCheckpoint = time.Now()
}
posts, nErr := a.Srv().Store().Post().GetParentsForExportAfter(1000, afterId)
posts, nErr := a.Srv().Store().Post().GetParentsForExportAfter(1000, afterId, includeArchivedChannels)
if nErr != nil {
return nil, model.NewAppError("exportAllPosts", "app.post.get_posts.app_error", nil, "", http.StatusInternalServerError).Wrap(nErr)
}

View File

@ -35,6 +35,7 @@ func ImportLineFromChannel(channel *model.ChannelForExport) *imports.LineImportD
Header: &channel.Header,
Purpose: &channel.Purpose,
Scheme: channel.SchemeName,
DeletedAt: &channel.DeleteAt,
},
}
}

View File

@ -102,7 +102,7 @@ func TestExportUserChannels(t *testing.T) {
require.NoError(t, err)
th.App.UpdateChannelMemberNotifyProps(th.Context, notifyProps, channel.Id, user.Id)
exportData, appErr := th.App.buildUserChannelMemberships(user.Id, team.Id)
exportData, appErr := th.App.buildUserChannelMemberships(user.Id, team.Id, false)
require.Nil(t, appErr)
assert.Equal(t, len(*exportData), 3)
for _, data := range *exportData {
@ -767,3 +767,38 @@ func TestExportDeletedTeams(t *testing.T) {
assert.NotContains(t, team.Id, team1.Id)
}
}
// TestExportArchivedChannels verifies the round trip for archived channels:
// a bulk export run with IncludeArchivedChannels set must include an archived
// (soft-deleted) channel, and importing that export into a fresh server must
// recreate the channel (visible only when IncludeDeleted is requested).
func TestExportArchivedChannels(t *testing.T) {
	th1 := Setup(t).InitBasic()
	defer th1.TearDown()

	// Create a channel with a post, then archive it so the export has an
	// archived channel to pick up.
	archivedChannel := th1.CreateChannel(th1.Context, th1.BasicTeam)
	th1.CreatePost(archivedChannel)
	appErr := th1.App.DeleteChannel(th1.Context, archivedChannel, th1.SystemAdminUser.Id)
	require.Nil(t, appErr)

	var b bytes.Buffer
	appErr = th1.App.BulkExport(th1.Context, &b, "somePath", nil, model.BulkExportOpts{
		IncludeArchivedChannels: true,
	})
	require.Nil(t, appErr)

	// Import the export into a second, empty server.
	th2 := Setup(t)
	defer th2.TearDown()
	err, lineNumber := th2.App.BulkImport(th2.Context, &b, nil, false, 5)
	assert.Nil(t, err)
	assert.Equal(t, 0, lineNumber)

	// Use th2's context here: the lookup is against the second server.
	// (The original passed th1.Context to th2.App, mixing the two helpers.)
	channels2, err := th2.App.GetAllChannels(th2.Context, 0, 10, model.ChannelSearchOpts{
		IncludeDeleted: true,
	})
	assert.Nil(t, err)

	found := false
	for j := range channels2 {
		if channels2[j].Name == archivedChannel.Name {
			found = true
			break
		}
	}
	require.True(t, found, "archived channel not found after import")
}

View File

@ -275,7 +275,7 @@ func (a *App) importChannel(c request.CTX, data *imports.ChannelImportData, dryR
}
var channel *model.Channel
if result, err := a.Srv().Store().Channel().GetByNameIncludeDeleted(team.Id, *data.Name, true); err == nil {
if result, gErr := a.Srv().Store().Channel().GetByNameIncludeDeleted(team.Id, *data.Name, true); gErr == nil {
channel = result
} else {
channel = &model.Channel{}
@ -311,13 +311,20 @@ func (a *App) importChannel(c request.CTX, data *imports.ChannelImportData, dryR
channel.SchemeId = &scheme.Id
}
var chErr *model.AppError
if channel.Id == "" {
if _, err := a.CreateChannel(c, channel, false); err != nil {
return err
if _, chErr = a.CreateChannel(c, channel, false); chErr != nil {
return chErr
}
} else {
if _, err := a.UpdateChannel(c, channel); err != nil {
return err
if _, chErr = a.UpdateChannel(c, channel); chErr != nil {
return chErr
}
}
if data.DeletedAt != nil && *data.DeletedAt > 0 {
if err := a.Srv().Store().Channel().Delete(channel.Id, *data.DeletedAt); err != nil {
return model.NewAppError("BulkImport", "app.import.import_channel.deleting.app_error", nil, "", http.StatusInternalServerError).Wrap(err)
}
}
@ -1374,7 +1381,7 @@ func (a *App) getTeamsByNames(names []string) (map[string]*model.Team, *model.Ap
}
func (a *App) getChannelsByNames(names []string, teamID string) (map[string]*model.Channel, *model.AppError) {
allChannels, err := a.Srv().Store().Channel().GetByNames(teamID, names, true)
allChannels, err := a.Srv().Store().Channel().GetByNamesIncludeDeleted(teamID, names, true)
if err != nil {
return nil, model.NewAppError("BulkImport", "app.import.get_teams_by_names.some_teams_not_found.error", nil, "", http.StatusBadRequest).Wrap(err)
}

View File

@ -704,6 +704,21 @@ func TestImportImportChannel(t *testing.T) {
assert.Equal(t, *data.Header, channel.Header)
assert.Equal(t, *data.Purpose, channel.Purpose)
assert.Equal(t, scheme2.Id, *channel.SchemeId)
// Do a valid archived channel.
now := model.GetMillis()
data.Name = ptrStr("archivedchannel")
data.DisplayName = ptrStr("Archived Channel")
data.Type = &chanOpen
data.Header = ptrStr("Archived Channel Header")
data.Purpose = ptrStr("Archived Channel Purpose")
data.Scheme = &scheme1.Name
data.DeletedAt = &now
err = th.App.importChannel(th.Context, &data, false)
require.Nil(t, err, "Expected success in apply mode")
aChan, err := th.App.GetChannelByName(th.Context, *data.Name, team.Id, true)
require.Nil(t, err, "Failed to get channel from database.")
assert.Equal(t, *data.Name, aChan.Name)
}
func TestImportImportUser(t *testing.T) {

View File

@ -50,6 +50,7 @@ type ChannelImportData struct {
Header *string `json:"header,omitempty"`
Purpose *string `json:"purpose,omitempty"`
Scheme *string `json:"scheme,omitempty"`
DeletedAt *int64 `json:"deleted_at,omitempty"`
}
type UserImportData struct {

View File

@ -38,6 +38,11 @@ func MakeWorker(jobServer *jobs.JobServer, app AppIface) model.Worker {
opts.IncludeAttachments = true
}
includeArchivedChannels, ok := job.Data["include_archived_channels"]
if ok && includeArchivedChannels == "true" {
opts.IncludeArchivedChannels = true
}
outPath := *app.Config().ExportSettings.Directory
exportFilename := job.Id + "_export.zip"

View File

@ -1106,6 +1106,24 @@ func (s *OpenTracingLayerChannelStore) GetByNames(team_id string, names []string
return result, err
}
// GetByNamesIncludeDeleted wraps ChannelStore.GetByNamesIncludeDeleted with an
// OpenTracing span so the store call appears in distributed traces.
// NOTE(review): this looks like generated store-layer code — prefer
// regenerating the layers over hand-editing.
func (s *OpenTracingLayerChannelStore) GetByNamesIncludeDeleted(team_id string, names []string, allowFromCache bool) ([]*model.Channel, error) {
	origCtx := s.Root.Store.Context()
	span, newCtx := tracing.StartSpanWithParentByContext(s.Root.Store.Context(), "ChannelStore.GetByNamesIncludeDeleted")
	s.Root.Store.SetContext(newCtx)
	defer func() {
		// Restore the pre-call context once the traced call completes.
		s.Root.Store.SetContext(origCtx)
	}()

	defer span.Finish()
	result, err := s.ChannelStore.GetByNamesIncludeDeleted(team_id, names, allowFromCache)
	if err != nil {
		// Mark the span as failed so traces surface store errors.
		span.LogFields(spanlog.Error(err))
		ext.Error.Set(span, true)
	}

	return result, err
}
func (s *OpenTracingLayerChannelStore) GetChannelCounts(teamID string, userID string) (*model.ChannelCounts, error) {
origCtx := s.Root.Store.Context()
span, newCtx := tracing.StartSpanWithParentByContext(s.Root.Store.Context(), "ChannelStore.GetChannelCounts")
@ -1124,7 +1142,7 @@ func (s *OpenTracingLayerChannelStore) GetChannelCounts(teamID string, userID st
return result, err
}
func (s *OpenTracingLayerChannelStore) GetChannelMembersForExport(userID string, teamID string) ([]*model.ChannelMemberForExport, error) {
func (s *OpenTracingLayerChannelStore) GetChannelMembersForExport(userID string, teamID string, includeArchivedChannel bool) ([]*model.ChannelMemberForExport, error) {
origCtx := s.Root.Store.Context()
span, newCtx := tracing.StartSpanWithParentByContext(s.Root.Store.Context(), "ChannelStore.GetChannelMembersForExport")
s.Root.Store.SetContext(newCtx)
@ -1133,7 +1151,7 @@ func (s *OpenTracingLayerChannelStore) GetChannelMembersForExport(userID string,
}()
defer span.Finish()
result, err := s.ChannelStore.GetChannelMembersForExport(userID, teamID)
result, err := s.ChannelStore.GetChannelMembersForExport(userID, teamID, includeArchivedChannel)
if err != nil {
span.LogFields(spanlog.Error(err))
ext.Error.Set(span, true)
@ -6097,7 +6115,7 @@ func (s *OpenTracingLayerPostStore) GetOldestEntityCreationTime() (int64, error)
return result, err
}
func (s *OpenTracingLayerPostStore) GetParentsForExportAfter(limit int, afterID string) ([]*model.PostForExport, error) {
func (s *OpenTracingLayerPostStore) GetParentsForExportAfter(limit int, afterID string, includeArchivedChannels bool) ([]*model.PostForExport, error) {
origCtx := s.Root.Store.Context()
span, newCtx := tracing.StartSpanWithParentByContext(s.Root.Store.Context(), "PostStore.GetParentsForExportAfter")
s.Root.Store.SetContext(newCtx)
@ -6106,7 +6124,7 @@ func (s *OpenTracingLayerPostStore) GetParentsForExportAfter(limit int, afterID
}()
defer span.Finish()
result, err := s.PostStore.GetParentsForExportAfter(limit, afterID)
result, err := s.PostStore.GetParentsForExportAfter(limit, afterID, includeArchivedChannels)
if err != nil {
span.LogFields(spanlog.Error(err))
ext.Error.Set(span, true)

View File

@ -1216,6 +1216,27 @@ func (s *RetryLayerChannelStore) GetByNames(team_id string, names []string, allo
}
// GetByNamesIncludeDeleted wraps ChannelStore.GetByNamesIncludeDeleted,
// retrying up to 3 times when the error is a repeatable (transient)
// transaction failure.
// NOTE(review): this looks like generated store-layer code — prefer
// regenerating the layers over hand-editing.
func (s *RetryLayerChannelStore) GetByNamesIncludeDeleted(team_id string, names []string, allowFromCache bool) ([]*model.Channel, error) {
	tries := 0
	for {
		result, err := s.ChannelStore.GetByNamesIncludeDeleted(team_id, names, allowFromCache)
		if err == nil {
			return result, nil
		}
		// Non-transient errors are returned immediately without retrying.
		if !isRepeatableError(err) {
			return result, err
		}
		tries++
		if tries >= 3 {
			err = errors.Wrap(err, "giving up after 3 consecutive repeatable transaction failures")
			return result, err
		}
		// Brief fixed backoff before the next attempt.
		timepkg.Sleep(100 * timepkg.Millisecond)
	}
}
func (s *RetryLayerChannelStore) GetChannelCounts(teamID string, userID string) (*model.ChannelCounts, error) {
tries := 0
@ -1237,11 +1258,11 @@ func (s *RetryLayerChannelStore) GetChannelCounts(teamID string, userID string)
}
func (s *RetryLayerChannelStore) GetChannelMembersForExport(userID string, teamID string) ([]*model.ChannelMemberForExport, error) {
func (s *RetryLayerChannelStore) GetChannelMembersForExport(userID string, teamID string, includeArchivedChannel bool) ([]*model.ChannelMemberForExport, error) {
tries := 0
for {
result, err := s.ChannelStore.GetChannelMembersForExport(userID, teamID)
result, err := s.ChannelStore.GetChannelMembersForExport(userID, teamID, includeArchivedChannel)
if err == nil {
return result, nil
}
@ -6892,11 +6913,11 @@ func (s *RetryLayerPostStore) GetOldestEntityCreationTime() (int64, error) {
}
func (s *RetryLayerPostStore) GetParentsForExportAfter(limit int, afterID string) ([]*model.PostForExport, error) {
func (s *RetryLayerPostStore) GetParentsForExportAfter(limit int, afterID string, includeArchivedChannels bool) ([]*model.PostForExport, error) {
tries := 0
for {
result, err := s.PostStore.GetParentsForExportAfter(limit, afterID)
result, err := s.PostStore.GetParentsForExportAfter(limit, afterID, includeArchivedChannels)
if err == nil {
return result, nil
}

View File

@ -1468,7 +1468,15 @@ func (s SqlChannelStore) GetByName(teamId string, name string, allowFromCache bo
return s.getByName(teamId, name, false, allowFromCache)
}
// GetByNamesIncludeDeleted returns the channels in the given team matching the
// given names, including archived (soft-deleted) channels.
func (s SqlChannelStore) GetByNamesIncludeDeleted(teamId string, names []string, allowFromCache bool) ([]*model.Channel, error) {
	return s.getByNames(teamId, names, allowFromCache, true)
}
// GetByNames returns the channels in the given team matching the given names,
// excluding archived (soft-deleted) channels.
func (s SqlChannelStore) GetByNames(teamId string, names []string, allowFromCache bool) ([]*model.Channel, error) {
	return s.getByNames(teamId, names, allowFromCache, false)
}
func (s SqlChannelStore) getByNames(teamId string, names []string, allowFromCache, includeArchivedChannels bool) ([]*model.Channel, error) {
var channels []*model.Channel
if allowFromCache {
@ -1481,7 +1489,9 @@ func (s SqlChannelStore) GetByNames(teamId string, names []string, allowFromCach
visited[name] = struct{}{}
var cacheItem *model.Channel
if err := channelByNameCache.Get(teamId+name, &cacheItem); err == nil {
channels = append(channels, cacheItem)
if includeArchivedChannels || cacheItem.DeleteAt == 0 {
channels = append(channels, cacheItem)
}
} else {
misses = append(misses, name)
}
@ -1490,15 +1500,17 @@ func (s SqlChannelStore) GetByNames(teamId string, names []string, allowFromCach
}
if len(names) > 0 {
cond := sq.And{
sq.Eq{"Name": names},
}
if !includeArchivedChannels {
cond = append(cond, sq.Eq{"DeleteAt": 0})
}
builder := s.getQueryBuilder().
Select("*").
From("Channels").
Where(
sq.And{
sq.Eq{"Name": names},
sq.Eq{"DeleteAt": 0},
},
)
Where(cond)
if teamId != "" {
builder = builder.Where(sq.Eq{"TeamId": teamId})
@ -4191,34 +4203,36 @@ func (s SqlChannelStore) GetAllChannelsForExportAfter(limit int, afterId string)
return channels, nil
}
func (s SqlChannelStore) GetChannelMembersForExport(userId string, teamId string) ([]*model.ChannelMemberForExport, error) {
func (s SqlChannelStore) GetChannelMembersForExport(userId string, teamId string, includeArchivedChannel bool) ([]*model.ChannelMemberForExport, error) {
members := []*model.ChannelMemberForExport{}
err := s.GetReplicaX().Select(&members, `
SELECT
ChannelMembers.ChannelId,
ChannelMembers.UserId,
ChannelMembers.Roles,
ChannelMembers.LastViewedAt,
ChannelMembers.MsgCount,
ChannelMembers.MentionCount,
ChannelMembers.MentionCountRoot,
COALESCE(ChannelMembers.UrgentMentionCount, 0) AS UrgentMentionCount,
ChannelMembers.MsgCountRoot,
ChannelMembers.NotifyProps,
ChannelMembers.LastUpdateAt,
ChannelMembers.SchemeUser,
ChannelMembers.SchemeAdmin,
(ChannelMembers.SchemeGuest IS NOT NULL AND ChannelMembers.SchemeGuest) as SchemeGuest,
Channels.Name as ChannelName
FROM
ChannelMembers
INNER JOIN
Channels ON ChannelMembers.ChannelId = Channels.Id
WHERE
ChannelMembers.UserId = ?
AND Channels.TeamId = ?
AND Channels.DeleteAt = 0`,
userId, teamId)
q := `
SELECT
ChannelMembers.ChannelId,
ChannelMembers.UserId,
ChannelMembers.Roles,
ChannelMembers.LastViewedAt,
ChannelMembers.MsgCount,
ChannelMembers.MentionCount,
ChannelMembers.MentionCountRoot,
COALESCE(ChannelMembers.UrgentMentionCount, 0) AS UrgentMentionCount,
ChannelMembers.MsgCountRoot,
ChannelMembers.NotifyProps,
ChannelMembers.LastUpdateAt,
ChannelMembers.SchemeUser,
ChannelMembers.SchemeAdmin,
(ChannelMembers.SchemeGuest IS NOT NULL AND ChannelMembers.SchemeGuest) as SchemeGuest,
Channels.Name as ChannelName
FROM
ChannelMembers
INNER JOIN
Channels ON ChannelMembers.ChannelId = Channels.Id
WHERE
ChannelMembers.UserId = ?
AND Channels.TeamId = ?`
if !includeArchivedChannel {
q += " AND Channels.DeleteAt = 0"
}
err := s.GetReplicaX().Select(&members, q, userId, teamId)
if err != nil {
return nil, errors.Wrap(err, "failed to find Channels for export")
}

View File

@ -2556,7 +2556,7 @@ func (s *SqlPostStore) GetMaxPostSize() int {
return s.maxPostSizeCached
}
func (s *SqlPostStore) GetParentsForExportAfter(limit int, afterId string) ([]*model.PostForExport, error) {
func (s *SqlPostStore) GetParentsForExportAfter(limit int, afterId string, includeArchivedChannel bool) ([]*model.PostForExport, error) {
for {
rootIds := []string{}
err := s.GetReplicaX().Select(&rootIds,
@ -2580,16 +2580,20 @@ func (s *SqlPostStore) GetParentsForExportAfter(limit int, afterId string) ([]*m
return postsForExport, nil
}
excludeDeletedCond := sq.And{
sq.Eq{"Teams.DeleteAt": 0},
}
if !includeArchivedChannel {
excludeDeletedCond = append(excludeDeletedCond, sq.Eq{"Channels.DeleteAt": 0})
}
builder := s.getQueryBuilder().
Select("p1.*, Users.Username as Username, Teams.Name as TeamName, Channels.Name as ChannelName").
FromSelect(sq.Select("*").From("Posts").Where(sq.Eq{"Posts.Id": rootIds}), "p1").
InnerJoin("Channels ON p1.ChannelId = Channels.Id").
InnerJoin("Teams ON Channels.TeamId = Teams.Id").
InnerJoin("Users ON p1.UserId = Users.Id").
Where(sq.And{
sq.Eq{"Channels.DeleteAt": 0},
sq.Eq{"Teams.DeleteAt": 0},
}).
Where(excludeDeletedCond).
OrderBy("p1.Id")
query, args, err := builder.ToSql()

View File

@ -192,6 +192,7 @@ type ChannelStore interface {
GetByName(team_id string, name string, allowFromCache bool) (*model.Channel, error)
GetByNames(team_id string, names []string, allowFromCache bool) ([]*model.Channel, error)
GetByNameIncludeDeleted(team_id string, name string, allowFromCache bool) (*model.Channel, error)
GetByNamesIncludeDeleted(team_id string, names []string, allowFromCache bool) ([]*model.Channel, error)
GetDeletedByName(team_id string, name string) (*model.Channel, error)
GetDeleted(team_id string, offset int, limit int, userID string) (model.ChannelList, error)
GetChannels(teamID, userID string, opts *model.ChannelSearchOpts) (model.ChannelList, error)
@ -285,7 +286,7 @@ type ChannelStore interface {
DeleteSidebarCategory(categoryID string) error
GetAllChannelsForExportAfter(limit int, afterID string) ([]*model.ChannelForExport, error)
GetAllDirectChannelsForExportAfter(limit int, afterID string) ([]*model.DirectChannelForExport, error)
GetChannelMembersForExport(userID string, teamID string) ([]*model.ChannelMemberForExport, error)
GetChannelMembersForExport(userID string, teamID string, includeArchivedChannel bool) ([]*model.ChannelMemberForExport, error)
RemoveAllDeactivatedMembers(channelID string) error
GetChannelsBatchForIndexing(startTime int64, startChannelID string, limit int) ([]*model.Channel, error)
UserBelongsToChannels(userID string, channelIds []string) (bool, error)
@ -379,7 +380,7 @@ type PostStore interface {
PermanentDeleteBatch(endTime int64, limit int64) (int64, error)
GetOldest() (*model.Post, error)
GetMaxPostSize() int
GetParentsForExportAfter(limit int, afterID string) ([]*model.PostForExport, error)
GetParentsForExportAfter(limit int, afterID string, includeArchivedChannels bool) ([]*model.PostForExport, error)
GetRepliesForExport(parentID string) ([]*model.ReplyForExport, error)
GetDirectPostParentsForExportAfter(limit int, afterID string) ([]*model.DirectPostForExport, error)
SearchPostsForUser(paramsList []*model.SearchParams, userID, teamID string, page, perPage int) (*model.PostSearchResults, error)

View File

@ -81,6 +81,7 @@ func TestChannelStore(t *testing.T, ss store.Store, s SqlStore) {
t.Run("Delete", func(t *testing.T) { testChannelStoreDelete(t, ss) })
t.Run("GetByName", func(t *testing.T) { testChannelStoreGetByName(t, ss) })
t.Run("GetByNames", func(t *testing.T) { testChannelStoreGetByNames(t, ss) })
t.Run("GetByNamesIncludeDeleted", func(t *testing.T) { testChannelStoreGetByNamesIncludeDeleted(t, ss) })
t.Run("GetDeletedByName", func(t *testing.T) { testChannelStoreGetDeletedByName(t, ss) })
t.Run("GetDeleted", func(t *testing.T) { testChannelStoreGetDeleted(t, ss) })
t.Run("ChannelMemberStore", func(t *testing.T) { testChannelMemberStore(t, ss) })
@ -859,6 +860,17 @@ func testChannelStoreGetByNames(t *testing.T, ss store.Store) {
_, nErr = ss.Channel().Save(&o2, -1)
require.NoError(t, nErr)
o3 := model.Channel{
TeamId: o1.TeamId,
DisplayName: "Name",
Name: NewTestId(),
Type: model.ChannelTypeOpen,
}
_, nErr = ss.Channel().Save(&o3, -1)
require.NoError(t, nErr)
nErr = ss.Channel().Delete(o3.Id, model.GetMillis())
require.NoError(t, nErr)
for index, tc := range []struct {
TeamId string
Names []string
@ -895,6 +907,53 @@ func testChannelStoreGetByNames(t *testing.T, ss store.Store) {
assert.Empty(t, channels)
}
// testChannelStoreGetByNamesIncludeDeleted verifies that
// GetByNamesIncludeDeleted returns both active and archived channels that
// match the requested names within the given team.
func testChannelStoreGetByNamesIncludeDeleted(t *testing.T, ss store.Store) {
	o1 := model.Channel{
		TeamId:      model.NewId(),
		DisplayName: "Name",
		Name:        NewTestId(),
		Type:        model.ChannelTypeOpen,
	}
	_, nErr := ss.Channel().Save(&o1, -1)
	require.NoError(t, nErr)

	o2 := model.Channel{
		TeamId:      o1.TeamId,
		DisplayName: "Name",
		Name:        NewTestId(),
		Type:        model.ChannelTypeOpen,
	}
	_, nErr = ss.Channel().Save(&o2, -1)
	require.NoError(t, nErr)

	// Archive o2 so the cases below exercise the "include deleted" behavior.
	nErr = ss.Channel().Delete(o2.Id, model.GetMillis())
	require.NoError(t, nErr, "channel should have been deleted")

	for index, tc := range []struct {
		TeamId      string
		Names       []string
		ExpectedIds []string
	}{
		{o1.TeamId, []string{o1.Name}, []string{o1.Id}},
		{o1.TeamId, []string{o1.Name, o2.Name}, []string{o1.Id, o2.Id}},
		{o1.TeamId, nil, nil},
		{o1.TeamId, []string{"foo"}, nil},
		{o1.TeamId, []string{o1.Name, "foo", o2.Name, o2.Name}, []string{o1.Id, o2.Id}},
		{"", []string{o1.Name, "foo", o2.Name, o2.Name}, []string{o1.Id, o2.Id}},
		{"asd", []string{o1.Name, "foo", o2.Name, o2.Name}, nil},
	} {
		// := is sufficient here; the original's preceding
		// `var channels []*model.Channel` declaration was redundant.
		channels, err := ss.Channel().GetByNamesIncludeDeleted(tc.TeamId, tc.Names, true)
		require.NoError(t, err)

		var ids []string
		for _, channel := range channels {
			ids = append(ids, channel.Id)
		}
		// Sort both sides so the comparison is order-independent.
		sort.Strings(ids)
		sort.Strings(tc.ExpectedIds)
		assert.Equal(t, tc.ExpectedIds, ids, "tc %v", index)
	}
}
func testChannelStoreGetDeletedByName(t *testing.T, ss store.Store) {
o1 := &model.Channel{}
o1.TeamId = model.NewId()
@ -7648,7 +7707,7 @@ func testChannelStoreGetChannelMembersForExport(t *testing.T, ss store.Store) {
_, err = ss.Channel().SaveMember(&m2)
require.NoError(t, err)
d1, err := ss.Channel().GetChannelMembersForExport(u1.Id, t1.Id)
d1, err := ss.Channel().GetChannelMembersForExport(u1.Id, t1.Id, false)
assert.NoError(t, err)
assert.Len(t, d1, 1)

View File

@ -674,6 +674,32 @@ func (_m *ChannelStore) GetByNames(team_id string, names []string, allowFromCach
return r0, r1
}
// GetByNamesIncludeDeleted provides a mock function with given fields: team_id, names, allowFromCache
// NOTE(review): this appears to be mockery-generated code — regenerate the
// mocks rather than hand-editing this method.
func (_m *ChannelStore) GetByNamesIncludeDeleted(team_id string, names []string, allowFromCache bool) ([]*model.Channel, error) {
	ret := _m.Called(team_id, names, allowFromCache)

	var r0 []*model.Channel
	var r1 error
	// A registered function returning both values takes precedence.
	if rf, ok := ret.Get(0).(func(string, []string, bool) ([]*model.Channel, error)); ok {
		return rf(team_id, names, allowFromCache)
	}
	if rf, ok := ret.Get(0).(func(string, []string, bool) []*model.Channel); ok {
		r0 = rf(team_id, names, allowFromCache)
	} else {
		if ret.Get(0) != nil {
			r0 = ret.Get(0).([]*model.Channel)
		}
	}
	if rf, ok := ret.Get(1).(func(string, []string, bool) error); ok {
		r1 = rf(team_id, names, allowFromCache)
	} else {
		r1 = ret.Error(1)
	}

	return r0, r1
}
// GetChannelCounts provides a mock function with given fields: teamID, userID
func (_m *ChannelStore) GetChannelCounts(teamID string, userID string) (*model.ChannelCounts, error) {
ret := _m.Called(teamID, userID)
@ -700,25 +726,25 @@ func (_m *ChannelStore) GetChannelCounts(teamID string, userID string) (*model.C
return r0, r1
}
// GetChannelMembersForExport provides a mock function with given fields: userID, teamID
func (_m *ChannelStore) GetChannelMembersForExport(userID string, teamID string) ([]*model.ChannelMemberForExport, error) {
ret := _m.Called(userID, teamID)
// GetChannelMembersForExport provides a mock function with given fields: userID, teamID, includeArchivedChannel
func (_m *ChannelStore) GetChannelMembersForExport(userID string, teamID string, includeArchivedChannel bool) ([]*model.ChannelMemberForExport, error) {
ret := _m.Called(userID, teamID, includeArchivedChannel)
var r0 []*model.ChannelMemberForExport
var r1 error
if rf, ok := ret.Get(0).(func(string, string) ([]*model.ChannelMemberForExport, error)); ok {
return rf(userID, teamID)
if rf, ok := ret.Get(0).(func(string, string, bool) ([]*model.ChannelMemberForExport, error)); ok {
return rf(userID, teamID, includeArchivedChannel)
}
if rf, ok := ret.Get(0).(func(string, string) []*model.ChannelMemberForExport); ok {
r0 = rf(userID, teamID)
if rf, ok := ret.Get(0).(func(string, string, bool) []*model.ChannelMemberForExport); ok {
r0 = rf(userID, teamID, includeArchivedChannel)
} else {
if ret.Get(0) != nil {
r0 = ret.Get(0).([]*model.ChannelMemberForExport)
}
}
if rf, ok := ret.Get(1).(func(string, string) error); ok {
r1 = rf(userID, teamID)
if rf, ok := ret.Get(1).(func(string, string, bool) error); ok {
r1 = rf(userID, teamID, includeArchivedChannel)
} else {
r1 = ret.Error(1)
}

View File

@ -395,25 +395,25 @@ func (_m *PostStore) GetOldestEntityCreationTime() (int64, error) {
return r0, r1
}
// GetParentsForExportAfter provides a mock function with given fields: limit, afterID
func (_m *PostStore) GetParentsForExportAfter(limit int, afterID string) ([]*model.PostForExport, error) {
ret := _m.Called(limit, afterID)
// GetParentsForExportAfter provides a mock function with given fields: limit, afterID, includeArchivedChannels
func (_m *PostStore) GetParentsForExportAfter(limit int, afterID string, includeArchivedChannels bool) ([]*model.PostForExport, error) {
ret := _m.Called(limit, afterID, includeArchivedChannels)
var r0 []*model.PostForExport
var r1 error
if rf, ok := ret.Get(0).(func(int, string) ([]*model.PostForExport, error)); ok {
return rf(limit, afterID)
if rf, ok := ret.Get(0).(func(int, string, bool) ([]*model.PostForExport, error)); ok {
return rf(limit, afterID, includeArchivedChannels)
}
if rf, ok := ret.Get(0).(func(int, string) []*model.PostForExport); ok {
r0 = rf(limit, afterID)
if rf, ok := ret.Get(0).(func(int, string, bool) []*model.PostForExport); ok {
r0 = rf(limit, afterID, includeArchivedChannels)
} else {
if ret.Get(0) != nil {
r0 = ret.Get(0).([]*model.PostForExport)
}
}
if rf, ok := ret.Get(1).(func(int, string) error); ok {
r1 = rf(limit, afterID)
if rf, ok := ret.Get(1).(func(int, string, bool) error); ok {
r1 = rf(limit, afterID, includeArchivedChannels)
} else {
r1 = ret.Error(1)
}

View File

@ -4154,6 +4154,14 @@ func testPostStoreGetParentsForExportAfter(t *testing.T, ss store.Store) {
_, nErr := ss.Channel().Save(&c1, -1)
require.NoError(t, nErr)
c2 := model.Channel{}
c2.TeamId = t1.Id
c2.DisplayName = "Channel2"
c2.Name = NewTestId()
c2.Type = model.ChannelTypeOpen
_, nErr = ss.Channel().Save(&c2, -1)
require.NoError(t, nErr)
u1 := model.User{}
u1.Username = model.NewId()
u1.Email = MakeEmail()
@ -4169,21 +4177,56 @@ func testPostStoreGetParentsForExportAfter(t *testing.T, ss store.Store) {
p1, nErr = ss.Post().Save(p1)
require.NoError(t, nErr)
posts, err := ss.Post().GetParentsForExportAfter(10000, strings.Repeat("0", 26))
assert.NoError(t, err)
p2 := &model.Post{}
p2.ChannelId = c2.Id
p2.UserId = u1.Id
p2.Message = NewTestId()
p2.CreateAt = 1000
p2, nErr = ss.Post().Save(p2)
require.NoError(t, nErr)
nErr = ss.Channel().Delete(c2.Id, model.GetMillis())
require.NoError(t, nErr)
found := false
for _, p := range posts {
if p.Id == p1.Id {
found = true
assert.Equal(t, p.Id, p1.Id)
assert.Equal(t, p.Message, p1.Message)
assert.Equal(t, p.Username, u1.Username)
assert.Equal(t, p.TeamName, t1.Name)
assert.Equal(t, p.ChannelName, c1.Name)
t.Run("without archived channels", func(t *testing.T) {
posts, err := ss.Post().GetParentsForExportAfter(10000, strings.Repeat("0", 26), false)
assert.NoError(t, err)
found := false
foundArchived := false
for _, p := range posts {
if p.Id == p1.Id {
found = true
assert.Equal(t, p.Id, p1.Id)
assert.Equal(t, p.Message, p1.Message)
assert.Equal(t, p.Username, u1.Username)
assert.Equal(t, p.TeamName, t1.Name)
assert.Equal(t, p.ChannelName, c1.Name)
}
if p.Id == p2.Id {
foundArchived = true
}
}
}
assert.True(t, found)
assert.True(t, found)
assert.False(t, foundArchived, "posts from archived channel should not be returned")
})
t.Run("with archived channels", func(t *testing.T) {
posts, err := ss.Post().GetParentsForExportAfter(10000, strings.Repeat("0", 26), true)
assert.NoError(t, err)
found := false
for _, p := range posts {
if p.Id == p2.Id {
found = true
assert.Equal(t, p.Id, p2.Id)
assert.Equal(t, p.Message, p2.Message)
assert.Equal(t, p.Username, u1.Username)
assert.Equal(t, p.TeamName, t1.Name)
assert.Equal(t, p.ChannelName, c2.Name)
}
}
assert.True(t, found)
})
}
func testPostStoreGetRepliesForExport(t *testing.T, ss store.Store) {

View File

@ -1040,6 +1040,22 @@ func (s *TimerLayerChannelStore) GetByNames(team_id string, names []string, allo
return result, err
}
// GetByNamesIncludeDeleted delegates to the wrapped ChannelStore and, when
// metrics collection is enabled, records how long the call took along with
// whether it succeeded.
func (s *TimerLayerChannelStore) GetByNamesIncludeDeleted(team_id string, names []string, allowFromCache bool) ([]*model.Channel, error) {
	begin := time.Now()

	channels, err := s.ChannelStore.GetByNamesIncludeDeleted(team_id, names, allowFromCache)

	secs := float64(time.Since(begin)) / float64(time.Second)
	if s.Root.Metrics != nil {
		// Metrics label: "true" on success, "false" on error.
		status := "false"
		if err == nil {
			status = "true"
		}
		s.Root.Metrics.ObserveStoreMethodDuration("ChannelStore.GetByNamesIncludeDeleted", status, secs)
	}
	return channels, err
}
func (s *TimerLayerChannelStore) GetChannelCounts(teamID string, userID string) (*model.ChannelCounts, error) {
start := time.Now()
@ -1056,10 +1072,10 @@ func (s *TimerLayerChannelStore) GetChannelCounts(teamID string, userID string)
return result, err
}
func (s *TimerLayerChannelStore) GetChannelMembersForExport(userID string, teamID string) ([]*model.ChannelMemberForExport, error) {
func (s *TimerLayerChannelStore) GetChannelMembersForExport(userID string, teamID string, includeArchivedChannel bool) ([]*model.ChannelMemberForExport, error) {
start := time.Now()
result, err := s.ChannelStore.GetChannelMembersForExport(userID, teamID)
result, err := s.ChannelStore.GetChannelMembersForExport(userID, teamID, includeArchivedChannel)
elapsed := float64(time.Since(start)) / float64(time.Second)
if s.Root.Metrics != nil {
@ -5525,10 +5541,10 @@ func (s *TimerLayerPostStore) GetOldestEntityCreationTime() (int64, error) {
return result, err
}
func (s *TimerLayerPostStore) GetParentsForExportAfter(limit int, afterID string) ([]*model.PostForExport, error) {
func (s *TimerLayerPostStore) GetParentsForExportAfter(limit int, afterID string, includeArchivedChannels bool) ([]*model.PostForExport, error) {
start := time.Now()
result, err := s.PostStore.GetParentsForExportAfter(limit, afterID)
result, err := s.PostStore.GetParentsForExportAfter(limit, afterID, includeArchivedChannels)
elapsed := float64(time.Since(start)) / float64(time.Second)
if s.Root.Metrics != nil {

View File

@ -81,6 +81,7 @@ func init() {
GlobalRelayZipExportCmd.Flags().Int("limit", -1, "The number of posts to export. The default of -1 means no limit.")
BulkExportCmd.Flags().Bool("all-teams", true, "Export all teams from the server.")
BulkExportCmd.Flags().Bool("with-archived-channels", false, "Also exports archived channels.")
BulkExportCmd.Flags().Bool("attachments", false, "Also export file attachments.")
BulkExportCmd.Flags().Bool("archive", false, "Outputs a single archive file.")
@ -226,6 +227,11 @@ func bulkExportCmdF(command *cobra.Command, args []string) error {
return errors.Wrap(err, "archive flag error")
}
withArchivedChannels, err := command.Flags().GetBool("with-archived-channels")
if err != nil {
return errors.Wrap(err, "with-archived-channels flag error")
}
fileWriter, err := os.Create(args[0])
if err != nil {
return err
@ -240,6 +246,7 @@ func bulkExportCmdF(command *cobra.Command, args []string) error {
var opts model.BulkExportOpts
opts.IncludeAttachments = attachments
opts.CreateArchive = archive
opts.IncludeArchivedChannels = withArchivedChannels
if err := a.BulkExport(request.EmptyContext(a.Log()), fileWriter, filepath.Dir(outPath), nil /* nil job since it's spawned from CLI */, opts); err != nil {
CommandPrintErrorln(err.Error())
return err

View File

@ -100,6 +100,7 @@ func init() {
_ = ExportCreateCmd.Flags().MarkDeprecated("attachments", "the tool now includes attachments by default. The flag will be removed in a future version.")
ExportCreateCmd.Flags().Bool("no-attachments", false, "Set to true to exclude file attachments in the export file.")
ExportCreateCmd.Flags().Bool("include-archived-channels", false, "Set to true to include archived channels in the export file.")
ExportDownloadCmd.Flags().Bool("resume", false, "Set to true to resume an export download.")
_ = ExportDownloadCmd.Flags().MarkHidden("resume")
@ -136,6 +137,11 @@ func exportCreateCmdF(c client.Client, command *cobra.Command, args []string) er
data["include_attachments"] = "true"
}
includeArchivedChannels, _ := command.Flags().GetBool("include-archived-channels")
if includeArchivedChannels {
data["include_archived_channels"] = "true"
}
job, _, err := c.CreateJob(context.TODO(), &model.Job{
Type: model.JobTypeExportProcess,
Data: data,

View File

@ -20,8 +20,9 @@ Options
::
-h, --help help for create
  -h, --help                        help for create
  --include-archived-channels       Set to true to include archived channels in the export file.
  --no-attachments                  Set to true to exclude file attachments in the export file.
Options inherited from parent commands
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

View File

@ -5303,6 +5303,10 @@
"id": "app.import.get_users_by_username.some_users_not_found.error",
"translation": "Some users not found"
},
{
"id": "app.import.import_channel.deleting.app_error",
"translation": "Unable to archive imported channel."
},
{
"id": "app.import.import_channel.scheme_deleted.error",
"translation": "Unable to set a channel to use a deleted scheme."

View File

@ -8,6 +8,7 @@ package model
const ExportDataDir = "data"
// BulkExportOpts holds the options that control what a bulk export run
// includes and how its output is written.
type BulkExportOpts struct {
	// IncludeAttachments also exports file attachments alongside the posts.
	IncludeAttachments bool
	// IncludeArchivedChannels also exports channels that have been archived
	// (DeleteAt != 0), instead of skipping them.
	IncludeArchivedChannels bool
	// CreateArchive writes the export as a single archive file rather than
	// a plain export file plus a data directory.
	CreateArchive bool
}