[MM-55053] Use request-specific logger in channels/app/file.go (#25037)

Author: Ben Schumacher (committed by GitHub)
Date:   2023-11-07 10:04:16 +01:00
parent 45d6fb122f
commit b0bf5d0765
20 changed files with 334 additions and 325 deletions
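
The pattern is the same across all of these files: functions that used to log through the package-level mlog helpers now take the request context (request.CTX, usually named rctx or c) and log through its Logger(), so request-scoped fields such as the request and user IDs appear on every log line. A minimal, self-contained sketch of the before/after shape, using Go's standard log/slog as a stand-in for Mattermost's mlog and request.CTX types (the names below are illustrative, not the real API):

package main

import "log/slog"

// Stand-in for request.CTX: the only piece this sketch needs is a request-scoped logger.
type requestCTX interface {
	Logger() *slog.Logger
}

type reqContext struct{ logger *slog.Logger }

func (c reqContext) Logger() *slog.Logger { return c.logger }

// Before: a package-level logger knows nothing about the current request.
func getFileInfoBefore(fileID string) {
	slog.Warn("file not found", "file_id", fileID)
}

// After: the request context is threaded in and its logger is used,
// so fields attached to the request (request ID, user ID, ...) come along for free.
func getFileInfoAfter(rctx requestCTX, fileID string) {
	rctx.Logger().Warn("file not found", "file_id", fileID)
}

func main() {
	rctx := reqContext{logger: slog.Default().With("request_id", "abc123")}
	getFileInfoBefore("f1")      // logs without request fields
	getFileInfoAfter(rctx, "f1") // logs with request_id=abc123
}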

View File

@@ -86,7 +86,7 @@ func getDrafts(c *Context, w http.ResponseWriter, r *http.Request) {
 		return
 	}
-	drafts, err := c.App.GetDraftsForUser(c.AppContext.Session().UserId, c.Params.TeamId)
+	drafts, err := c.App.GetDraftsForUser(c.AppContext, c.AppContext.Session().UserId, c.Params.TeamId)
 	if err != nil {
 		c.Err = err
 		return
@@ -135,7 +135,7 @@ func deleteDraft(c *Context, w http.ResponseWriter, r *http.Request) {
 		return
 	}
-	if _, err := c.App.DeleteDraft(userID, channelID, rootID, connectionID); err != nil {
+	if _, err := c.App.DeleteDraft(c.AppContext, userID, channelID, rootID, connectionID); err != nil {
 		c.Err = err
 		return
 	}

View File

@@ -452,7 +452,7 @@ func getFile(c *Context, w http.ResponseWriter, r *http.Request) {
 	defer c.LogAuditRec(auditRec)
 	audit.AddEventParameter(auditRec, "force_download", forceDownload)
-	info, err := c.App.GetFileInfo(c.Params.FileId)
+	info, err := c.App.GetFileInfo(c.AppContext, c.Params.FileId)
 	if err != nil {
 		c.Err = err
 		setInaccessibleFileHeader(w, err)
@@ -485,7 +485,7 @@ func getFileThumbnail(c *Context, w http.ResponseWriter, r *http.Request) {
 	}
 	forceDownload, _ := strconv.ParseBool(r.URL.Query().Get("download"))
-	info, err := c.App.GetFileInfo(c.Params.FileId)
+	info, err := c.App.GetFileInfo(c.AppContext, c.Params.FileId)
 	if err != nil {
 		c.Err = err
 		setInaccessibleFileHeader(w, err)
@@ -527,7 +527,7 @@ func getFileLink(c *Context, w http.ResponseWriter, r *http.Request) {
 	auditRec := c.MakeAuditRecord("getFileLink", audit.Fail)
 	defer c.LogAuditRec(auditRec)
-	info, err := c.App.GetFileInfo(c.Params.FileId)
+	info, err := c.App.GetFileInfo(c.AppContext, c.Params.FileId)
 	if err != nil {
 		c.Err = err
 		setInaccessibleFileHeader(w, err)
@@ -561,7 +561,7 @@ func getFilePreview(c *Context, w http.ResponseWriter, r *http.Request) {
 	}
 	forceDownload, _ := strconv.ParseBool(r.URL.Query().Get("download"))
-	info, err := c.App.GetFileInfo(c.Params.FileId)
+	info, err := c.App.GetFileInfo(c.AppContext, c.Params.FileId)
 	if err != nil {
 		c.Err = err
 		setInaccessibleFileHeader(w, err)
@@ -595,7 +595,7 @@ func getFileInfo(c *Context, w http.ResponseWriter, r *http.Request) {
 		return
 	}
-	info, err := c.App.GetFileInfo(c.Params.FileId)
+	info, err := c.App.GetFileInfo(c.AppContext, c.Params.FileId)
 	if err != nil {
 		c.Err = err
 		setInaccessibleFileHeader(w, err)
@@ -624,7 +624,7 @@ func getPublicFile(c *Context, w http.ResponseWriter, r *http.Request) {
 		return
 	}
-	info, err := c.App.GetFileInfo(c.Params.FileId)
+	info, err := c.App.GetFileInfo(c.AppContext, c.Params.FileId)
 	if err != nil {
 		c.Err = err
 		setInaccessibleFileHeader(w, err)

View File

@@ -1145,7 +1145,7 @@ func getFileInfosForPost(c *Context, w http.ResponseWriter, r *http.Request) {
 		return
 	}
-	infos, appErr := c.App.GetFileInfosForPostWithMigration(c.Params.PostId, includeDeleted)
+	infos, appErr := c.App.GetFileInfosForPostWithMigration(c.AppContext, c.Params.PostId, includeDeleted)
 	if appErr != nil {
 		c.Err = appErr
 		return

View File

@@ -105,7 +105,7 @@ type AppIface interface {
 	// their zero values.
 	CreateUser(c request.CTX, user *model.User) (*model.User, *model.AppError)
 	// Creates and stores FileInfos for a post created before the FileInfos table existed.
-	MigrateFilenamesToFileInfos(post *model.Post) []*model.FileInfo
+	MigrateFilenamesToFileInfos(rctx request.CTX, post *model.Post) []*model.FileInfo
 	// DefaultChannelNames returns the list of system-wide default channel names.
 	//
 	// By default the list will be (not necessarily in this order):
@@ -185,7 +185,7 @@ type AppIface interface {
 	// If filter is not nil and returns false for a struct field, that field will be omitted.
 	GetEnvironmentConfig(filter func(reflect.StructField) bool) map[string]any
 	// GetFileInfosForPost also returns firstInaccessibleFileTime based on cloud plan's limit.
-	GetFileInfosForPost(postID string, fromMaster bool, includeDeleted bool) ([]*model.FileInfo, int64, *model.AppError)
+	GetFileInfosForPost(rctx request.CTX, postID string, fromMaster bool, includeDeleted bool) ([]*model.FileInfo, int64, *model.AppError)
 	// GetFilteredUsersStats is used to get a count of users based on the set of filters supported by UserCountOptions.
 	GetFilteredUsersStats(options *model.UserCountOptions) (*model.UsersStats, *model.AppError)
 	// GetGroupsByTeam returns the paged list and the total count of group associated to the given team.
@@ -523,7 +523,7 @@ type AppIface interface {
 	DeleteBrandImage() *model.AppError
 	DeleteChannel(c request.CTX, channel *model.Channel, userID string) *model.AppError
 	DeleteCommand(commandID string) *model.AppError
-	DeleteDraft(userID, channelID, rootID, connectionID string) (*model.Draft, *model.AppError)
+	DeleteDraft(rctx request.CTX, userID, channelID, rootID, connectionID string) (*model.Draft, *model.AppError)
 	DeleteEmoji(c request.CTX, emoji *model.Emoji) *model.AppError
 	DeleteEphemeralPost(userID, postID string)
 	DeleteExport(name string) *model.AppError
@@ -641,16 +641,16 @@ type AppIface interface {
 	GetDefaultProfileImage(user *model.User) ([]byte, *model.AppError)
 	GetDeletedChannels(c request.CTX, teamID string, offset int, limit int, userID string) (model.ChannelList, *model.AppError)
 	GetDraft(userID, channelID, rootID string) (*model.Draft, *model.AppError)
-	GetDraftsForUser(userID, teamID string) ([]*model.Draft, *model.AppError)
+	GetDraftsForUser(rctx request.CTX, userID, teamID string) ([]*model.Draft, *model.AppError)
 	GetEditHistoryForPost(postID string) ([]*model.Post, *model.AppError)
 	GetEmoji(c request.CTX, emojiId string) (*model.Emoji, *model.AppError)
 	GetEmojiByName(c request.CTX, emojiName string) (*model.Emoji, *model.AppError)
 	GetEmojiImage(c request.CTX, emojiId string) ([]byte, string, *model.AppError)
 	GetEmojiList(c request.CTX, page, perPage int, sort string) ([]*model.Emoji, *model.AppError)
-	GetFile(fileID string) ([]byte, *model.AppError)
-	GetFileInfo(fileID string) (*model.FileInfo, *model.AppError)
-	GetFileInfos(page, perPage int, opt *model.GetFileInfosOptions) ([]*model.FileInfo, *model.AppError)
-	GetFileInfosForPostWithMigration(postID string, includeDeleted bool) ([]*model.FileInfo, *model.AppError)
+	GetFile(rctx request.CTX, fileID string) ([]byte, *model.AppError)
+	GetFileInfo(rctx request.CTX, fileID string) (*model.FileInfo, *model.AppError)
+	GetFileInfos(rctx request.CTX, page, perPage int, opt *model.GetFileInfosOptions) ([]*model.FileInfo, *model.AppError)
+	GetFileInfosForPostWithMigration(rctx request.CTX, postID string, includeDeleted bool) ([]*model.FileInfo, *model.AppError)
 	GetFlaggedPosts(userID string, offset int, limit int) (*model.PostList, *model.AppError)
 	GetFlaggedPostsForChannel(userID, channelID string, offset int, limit int) (*model.PostList, *model.AppError)
 	GetFlaggedPostsForTeam(userID, teamID string, offset int, limit int) (*model.PostList, *model.AppError)
@@ -858,7 +858,7 @@ type AppIface interface {
 	HandleCommandResponse(c request.CTX, command *model.Command, args *model.CommandArgs, response *model.CommandResponse, builtIn bool) (*model.CommandResponse, *model.AppError)
 	HandleCommandResponsePost(c request.CTX, command *model.Command, args *model.CommandArgs, response *model.CommandResponse, builtIn bool) (*model.Post, *model.AppError)
 	HandleCommandWebhook(c request.CTX, hookID string, response *model.CommandResponse) *model.AppError
-	HandleImages(previewPathList []string, thumbnailPathList []string, fileData [][]byte)
+	HandleImages(rctx request.CTX, previewPathList []string, thumbnailPathList []string, fileData [][]byte)
 	HandleIncomingWebhook(c request.CTX, hookID string, req *model.IncomingWebhookRequest) *model.AppError
 	HandleMessageExportConfig(cfg *model.Config, appCfg *model.Config)
 	HasPermissionTo(askingUserId string, permission *model.Permission) bool

View File

@@ -70,12 +70,12 @@ func (a *App) UpsertDraft(c request.CTX, draft *model.Draft, connectionID string
 		return nil, model.NewAppError("CreateDraft", "app.draft.save.app_error", nil, nErr.Error(), http.StatusInternalServerError)
 	}
-	dt = a.prepareDraftWithFileInfos(draft.UserId, dt)
+	dt = a.prepareDraftWithFileInfos(c, draft.UserId, dt)
 	message := model.NewWebSocketEvent(model.WebsocketEventDraftCreated, "", dt.ChannelId, dt.UserId, nil, connectionID)
 	draftJSON, jsonErr := json.Marshal(dt)
 	if jsonErr != nil {
-		mlog.Warn("Failed to encode draft to JSON", mlog.Err(jsonErr))
+		c.Logger().Warn("Failed to encode draft to JSON", mlog.Err(jsonErr))
 	}
 	message.Add("draft", string(draftJSON))
 	a.Publish(message)
@@ -83,7 +83,7 @@ func (a *App) UpsertDraft(c request.CTX, draft *model.Draft, connectionID string
 	return dt, nil
 }
-func (a *App) GetDraftsForUser(userID, teamID string) ([]*model.Draft, *model.AppError) {
+func (a *App) GetDraftsForUser(rctx request.CTX, userID, teamID string) ([]*model.Draft, *model.AppError) {
 	if !*a.Config().ServiceSettings.AllowSyncedDrafts {
 		return nil, model.NewAppError("GetDraftsForUser", "app.draft.feature_disabled", nil, "", http.StatusNotImplemented)
 	}
@@ -95,14 +95,14 @@ func (a *App) GetDraftsForUser(userID, teamID string) ([]*model.Draft, *model.Ap
 	}
 	for _, draft := range drafts {
-		a.prepareDraftWithFileInfos(userID, draft)
+		a.prepareDraftWithFileInfos(rctx, userID, draft)
 	}
 	return drafts, nil
 }
-func (a *App) prepareDraftWithFileInfos(userID string, draft *model.Draft) *model.Draft {
-	if fileInfos, err := a.getFileInfosForDraft(draft); err != nil {
-		mlog.Error("Failed to get files for a user's drafts", mlog.String("user_id", userID), mlog.Err(err))
+func (a *App) prepareDraftWithFileInfos(rctx request.CTX, userID string, draft *model.Draft) *model.Draft {
+	if fileInfos, err := a.getFileInfosForDraft(rctx, draft); err != nil {
+		rctx.Logger().Error("Failed to get files for a user's drafts", mlog.String("user_id", userID), mlog.Err(err))
 	} else {
 		draft.Metadata = &model.PostMetadata{}
 		draft.Metadata.Files = fileInfos
@@ -111,7 +111,7 @@ func (a *App) prepareDraftWithFileInfos(userID string, draft *model.Draft) *mode
 	return draft
 }
-func (a *App) getFileInfosForDraft(draft *model.Draft) ([]*model.FileInfo, *model.AppError) {
+func (a *App) getFileInfosForDraft(rctx request.CTX, draft *model.Draft) ([]*model.FileInfo, *model.AppError) {
 	if len(draft.FileIds) == 0 {
 		return nil, nil
 	}
@@ -126,7 +126,7 @@ func (a *App) getFileInfosForDraft(draft *model.Draft) ([]*model.FileInfo, *mode
 		if fileInfo.PostId == "" && fileInfo.CreatorId == draft.UserId {
 			fileInfos = append(fileInfos, fileInfo)
 		} else {
-			mlog.Debug("Invalid file id in draft", mlog.String("file_id", fileInfo.Id), mlog.String("user_id", draft.UserId))
+			rctx.Logger().Debug("Invalid file id in draft", mlog.String("file_id", fileInfo.Id), mlog.String("user_id", draft.UserId))
 		}
 	}
@@ -134,12 +134,12 @@ func (a *App) getFileInfosForDraft(draft *model.Draft) ([]*model.FileInfo, *mode
 		return nil, nil
 	}
-	a.generateMiniPreviewForInfos(fileInfos)
+	a.generateMiniPreviewForInfos(rctx, fileInfos)
 	return fileInfos, nil
 }
-func (a *App) DeleteDraft(userID, channelID, rootID, connectionID string) (*model.Draft, *model.AppError) {
+func (a *App) DeleteDraft(rctx request.CTX, userID, channelID, rootID, connectionID string) (*model.Draft, *model.AppError) {
 	if !*a.Config().ServiceSettings.AllowSyncedDrafts {
 		return nil, model.NewAppError("DeleteDraft", "app.draft.feature_disabled", nil, "", http.StatusNotImplemented)
 	}
@@ -155,7 +155,7 @@ func (a *App) DeleteDraft(userID, channelID, rootID, connectionID string) (*mode
 	draftJSON, jsonErr := json.Marshal(draft)
 	if jsonErr != nil {
-		mlog.Warn("Failed to encode draft to JSON")
+		rctx.Logger().Warn("Failed to encode draft to JSON")
 	}
 	message := model.NewWebSocketEvent(model.WebsocketEventDraftDeleted, "", draft.ChannelId, draft.UserId, nil, connectionID)

View File

@@ -87,7 +87,7 @@ func TestUpsertDraft(t *testing.T) {
 	_, err := th.App.UpsertDraft(th.Context, draft, "")
 	assert.Nil(t, err)
-	drafts, err := th.App.GetDraftsForUser(user.Id, th.BasicTeam.Id)
+	drafts, err := th.App.GetDraftsForUser(th.Context, user.Id, th.BasicTeam.Id)
 	assert.Nil(t, err)
 	assert.Len(t, drafts, 1)
 	draft1 := drafts[0]
@@ -104,7 +104,7 @@ func TestUpsertDraft(t *testing.T) {
 	_, err = th.App.UpsertDraft(th.Context, draft, "")
 	assert.Nil(t, err)
-	drafts, err = th.App.GetDraftsForUser(user.Id, th.BasicTeam.Id)
+	drafts, err = th.App.GetDraftsForUser(th.Context, user.Id, th.BasicTeam.Id)
 	assert.Nil(t, err)
 	assert.Len(t, drafts, 1)
 	draft2 := drafts[0]
@@ -221,7 +221,7 @@ func TestUpdateDraft(t *testing.T) {
 	_, err := th.App.UpsertDraft(th.Context, draft1, "")
 	assert.Nil(t, err)
-	drafts, err := th.App.GetDraftsForUser(user.Id, th.BasicTeam.Id)
+	drafts, err := th.App.GetDraftsForUser(th.Context, user.Id, th.BasicTeam.Id)
 	assert.Nil(t, err)
 	draftResp := drafts[0]
@@ -269,7 +269,7 @@ func TestGetDraftsForUser(t *testing.T) {
 	assert.Nil(t, createDraftErr2)
 	t.Run("get drafts", func(t *testing.T) {
-		draftResp, err := th.App.GetDraftsForUser(user.Id, th.BasicTeam.Id)
+		draftResp, err := th.App.GetDraftsForUser(th.Context, user.Id, th.BasicTeam.Id)
 		assert.Nil(t, err)
 		assert.Equal(t, draft2.Message, draftResp[0].Message)
@@ -297,7 +297,7 @@ func TestGetDraftsForUser(t *testing.T) {
 		assert.Equal(t, draftWithFiles.ChannelId, draftResp.ChannelId)
 		assert.ElementsMatch(t, draftWithFiles.FileIds, draftResp.FileIds)
-		draftsWithFilesResp, err := th.App.GetDraftsForUser(user.Id, th.BasicTeam.Id)
+		draftsWithFilesResp, err := th.App.GetDraftsForUser(th.Context, user.Id, th.BasicTeam.Id)
 		assert.Nil(t, err)
 		assert.Equal(t, draftWithFiles.Message, draftsWithFilesResp[0].Message)
@@ -334,7 +334,7 @@ func TestGetDraftsForUser(t *testing.T) {
 		assert.Len(t, draftWithFiles.Metadata.Files, 1)
 		assert.Equal(t, fileResp1.Name, draftWithFiles.Metadata.Files[0].Name)
-		draftsWithFilesResp, err := th.App.GetDraftsForUser(user.Id, th.BasicTeam.Id)
+		draftsWithFilesResp, err := th.App.GetDraftsForUser(th.Context, user.Id, th.BasicTeam.Id)
 		assert.Nil(t, err)
 		assert.Equal(t, draftWithFiles.Message, draftsWithFilesResp[0].Message)
@@ -354,7 +354,7 @@ func TestGetDraftsForUser(t *testing.T) {
 		th.App.UpdateConfig(func(cfg *model.Config) { *cfg.ServiceSettings.AllowSyncedDrafts = false })
 		defer th.App.UpdateConfig(func(cfg *model.Config) { *cfg.ServiceSettings.AllowSyncedDrafts = true })
-		_, err := th.App.GetDraftsForUser(user.Id, th.BasicTeam.Id)
+		_, err := th.App.GetDraftsForUser(th.Context, user.Id, th.BasicTeam.Id)
 		assert.NotNil(t, err)
 	})
 }
@@ -383,7 +383,7 @@ func TestDeleteDraft(t *testing.T) {
 	assert.Nil(t, createDraftErr)
 	t.Run("delete draft", func(t *testing.T) {
-		draftResp, err := th.App.DeleteDraft(user.Id, channel.Id, "", "")
+		draftResp, err := th.App.DeleteDraft(th.Context, user.Id, channel.Id, "", "")
 		assert.Nil(t, err)
 		assert.Equal(t, draft1.Message, draftResp.Message)
@@ -402,7 +402,7 @@ func TestDeleteDraft(t *testing.T) {
 		th.App.UpdateConfig(func(cfg *model.Config) { *cfg.ServiceSettings.AllowSyncedDrafts = false })
 		defer th.App.UpdateConfig(func(cfg *model.Config) { *cfg.ServiceSettings.AllowSyncedDrafts = true })
-		_, err := th.App.DeleteDraft(user.Id, channel.Id, "", "")
+		_, err := th.App.DeleteDraft(th.Context, user.Id, channel.Id, "", "")
		assert.NotNil(t, err)
 	})
 }

View File

@@ -326,7 +326,7 @@ func (a *App) RemoveDirectory(path string) *model.AppError {
 	return nil
 }
-func (a *App) getInfoForFilename(post *model.Post, teamID, channelID, userID, oldId, filename string) *model.FileInfo {
+func (a *App) getInfoForFilename(rctx request.CTX, post *model.Post, teamID, channelID, userID, oldId, filename string) *model.FileInfo {
 	name, _ := url.QueryUnescape(filename)
 	pathPrefix := fmt.Sprintf("teams/%s/channels/%s/users/%s/%s/", teamID, channelID, userID, oldId)
 	path := pathPrefix + name
@@ -334,7 +334,7 @@ func (a *App) getInfoForFilename(post *model.Post, teamID, channelID, userID, ol
 	// Open the file and populate the fields of the FileInfo
 	data, err := a.ReadFile(path)
 	if err != nil {
-		mlog.Error(
+		rctx.Logger().Error(
 			"File not found when migrating post to use FileInfos",
 			mlog.String("post_id", post.Id),
 			mlog.String("filename", filename),
@@ -346,7 +346,7 @@ func (a *App) getInfoForFilename(post *model.Post, teamID, channelID, userID, ol
 	info, err := getInfoForBytes(name, bytes.NewReader(data), len(data))
 	if err != nil {
-		mlog.Warn(
+		rctx.Logger().Warn(
 			"Unable to fully decode file info when migrating post to use FileInfos",
 			mlog.String("post_id", post.Id),
 			mlog.String("filename", filename),
@@ -372,13 +372,13 @@ func (a *App) getInfoForFilename(post *model.Post, teamID, channelID, userID, ol
 	return info
 }
-func (a *App) findTeamIdForFilename(post *model.Post, id, filename string) string {
+func (a *App) findTeamIdForFilename(rctx request.CTX, post *model.Post, id, filename string) string {
 	name, _ := url.QueryUnescape(filename)
 	// This post is in a direct channel so we need to figure out what team the files are stored under.
 	teams, err := a.Srv().Store().Team().GetTeamsByUserId(post.UserId)
 	if err != nil {
-		mlog.Error("Unable to get teams when migrating post to use FileInfo", mlog.Err(err), mlog.String("post_id", post.Id))
+		rctx.Logger().Error("Unable to get teams when migrating post to use FileInfo", mlog.Err(err), mlog.String("post_id", post.Id))
 		return ""
 	}
@@ -402,18 +402,18 @@ var fileMigrationLock sync.Mutex
 var oldFilenameMatchExp = regexp.MustCompile(`^\/([a-z\d]{26})\/([a-z\d]{26})\/([a-z\d]{26})\/([^\/]+)$`)
 // Parse the path from the Filename of the form /{channelID}/{userID}/{uid}/{nameWithExtension}
-func parseOldFilenames(filenames []string, channelID, userID string) [][]string {
+func parseOldFilenames(rctx request.CTX, filenames []string, channelID, userID string) [][]string {
 	parsed := [][]string{}
 	for _, filename := range filenames {
 		matches := oldFilenameMatchExp.FindStringSubmatch(filename)
 		if len(matches) != 5 {
-			mlog.Error("Failed to parse old Filename", mlog.String("filename", filename))
+			rctx.Logger().Error("Failed to parse old Filename", mlog.String("filename", filename))
 			continue
 		}
 		if matches[1] != channelID {
-			mlog.Error("ChannelId in Filename does not match", mlog.String("channel_id", channelID), mlog.String("matched", matches[1]))
+			rctx.Logger().Error("ChannelId in Filename does not match", mlog.String("channel_id", channelID), mlog.String("matched", matches[1]))
 		} else if matches[2] != userID {
-			mlog.Error("UserId in Filename does not match", mlog.String("user_id", userID), mlog.String("matched", matches[2]))
+			rctx.Logger().Error("UserId in Filename does not match", mlog.String("user_id", userID), mlog.String("matched", matches[2]))
 		} else {
 			parsed = append(parsed, matches[1:])
 		}
@@ -422,9 +422,9 @@ func parseOldFilenames(filenames []string, channelID, userID string) [][]string
 }
 // Creates and stores FileInfos for a post created before the FileInfos table existed.
-func (a *App) MigrateFilenamesToFileInfos(post *model.Post) []*model.FileInfo {
+func (a *App) MigrateFilenamesToFileInfos(rctx request.CTX, post *model.Post) []*model.FileInfo {
 	if len(post.Filenames) == 0 {
-		mlog.Warn("Unable to migrate post to use FileInfos with an empty Filenames field", mlog.String("post_id", post.Id))
+		rctx.Logger().Warn("Unable to migrate post to use FileInfos with an empty Filenames field", mlog.String("post_id", post.Id))
 		return []*model.FileInfo{}
 	}
@@ -432,7 +432,7 @@ func (a *App) MigrateFilenamesToFileInfos(post *model.Post) []*model.FileInfo {
 	// There's a weird bug that rarely happens where a post ends up with duplicate Filenames so remove those
 	filenames := utils.RemoveDuplicatesFromStringArray(post.Filenames)
 	if errCh != nil {
-		mlog.Error(
+		rctx.Logger().Error(
 			"Unable to get channel when migrating post to use FileInfos",
 			mlog.String("post_id", post.Id),
 			mlog.String("channel_id", post.ChannelId),
@@ -442,10 +442,10 @@ func (a *App) MigrateFilenamesToFileInfos(post *model.Post) []*model.FileInfo {
 	}
 	// Parse and validate filenames before further processing
-	parsedFilenames := parseOldFilenames(filenames, post.ChannelId, post.UserId)
+	parsedFilenames := parseOldFilenames(rctx, filenames, post.ChannelId, post.UserId)
 	if len(parsedFilenames) == 0 {
-		mlog.Error("Unable to parse filenames")
+		rctx.Logger().Error("Unable to parse filenames")
 		return []*model.FileInfo{}
 	}
@@ -453,7 +453,7 @@ func (a *App) MigrateFilenamesToFileInfos(post *model.Post) []*model.FileInfo {
 	var teamID string
 	if channel.TeamId == "" {
 		// This post was made in a cross-team DM channel, so we need to find where its files were saved
-		teamID = a.findTeamIdForFilename(post, parsedFilenames[0][2], parsedFilenames[0][3])
+		teamID = a.findTeamIdForFilename(rctx, post, parsedFilenames[0][2], parsedFilenames[0][3])
 	} else {
 		teamID = channel.TeamId
 	}
@@ -461,14 +461,14 @@ func (a *App) MigrateFilenamesToFileInfos(post *model.Post) []*model.FileInfo {
 	// Create FileInfo objects for this post
 	infos := make([]*model.FileInfo, 0, len(filenames))
 	if teamID == "" {
-		mlog.Error(
+		rctx.Logger().Error(
 			"Unable to find team id for files when migrating post to use FileInfos",
 			mlog.String("filenames", strings.Join(filenames, ",")),
 			mlog.String("post_id", post.Id),
 		)
 	} else {
 		for _, parsed := range parsedFilenames {
-			info := a.getInfoForFilename(post, teamID, parsed[0], parsed[1], parsed[2], parsed[3])
+			info := a.getInfoForFilename(rctx, post, teamID, parsed[0], parsed[1], parsed[2], parsed[3])
 			if info == nil {
 				continue
 			}
@@ -483,7 +483,7 @@ func (a *App) MigrateFilenamesToFileInfos(post *model.Post) []*model.FileInfo {
 	result, nErr := a.Srv().Store().Post().Get(context.Background(), post.Id, model.GetPostsOptions{}, "", a.Config().GetSanitizeOptions())
 	if nErr != nil {
-		mlog.Error("Unable to get post when migrating post to use FileInfos", mlog.Err(nErr), mlog.String("post_id", post.Id))
+		rctx.Logger().Error("Unable to get post when migrating post to use FileInfos", mlog.Err(nErr), mlog.String("post_id", post.Id))
 		return []*model.FileInfo{}
 	}
@@ -492,21 +492,21 @@ func (a *App) MigrateFilenamesToFileInfos(post *model.Post) []*model.FileInfo {
 		var fileInfos []*model.FileInfo
 		fileInfos, nErr = a.Srv().Store().FileInfo().GetForPost(post.Id, true, false, false)
 		if nErr != nil {
-			mlog.Error("Unable to get FileInfos for migrated post", mlog.Err(nErr), mlog.String("post_id", post.Id))
+			rctx.Logger().Error("Unable to get FileInfos for migrated post", mlog.Err(nErr), mlog.String("post_id", post.Id))
 			return []*model.FileInfo{}
 		}
-		mlog.Debug("Post already migrated to use FileInfos", mlog.String("post_id", post.Id))
+		rctx.Logger().Debug("Post already migrated to use FileInfos", mlog.String("post_id", post.Id))
 		return fileInfos
 	}
-	mlog.Debug("Migrating post to use FileInfos", mlog.String("post_id", post.Id))
+	rctx.Logger().Debug("Migrating post to use FileInfos", mlog.String("post_id", post.Id))
 	savedInfos := make([]*model.FileInfo, 0, len(infos))
 	fileIDs := make([]string, 0, len(filenames))
 	for _, info := range infos {
 		if _, nErr = a.Srv().Store().FileInfo().Save(info); nErr != nil {
-			mlog.Error(
+			rctx.Logger().Error(
 				"Unable to save file info when migrating post to use FileInfos",
 				mlog.String("post_id", post.Id),
 				mlog.String("file_info_id", info.Id),
@@ -528,7 +528,7 @@ func (a *App) MigrateFilenamesToFileInfos(post *model.Post) []*model.FileInfo {
 	// Update Posts to clear Filenames and set FileIds
 	if _, nErr = a.Srv().Store().Post().Update(newPost, post); nErr != nil {
-		mlog.Error(
+		rctx.Logger().Error(
 			"Unable to save migrated post when migrating to use FileInfos",
 			mlog.String("new_file_ids", strings.Join(newPost.FileIds, ",")),
 			mlog.String("old_filenames", strings.Join(post.Filenames, ",")),
@@ -585,7 +585,7 @@ func (a *App) UploadFileForUserAndTeam(c request.CTX, data []byte, channelID str
 		thumbnailPathList := []string{info.ThumbnailPath}
 		imageDataList := [][]byte{data}
-		a.HandleImages(previewPathList, thumbnailPathList, imageDataList)
+		a.HandleImages(c, previewPathList, thumbnailPathList, imageDataList)
 	}
 	return info, nil
@@ -633,6 +633,8 @@ func UploadFileSetRaw() func(t *UploadFileTask) {
 	}
 }
 type UploadFileTask struct {
+	Logger mlog.LoggerIFace
 	// File name.
 	Name string
@@ -720,7 +722,12 @@ func (t *UploadFileTask) init(a *App) {
 // contained the last "good" FileInfo before the execution of that plugin.
 func (a *App) UploadFileX(c request.CTX, channelID, name string, input io.Reader,
 	opts ...func(*UploadFileTask)) (*model.FileInfo, *model.AppError) {
+	c.WithLogger(c.Logger().With(
+		mlog.String("file_name", name),
+	))
 	t := &UploadFileTask{
+		Logger:    c.Logger(),
 		ChannelId: filepath.Base(channelID),
 		Name:      filepath.Base(name),
 		Input:     input,
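
The UploadFileX hunk above does two things besides threading the context: it enriches the request logger with the uploaded file name via c.WithLogger(c.Logger().With(...)), and it stores that logger on the UploadFileTask so the later image pre/post-processing steps can log without having the request context in scope. A rough, illustrative sketch of that hand-off (standard log/slog again, not the actual Mattermost types):

package main

import "log/slog"

// uploadTask mirrors the idea of UploadFileTask carrying its own Logger field.
type uploadTask struct {
	logger *slog.Logger
	name   string
}

func (t *uploadTask) postprocessImage() {
	// No request context in scope here, yet the log line still carries
	// the file_name and request fields baked into the logger.
	t.logger.Error("unable to decode image")
}

func uploadFile(reqLogger *slog.Logger, name string) {
	// Scope the request logger once with the file name, then hand it to the task.
	task := &uploadTask{
		logger: reqLogger.With("file_name", name),
		name:   name,
	}
	task.postprocessImage()
}

func main() {
	uploadFile(slog.Default().With("request_id", "abc123"), "picture.png")
}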
@@ -812,7 +819,7 @@ func (t *UploadFileTask) preprocessImage() *model.AppError {
 	if t.fileinfo.IsSvg() {
 		svgInfo, err := imaging.ParseSVG(t.teeInput)
 		if err != nil {
-			mlog.Warn("Failed to parse SVG", mlog.Err(err))
+			t.Logger.Warn("Failed to parse SVG", mlog.Err(err))
 		}
 		if svgInfo.Width > 0 && svgInfo.Height > 0 {
 			t.fileinfo.Width = svgInfo.Width
@@ -876,7 +883,7 @@ func (t *UploadFileTask) postprocessImage(file io.Reader) {
 		var release func()
 		decoded, imgType, release, err = t.imgDecoder.DecodeMemBounded(file)
 		if err != nil {
-			mlog.Error("Unable to decode image", mlog.Err(err))
+			t.Logger.Error("Unable to decode image", mlog.Err(err))
 			return
 		}
 		defer release()
@@ -899,7 +906,7 @@ func (t *UploadFileTask) postprocessImage(file io.Reader) {
 				err = t.imgEncoder.EncodeJPEG(w, img, jpegEncQuality)
 			}
 			if err != nil {
-				mlog.Error("Unable to encode image as jpeg", mlog.String("path", path), mlog.Err(err))
+				t.Logger.Error("Unable to encode image as jpeg", mlog.String("path", path), mlog.Err(err))
 				w.CloseWithError(err)
 			} else {
 				w.Close()
@@ -907,7 +914,7 @@ func (t *UploadFileTask) postprocessImage(file io.Reader) {
 		}()
 		_, aerr := t.writeFile(r, path)
 		if aerr != nil {
-			mlog.Error("Unable to upload", mlog.String("path", path), mlog.Err(aerr))
+			t.Logger.Error("Unable to upload", mlog.String("path", path), mlog.Err(aerr))
 			r.CloseWithError(aerr) // always returns nil
 			return
 		}
@@ -932,7 +939,7 @@ func (t *UploadFileTask) postprocessImage(file io.Reader) {
 	if t.fileinfo.MiniPreview == nil {
 		if miniPreview, err := imaging.GenerateMiniPreviewImage(decoded,
 			miniPreviewImageWidth, miniPreviewImageHeight, jpegEncQuality); err != nil {
-			mlog.Info("Unable to generate mini preview image", mlog.Err(err))
+			t.Logger.Info("Unable to generate mini preview image", mlog.Err(err))
 		} else {
 			t.fileinfo.MiniPreview = &miniPreview
 		}
@@ -1058,24 +1065,24 @@ func (a *App) DoUploadFileExpectModification(c request.CTX, now time.Time, rawTe
 	return info, data, nil
 }
-func (a *App) HandleImages(previewPathList []string, thumbnailPathList []string, fileData [][]byte) {
+func (a *App) HandleImages(rctx request.CTX, previewPathList []string, thumbnailPathList []string, fileData [][]byte) {
 	wg := new(sync.WaitGroup)
 	for i := range fileData {
-		img, imgType, release, err := prepareImage(a.ch.imgDecoder, bytes.NewReader(fileData[i]))
+		img, imgType, release, err := prepareImage(rctx, a.ch.imgDecoder, bytes.NewReader(fileData[i]))
 		if err != nil {
-			mlog.Debug("Failed to prepare image", mlog.Err(err))
+			rctx.Logger().Debug("Failed to prepare image", mlog.Err(err))
 			continue
 		}
 		wg.Add(2)
 		go func(img image.Image, imgType, path string) {
 			defer wg.Done()
-			a.generateThumbnailImage(img, imgType, path)
+			a.generateThumbnailImage(rctx, img, imgType, path)
 		}(img, imgType, thumbnailPathList[i])
 		go func(img image.Image, imgType, path string) {
 			defer wg.Done()
-			a.generatePreviewImage(img, imgType, path)
+			a.generatePreviewImage(rctx, img, imgType, path)
 		}(img, imgType, previewPathList[i])
 		wg.Wait()
@@ -1083,7 +1090,7 @@ func (a *App) HandleImages(previewPathList []string, thumbnailPathList []string,
 	}
 }
-func prepareImage(imgDecoder *imaging.Decoder, imgData io.ReadSeeker) (img image.Image, imgType string, release func(), err error) {
+func prepareImage(rctx request.CTX, imgDecoder *imaging.Decoder, imgData io.ReadSeeker) (img image.Image, imgType string, release func(), err error) {
 	// Decode image bytes into Image object
 	img, imgType, release, err = imgDecoder.DecodeMemBounded(imgData)
 	if err != nil {
@@ -1094,70 +1101,70 @@ func prepareImage(imgDecoder *imaging.Decoder, imgData io.ReadSeeker) (img image
 	// Flip the image to be upright
 	orientation, err := imaging.GetImageOrientation(imgData)
 	if err != nil {
-		mlog.Debug("GetImageOrientation failed", mlog.Err(err))
+		rctx.Logger().Debug("GetImageOrientation failed", mlog.Err(err))
 	}
 	img = imaging.MakeImageUpright(img, orientation)
 	return img, imgType, release, nil
 }
-func (a *App) generateThumbnailImage(img image.Image, imgType, thumbnailPath string) {
+func (a *App) generateThumbnailImage(rctx request.CTX, img image.Image, imgType, thumbnailPath string) {
 	var buf bytes.Buffer
 	thumb := imaging.GenerateThumbnail(img, imageThumbnailWidth, imageThumbnailHeight)
 	if imgType == "png" {
 		if err := a.ch.imgEncoder.EncodePNG(&buf, thumb); err != nil {
-			mlog.Error("Unable to encode image as png", mlog.String("path", thumbnailPath), mlog.Err(err))
+			rctx.Logger().Error("Unable to encode image as png", mlog.String("path", thumbnailPath), mlog.Err(err))
 			return
 		}
 	} else {
 		if err := a.ch.imgEncoder.EncodeJPEG(&buf, thumb, jpegEncQuality); err != nil {
-			mlog.Error("Unable to encode image as jpeg", mlog.String("path", thumbnailPath), mlog.Err(err))
+			rctx.Logger().Error("Unable to encode image as jpeg", mlog.String("path", thumbnailPath), mlog.Err(err))
 			return
 		}
 	}
 	if _, err := a.WriteFile(&buf, thumbnailPath); err != nil {
-		mlog.Error("Unable to upload thumbnail", mlog.String("path", thumbnailPath), mlog.Err(err))
+		rctx.Logger().Error("Unable to upload thumbnail", mlog.String("path", thumbnailPath), mlog.Err(err))
 		return
 	}
 }
-func (a *App) generatePreviewImage(img image.Image, imgType, previewPath string) {
+func (a *App) generatePreviewImage(rctx request.CTX, img image.Image, imgType, previewPath string) {
 	var buf bytes.Buffer
 	preview := imaging.GeneratePreview(img, imagePreviewWidth)
 	if imgType == "png" {
 		if err := a.ch.imgEncoder.EncodePNG(&buf, preview); err != nil {
-			mlog.Error("Unable to encode image as preview png", mlog.Err(err), mlog.String("path", previewPath))
+			rctx.Logger().Error("Unable to encode image as preview png", mlog.Err(err), mlog.String("path", previewPath))
 			return
 		}
 	} else {
 		if err := a.ch.imgEncoder.EncodeJPEG(&buf, preview, jpegEncQuality); err != nil {
-			mlog.Error("Unable to encode image as preview jpg", mlog.Err(err), mlog.String("path", previewPath))
+			rctx.Logger().Error("Unable to encode image as preview jpg", mlog.Err(err), mlog.String("path", previewPath))
 			return
 		}
 	}
 	if _, err := a.WriteFile(&buf, previewPath); err != nil {
-		mlog.Error("Unable to upload preview", mlog.Err(err), mlog.String("path", previewPath))
+		rctx.Logger().Error("Unable to upload preview", mlog.Err(err), mlog.String("path", previewPath))
 		return
 	}
 }
 // generateMiniPreview updates mini preview if needed
 // will save fileinfo with the preview added
-func (a *App) generateMiniPreview(fi *model.FileInfo) {
+func (a *App) generateMiniPreview(rctx request.CTX, fi *model.FileInfo) {
 	if fi.IsImage() && !fi.IsSvg() && fi.MiniPreview == nil {
 		file, appErr := a.FileReader(fi.Path)
 		if appErr != nil {
-			mlog.Debug("error reading image file", mlog.Err(appErr))
+			rctx.Logger().Debug("Error reading image file", mlog.Err(appErr))
 			return
 		}
 		defer file.Close()
-		img, _, release, err := prepareImage(a.ch.imgDecoder, file)
+		img, _, release, err := prepareImage(rctx, a.ch.imgDecoder, file)
 		if err != nil {
-			mlog.Debug("generateMiniPreview: prepareImage failed", mlog.Err(err),
+			rctx.Logger().Debug("generateMiniPreview: prepareImage failed", mlog.Err(err),
 				mlog.String("fileinfo_id", fi.Id), mlog.String("channel_id", fi.ChannelId),
 				mlog.String("creator_id", fi.CreatorId))
 			return
@@ -1166,26 +1173,26 @@ func (a *App) generateMiniPreview(fi *model.FileInfo) {
 		var miniPreview []byte
 		if miniPreview, err = imaging.GenerateMiniPreviewImage(img,
 			miniPreviewImageWidth, miniPreviewImageHeight, jpegEncQuality); err != nil {
-			mlog.Info("Unable to generate mini preview image", mlog.Err(err))
+			rctx.Logger().Info("Unable to generate mini preview image", mlog.Err(err))
 		} else {
 			fi.MiniPreview = &miniPreview
 		}
 		if _, err = a.Srv().Store().FileInfo().Upsert(fi); err != nil {
-			mlog.Debug("creating mini preview failed", mlog.Err(err))
+			rctx.Logger().Debug("Creating mini preview failed", mlog.Err(err))
 		} else {
 			a.Srv().Store().FileInfo().InvalidateFileInfosForPostCache(fi.PostId, false)
 		}
 	}
 }
-func (a *App) generateMiniPreviewForInfos(fileInfos []*model.FileInfo) {
+func (a *App) generateMiniPreviewForInfos(rctx request.CTX, fileInfos []*model.FileInfo) {
 	wg := new(sync.WaitGroup)
 	wg.Add(len(fileInfos))
 	for _, fileInfo := range fileInfos {
 		go func(fi *model.FileInfo) {
 			defer wg.Done()
-			a.generateMiniPreview(fi)
+			a.generateMiniPreview(rctx, fi)
 		}(fileInfo)
 	}
 	wg.Wait()
@@ -1205,7 +1212,7 @@ func (s *Server) getFileInfo(fileID string) (*model.FileInfo, *model.AppError) {
 	return fileInfo, nil
 }
-func (a *App) GetFileInfo(fileID string) (*model.FileInfo, *model.AppError) {
+func (a *App) GetFileInfo(rctx request.CTX, fileID string) (*model.FileInfo, *model.AppError) {
 	fileInfo, appErr := a.Srv().getFileInfo(fileID)
 	if appErr != nil {
 		return nil, appErr
@@ -1219,7 +1226,7 @@ func (a *App) GetFileInfo(fileID string) (*model.FileInfo, *model.AppError) {
 		return nil, model.NewAppError("GetFileInfo", "app.file.cloud.get.app_error", nil, "", http.StatusForbidden)
 	}
-	a.generateMiniPreview(fileInfo)
+	a.generateMiniPreview(rctx, fileInfo)
 	return fileInfo, appErr
 }
@@ -1243,16 +1250,16 @@ func (a *App) SetFileSearchableContent(fileID string, data string) *model.AppErr
 	return nil
 }
-func (a *App) getFileInfoIgnoreCloudLimit(fileID string) (*model.FileInfo, *model.AppError) {
+func (a *App) getFileInfoIgnoreCloudLimit(rctx request.CTX, fileID string) (*model.FileInfo, *model.AppError) {
 	fileInfo, appErr := a.Srv().getFileInfo(fileID)
 	if appErr == nil {
-		a.generateMiniPreview(fileInfo)
+		a.generateMiniPreview(rctx, fileInfo)
 	}
 	return fileInfo, appErr
 }
-func (a *App) GetFileInfos(page, perPage int, opt *model.GetFileInfosOptions) ([]*model.FileInfo, *model.AppError) {
+func (a *App) GetFileInfos(rctx request.CTX, page, perPage int, opt *model.GetFileInfosOptions) ([]*model.FileInfo, *model.AppError) {
 	fileInfos, err := a.Srv().Store().FileInfo().GetWithOptions(page, perPage, opt)
 	if err != nil {
 		var invErr *store.ErrInvalidInput
@@ -1277,13 +1284,13 @@ func (a *App) GetFileInfos(page, perPage int, opt *model.GetFileInfosOptions) ([
 		return nil, appErr
 	}
-	a.generateMiniPreviewForInfos(fileInfos)
+	a.generateMiniPreviewForInfos(rctx, fileInfos)
 	return fileInfos, nil
 }
-func (a *App) GetFile(fileID string) ([]byte, *model.AppError) {
-	info, err := a.GetFileInfo(fileID)
+func (a *App) GetFile(rctx request.CTX, fileID string) ([]byte, *model.AppError) {
+	info, err := a.GetFileInfo(rctx, fileID)
 	if err != nil {
 		return nil, err
 	}
@@ -1296,8 +1303,8 @@ func (a *App) GetFile(fileID string) ([]byte, *model.AppError) {
 	return data, nil
 }
-func (a *App) getFileIgnoreCloudLimit(fileID string) ([]byte, *model.AppError) {
-	info, err := a.getFileInfoIgnoreCloudLimit(fileID)
+func (a *App) getFileIgnoreCloudLimit(rctx request.CTX, fileID string) ([]byte, *model.AppError) {
+	info, err := a.getFileInfoIgnoreCloudLimit(rctx, fileID)
 	if err != nil {
 		return nil, err
 	}
@@ -1473,7 +1480,7 @@ func (a *App) ExtractContentFromFileInfo(rctx request.CTX, fileInfo *model.FileI
 	}
 	reloadFileInfo, storeErr := a.Srv().Store().FileInfo().Get(fileInfo.Id)
 	if storeErr != nil {
-		mlog.Warn("Failed to invalidate the fileInfo cache.", mlog.Err(storeErr), mlog.String("file_info_id", fileInfo.Id))
+		rctx.Logger().Warn("Failed to invalidate the fileInfo cache.", mlog.Err(storeErr), mlog.String("file_info_id", fileInfo.Id))
 	} else {
 		a.Srv().Store().FileInfo().InvalidateFileInfosForPostCache(reloadFileInfo.PostId, false)
 	}

View File

@@ -217,7 +217,7 @@ func TestParseOldFilenames(t *testing.T) {
 	for _, test := range tests {
 		t.Run(test.description, func(tt *testing.T) {
-			result := parseOldFilenames(test.filenames, test.channelID, test.userID)
+			result := parseOldFilenames(th.Context, test.filenames, test.channelID, test.userID)
 			require.Equal(tt, result, test.expected)
 		})
 	}
@@ -230,7 +230,7 @@ func TestGetInfoForFilename(t *testing.T) {
 	post := th.BasicPost
 	teamID := th.BasicTeam.Id
-	info := th.App.getInfoForFilename(post, teamID, post.ChannelId, post.UserId, "someid", "somefile.png")
+	info := th.App.getInfoForFilename(th.Context, post, teamID, post.ChannelId, post.UserId, "someid", "somefile.png")
 	assert.Nil(t, info, "Test non-existent file")
 }
@@ -238,13 +238,13 @@ func TestFindTeamIdForFilename(t *testing.T) {
 	th := Setup(t).InitBasic()
 	defer th.TearDown()
-	teamID := th.App.findTeamIdForFilename(th.BasicPost, "someid", "somefile.png")
+	teamID := th.App.findTeamIdForFilename(th.Context, th.BasicPost, "someid", "somefile.png")
 	assert.Equal(t, th.BasicTeam.Id, teamID)
 	_, err := th.App.CreateTeamWithUser(th.Context, &model.Team{Email: th.BasicUser.Email, Name: "zz" + model.NewId(), DisplayName: "Joram's Test Team", Type: model.TeamOpen}, th.BasicUser.Id)
 	require.Nil(t, err)
-	teamID = th.App.findTeamIdForFilename(th.BasicPost, "someid", "somefile.png")
+	teamID = th.App.findTeamIdForFilename(th.Context, th.BasicPost, "someid", "somefile.png")
 	assert.Equal(t, "", teamID)
 }
@@ -253,11 +253,11 @@ func TestMigrateFilenamesToFileInfos(t *testing.T) {
 	defer th.TearDown()
 	post := th.BasicPost
-	infos := th.App.MigrateFilenamesToFileInfos(post)
+	infos := th.App.MigrateFilenamesToFileInfos(th.Context, post)
 	assert.Equal(t, 0, len(infos))
 	post.Filenames = []string{fmt.Sprintf("/%v/%v/%v/blargh.png", th.BasicChannel.Id, th.BasicUser.Id, "someid")}
-	infos = th.App.MigrateFilenamesToFileInfos(post)
+	infos = th.App.MigrateFilenamesToFileInfos(th.Context, post)
 	assert.Equal(t, 0, len(infos))
 	path, _ := fileutils.FindDir("tests")
@@ -272,13 +272,13 @@ func TestMigrateFilenamesToFileInfos(t *testing.T) {
 	rpost, err := th.App.CreatePost(th.Context, &model.Post{UserId: th.BasicUser.Id, ChannelId: th.BasicChannel.Id, Filenames: []string{fmt.Sprintf("/%v/%v/%v/test.png", th.BasicChannel.Id, th.BasicUser.Id, fileID)}}, th.BasicChannel, false, true)
 	require.Nil(t, err)
-	infos = th.App.MigrateFilenamesToFileInfos(rpost)
+	infos = th.App.MigrateFilenamesToFileInfos(th.Context, rpost)
 	assert.Equal(t, 1, len(infos))
 	rpost, err = th.App.CreatePost(th.Context, &model.Post{UserId: th.BasicUser.Id, ChannelId: th.BasicChannel.Id, Filenames: []string{fmt.Sprintf("/%v/%v/%v/../../test.png", th.BasicChannel.Id, th.BasicUser.Id, fileID)}}, th.BasicChannel, false, true)
 	require.Nil(t, err)
-	infos = th.App.MigrateFilenamesToFileInfos(rpost)
+	infos = th.App.MigrateFilenamesToFileInfos(th.Context, rpost)
 	assert.Equal(t, 0, len(infos))
 }
@@ -355,7 +355,7 @@ func TestCopyFileInfos(t *testing.T) {
 	infoIds, err := th.App.CopyFileInfos(userID, []string{info1.Id})
 	require.Nil(t, err)
-	info2, err := th.App.GetFileInfo(infoIds[0])
+	info2, err := th.App.GetFileInfo(th.Context, infoIds[0])
 	require.Nil(t, err)
 	defer func() {
 		th.App.Srv().Store().FileInfo().PermanentDelete(info2.Id)
@@ -377,7 +377,7 @@ func TestGenerateThumbnailImage(t *testing.T) {
 	thumbnailPath := filepath.Join(dataPath, thumbnailName)
 	// when
-	th.App.generateThumbnailImage(img, "jpg", thumbnailName)
+	th.App.generateThumbnailImage(th.Context, img, "jpg", thumbnailName)
 	defer os.Remove(thumbnailPath)
 	// then

View File

@@ -30,12 +30,12 @@ import (
 // -- Bulk Import Functions --
 // These functions import data directly into the database. Security and permission checks are bypassed but validity is
 // still enforced.
-func (a *App) importScheme(c request.CTX, data *imports.SchemeImportData, dryRun bool) *model.AppError {
+func (a *App) importScheme(rctx request.CTX, data *imports.SchemeImportData, dryRun bool) *model.AppError {
 	var fields []logr.Field
 	if data != nil && data.Name != nil {
 		fields = append(fields, mlog.String("schema_name", *data.Name))
 	}
-	c.Logger().Info("Validating schema", fields...)
+	rctx.Logger().Info("Validating schema", fields...)
 	if err := imports.ValidateSchemeImportData(data); err != nil {
 		return err
@@ -46,7 +46,7 @@ func (a *App) importScheme(c request.CTX, data *imports.SchemeImportData, dryRun
 		return nil
 	}
-	c.Logger().Info("Importing schema", fields...)
+	rctx.Logger().Info("Importing schema", fields...)
 	scheme, err := a.GetSchemeByName(*data.Name)
 	if err != nil {
@@ -75,12 +75,12 @@ func (a *App) importScheme(c request.CTX, data *imports.SchemeImportData, dryRun
 	if scheme.Scope == model.SchemeScopeTeam {
 		data.DefaultTeamAdminRole.Name = &scheme.DefaultTeamAdminRole
-		if err := a.importRole(c, data.DefaultTeamAdminRole, dryRun, true); err != nil {
+		if err := a.importRole(rctx, data.DefaultTeamAdminRole, dryRun, true); err != nil {
 			return err
 		}
 		data.DefaultTeamUserRole.Name = &scheme.DefaultTeamUserRole
-		if err := a.importRole(c, data.DefaultTeamUserRole, dryRun, true); err != nil {
+		if err := a.importRole(rctx, data.DefaultTeamUserRole, dryRun, true); err != nil {
 			return err
 		}
@ -90,19 +90,19 @@ func (a *App) importScheme(c request.CTX, data *imports.SchemeImportData, dryRun
} }
} }
data.DefaultTeamGuestRole.Name = &scheme.DefaultTeamGuestRole data.DefaultTeamGuestRole.Name = &scheme.DefaultTeamGuestRole
if err := a.importRole(c, data.DefaultTeamGuestRole, dryRun, true); err != nil { if err := a.importRole(rctx, data.DefaultTeamGuestRole, dryRun, true); err != nil {
return err return err
} }
} }
if scheme.Scope == model.SchemeScopeTeam || scheme.Scope == model.SchemeScopeChannel { if scheme.Scope == model.SchemeScopeTeam || scheme.Scope == model.SchemeScopeChannel {
data.DefaultChannelAdminRole.Name = &scheme.DefaultChannelAdminRole data.DefaultChannelAdminRole.Name = &scheme.DefaultChannelAdminRole
if err := a.importRole(c, data.DefaultChannelAdminRole, dryRun, true); err != nil { if err := a.importRole(rctx, data.DefaultChannelAdminRole, dryRun, true); err != nil {
return err return err
} }
data.DefaultChannelUserRole.Name = &scheme.DefaultChannelUserRole data.DefaultChannelUserRole.Name = &scheme.DefaultChannelUserRole
if err := a.importRole(c, data.DefaultChannelUserRole, dryRun, true); err != nil { if err := a.importRole(rctx, data.DefaultChannelUserRole, dryRun, true); err != nil {
return err return err
} }
@ -112,7 +112,7 @@ func (a *App) importScheme(c request.CTX, data *imports.SchemeImportData, dryRun
} }
} }
data.DefaultChannelGuestRole.Name = &scheme.DefaultChannelGuestRole data.DefaultChannelGuestRole.Name = &scheme.DefaultChannelGuestRole
if err := a.importRole(c, data.DefaultChannelGuestRole, dryRun, true); err != nil { if err := a.importRole(rctx, data.DefaultChannelGuestRole, dryRun, true); err != nil {
return err return err
} }
} }
@ -120,14 +120,14 @@ func (a *App) importScheme(c request.CTX, data *imports.SchemeImportData, dryRun
return nil return nil
} }
func (a *App) importRole(c request.CTX, data *imports.RoleImportData, dryRun bool, isSchemeRole bool) *model.AppError { func (a *App) importRole(rctx request.CTX, data *imports.RoleImportData, dryRun bool, isSchemeRole bool) *model.AppError {
var fields []logr.Field var fields []logr.Field
if data != nil && data.Name != nil { if data != nil && data.Name != nil {
fields = append(fields, mlog.String("role_name", *data.Name)) fields = append(fields, mlog.String("role_name", *data.Name))
} }
if !isSchemeRole { if !isSchemeRole {
c.Logger().Info("Validating role", fields...) rctx.Logger().Info("Validating role", fields...)
if err := imports.ValidateRoleImportData(data); err != nil { if err := imports.ValidateRoleImportData(data); err != nil {
return err return err
@ -139,7 +139,7 @@ func (a *App) importRole(c request.CTX, data *imports.RoleImportData, dryRun boo
return nil return nil
} }
c.Logger().Info("Importing role", fields...) rctx.Logger().Info("Importing role", fields...)
role, err := a.GetRoleByName(context.Background(), *data.Name) role, err := a.GetRoleByName(context.Background(), *data.Name)
if err != nil { if err != nil {
@ -175,12 +175,12 @@ func (a *App) importRole(c request.CTX, data *imports.RoleImportData, dryRun boo
return err return err
} }
func (a *App) importTeam(c request.CTX, data *imports.TeamImportData, dryRun bool) *model.AppError { func (a *App) importTeam(rctx request.CTX, data *imports.TeamImportData, dryRun bool) *model.AppError {
var fields []logr.Field var fields []logr.Field
if data != nil && data.Name != nil { if data != nil && data.Name != nil {
fields = append(fields, mlog.String("team_name", *data.Name)) fields = append(fields, mlog.String("team_name", *data.Name))
} }
c.Logger().Info("Validating team", fields...) rctx.Logger().Info("Validating team", fields...)
if err := imports.ValidateTeamImportData(data); err != nil { if err := imports.ValidateTeamImportData(data); err != nil {
return err return err
@ -191,7 +191,7 @@ func (a *App) importTeam(c request.CTX, data *imports.TeamImportData, dryRun boo
return nil return nil
} }
c.Logger().Info("Importing team", fields...) rctx.Logger().Info("Importing team", fields...)
var team *model.Team var team *model.Team
team, err := a.Srv().Store().Team().GetByName(*data.Name) team, err := a.Srv().Store().Team().GetByName(*data.Name)
@ -230,7 +230,7 @@ func (a *App) importTeam(c request.CTX, data *imports.TeamImportData, dryRun boo
} }
if team.Id == "" { if team.Id == "" {
if _, err := a.CreateTeam(c, team); err != nil { if _, err := a.CreateTeam(rctx, team); err != nil {
return err return err
} }
} else { } else {
@ -251,12 +251,12 @@ func (a *App) importTeam(c request.CTX, data *imports.TeamImportData, dryRun boo
return nil return nil
} }
func (a *App) importChannel(c request.CTX, data *imports.ChannelImportData, dryRun bool) *model.AppError { func (a *App) importChannel(rctx request.CTX, data *imports.ChannelImportData, dryRun bool) *model.AppError {
var fields []logr.Field var fields []logr.Field
if data != nil && data.Name != nil { if data != nil && data.Name != nil {
fields = append(fields, mlog.String("channel_name", *data.Name)) fields = append(fields, mlog.String("channel_name", *data.Name))
} }
c.Logger().Info("Validating channel", fields...) rctx.Logger().Info("Validating channel", fields...)
if err := imports.ValidateChannelImportData(data); err != nil { if err := imports.ValidateChannelImportData(data); err != nil {
return err return err
@ -267,7 +267,7 @@ func (a *App) importChannel(c request.CTX, data *imports.ChannelImportData, dryR
return nil return nil
} }
c.Logger().Info("Importing channel", fields...) rctx.Logger().Info("Importing channel", fields...)
team, err := a.Srv().Store().Team().GetByName(*data.Team) team, err := a.Srv().Store().Team().GetByName(*data.Team)
if err != nil { if err != nil {
@ -313,11 +313,11 @@ func (a *App) importChannel(c request.CTX, data *imports.ChannelImportData, dryR
var chErr *model.AppError var chErr *model.AppError
if channel.Id == "" { if channel.Id == "" {
if _, chErr = a.CreateChannel(c, channel, false); chErr != nil { if _, chErr = a.CreateChannel(rctx, channel, false); chErr != nil {
return chErr return chErr
} }
} else { } else {
if _, chErr = a.UpdateChannel(c, channel); chErr != nil { if _, chErr = a.UpdateChannel(rctx, channel); chErr != nil {
return chErr return chErr
} }
} }
@ -331,12 +331,12 @@ func (a *App) importChannel(c request.CTX, data *imports.ChannelImportData, dryR
return nil return nil
} }
func (a *App) importUser(c request.CTX, data *imports.UserImportData, dryRun bool) *model.AppError { func (a *App) importUser(rctx request.CTX, data *imports.UserImportData, dryRun bool) *model.AppError {
var fields []logr.Field var fields []logr.Field
if data != nil && data.Username != nil { if data != nil && data.Username != nil {
fields = append(fields, mlog.String("user_name", *data.Username)) fields = append(fields, mlog.String("user_name", *data.Username))
} }
c.Logger().Info("Validating user", fields...) rctx.Logger().Info("Validating user", fields...)
if err := imports.ValidateUserImportData(data); err != nil { if err := imports.ValidateUserImportData(data); err != nil {
return err return err
@ -347,7 +347,7 @@ func (a *App) importUser(c request.CTX, data *imports.UserImportData, dryRun boo
return nil return nil
} }
c.Logger().Info("Importing user", fields...) rctx.Logger().Info("Importing user", fields...)
// We want to avoid database writes if nothing has changed. // We want to avoid database writes if nothing has changed.
hasUserChanged := false hasUserChanged := false
@ -568,17 +568,17 @@ func (a *App) importUser(c request.CTX, data *imports.UserImportData, dryRun boo
pref := model.Preference{UserId: savedUser.Id, Category: model.PreferenceCategoryTutorialSteps, Name: savedUser.Id, Value: "0"} pref := model.Preference{UserId: savedUser.Id, Category: model.PreferenceCategoryTutorialSteps, Name: savedUser.Id, Value: "0"}
if err := a.Srv().Store().Preference().Save(model.Preferences{pref}); err != nil { if err := a.Srv().Store().Preference().Save(model.Preferences{pref}); err != nil {
c.Logger().Warn("Encountered error saving tutorial preference", mlog.Err(err)) rctx.Logger().Warn("Encountered error saving tutorial preference", mlog.Err(err))
} }
} else { } else {
var appErr *model.AppError var appErr *model.AppError
if hasUserChanged { if hasUserChanged {
if savedUser, appErr = a.UpdateUser(c, user, false); appErr != nil { if savedUser, appErr = a.UpdateUser(rctx, user, false); appErr != nil {
return appErr return appErr
} }
} }
if hasUserRolesChanged { if hasUserRolesChanged {
if savedUser, appErr = a.UpdateUserRoles(c, user.Id, roles, false); appErr != nil { if savedUser, appErr = a.UpdateUserRoles(rctx, user.Id, roles, false); appErr != nil {
return appErr return appErr
} }
} }
@ -630,14 +630,14 @@ func (a *App) importUser(c request.CTX, data *imports.UserImportData, dryRun boo
} }
if err != nil { if err != nil {
c.Logger().Warn("Unable to open the profile image.", mlog.Err(err)) rctx.Logger().Warn("Unable to open the profile image.", mlog.Err(err))
} else { } else {
defer file.Close() defer file.Close()
if limitErr := checkImageLimits(file, *a.Config().FileSettings.MaxImageResolution); limitErr != nil { if limitErr := checkImageLimits(file, *a.Config().FileSettings.MaxImageResolution); limitErr != nil {
return model.NewAppError("SetProfileImage", "api.user.upload_profile_user.check_image_limits.app_error", nil, "", http.StatusBadRequest) return model.NewAppError("SetProfileImage", "api.user.upload_profile_user.check_image_limits.app_error", nil, "", http.StatusBadRequest)
} }
if err := a.SetProfileImageFromFile(c, savedUser.Id, file); err != nil { if err := a.SetProfileImageFromFile(rctx, savedUser.Id, file); err != nil {
c.Logger().Warn("Unable to set the profile image from a file.", mlog.Err(err)) rctx.Logger().Warn("Unable to set the profile image from a file.", mlog.Err(err))
} }
} }
} }
@ -774,10 +774,10 @@ func (a *App) importUser(c request.CTX, data *imports.UserImportData, dryRun boo
} }
} }
return a.importUserTeams(c, savedUser, data.Teams) return a.importUserTeams(rctx, savedUser, data.Teams)
} }
func (a *App) importUserTeams(c request.CTX, user *model.User, data *[]imports.UserTeamImportData) *model.AppError { func (a *App) importUserTeams(rctx request.CTX, user *model.User, data *[]imports.UserTeamImportData) *model.AppError {
if data == nil { if data == nil {
return nil return nil
} }
@ -804,7 +804,7 @@ func (a *App) importUserTeams(c request.CTX, user *model.User, data *[]imports.U
isAdminByTeamId = map[string]bool{} isAdminByTeamId = map[string]bool{}
) )
existingMemberships, nErr := a.Srv().Store().Team().GetTeamsForUser(c, user.Id, "", true) existingMemberships, nErr := a.Srv().Store().Team().GetTeamsForUser(rctx, user.Id, "", true)
if nErr != nil { if nErr != nil {
return model.NewAppError("importUserTeams", "app.team.get_members.app_error", nil, "", http.StatusInternalServerError).Wrap(nErr) return model.NewAppError("importUserTeams", "app.team.get_members.app_error", nil, "", http.StatusInternalServerError).Wrap(nErr)
} }
@ -916,12 +916,12 @@ func (a *App) importUserTeams(c request.CTX, user *model.User, data *[]imports.U
for _, member := range append(newMembers, oldMembers...) { for _, member := range append(newMembers, oldMembers...) {
if member.ExplicitRoles != rolesByTeamId[member.TeamId] { if member.ExplicitRoles != rolesByTeamId[member.TeamId] {
if _, err = a.UpdateTeamMemberRoles(c, member.TeamId, user.Id, rolesByTeamId[member.TeamId]); err != nil { if _, err = a.UpdateTeamMemberRoles(rctx, member.TeamId, user.Id, rolesByTeamId[member.TeamId]); err != nil {
return err return err
} }
} }
a.UpdateTeamMemberSchemeRoles(c, member.TeamId, user.Id, isGuestByTeamId[member.TeamId], isUserByTeamId[member.TeamId], isAdminByTeamId[member.TeamId]) a.UpdateTeamMemberSchemeRoles(rctx, member.TeamId, user.Id, isGuestByTeamId[member.TeamId], isUserByTeamId[member.TeamId], isAdminByTeamId[member.TeamId])
} }
for _, team := range allTeams { for _, team := range allTeams {
@ -932,7 +932,7 @@ func (a *App) importUserTeams(c request.CTX, user *model.User, data *[]imports.U
} }
} }
channelsToImport := channels[team.Id] channelsToImport := channels[team.Id]
if err := a.importUserChannels(c, user, team, &channelsToImport); err != nil { if err := a.importUserChannels(rctx, user, team, &channelsToImport); err != nil {
return err return err
} }
} }
@ -940,7 +940,7 @@ func (a *App) importUserTeams(c request.CTX, user *model.User, data *[]imports.U
return nil return nil
} }
func (a *App) importUserChannels(c request.CTX, user *model.User, team *model.Team, data *[]imports.UserChannelImportData) *model.AppError { func (a *App) importUserChannels(rctx request.CTX, user *model.User, team *model.Team, data *[]imports.UserChannelImportData) *model.AppError {
if data == nil { if data == nil {
return nil return nil
} }
@ -1106,12 +1106,12 @@ func (a *App) importUserChannels(c request.CTX, user *model.User, team *model.Te
for _, member := range append(newMembers, oldMembers...) { for _, member := range append(newMembers, oldMembers...) {
if member.ExplicitRoles != rolesByChannelId[member.ChannelId] { if member.ExplicitRoles != rolesByChannelId[member.ChannelId] {
if _, err = a.UpdateChannelMemberRoles(c, member.ChannelId, user.Id, rolesByChannelId[member.ChannelId]); err != nil { if _, err = a.UpdateChannelMemberRoles(rctx, member.ChannelId, user.Id, rolesByChannelId[member.ChannelId]); err != nil {
return err return err
} }
} }
a.UpdateChannelMemberSchemeRoles(c, member.ChannelId, user.Id, isGuestByChannelId[member.ChannelId], isUserByChannelId[member.ChannelId], isAdminByChannelId[member.ChannelId]) a.UpdateChannelMemberSchemeRoles(rctx, member.ChannelId, user.Id, isGuestByChannelId[member.ChannelId], isUserByChannelId[member.ChannelId], isAdminByChannelId[member.ChannelId])
} }
for _, channel := range allChannels { for _, channel := range allChannels {
@ -1156,7 +1156,7 @@ func (a *App) importReaction(data *imports.ReactionImportData, post *model.Post)
return nil return nil
} }
func (a *App) importReplies(c request.CTX, data []imports.ReplyImportData, post *model.Post, teamID string) *model.AppError { func (a *App) importReplies(rctx request.CTX, data []imports.ReplyImportData, post *model.Post, teamID string) *model.AppError {
var err *model.AppError var err *model.AppError
usernames := []string{} usernames := []string{}
for _, replyData := range data { for _, replyData := range data {
@ -1205,7 +1205,7 @@ func (a *App) importReplies(c request.CTX, data []imports.ReplyImportData, post
reply.Message = *replyData.Message reply.Message = *replyData.Message
reply.CreateAt = *replyData.CreateAt reply.CreateAt = *replyData.CreateAt
if reply.CreateAt < post.CreateAt { if reply.CreateAt < post.CreateAt {
c.Logger().Warn("Reply CreateAt is before parent post CreateAt, setting it to parent post CreateAt", mlog.Int64("reply_create_at", reply.CreateAt), mlog.Int64("parent_create_at", post.CreateAt)) rctx.Logger().Warn("Reply CreateAt is before parent post CreateAt, setting it to parent post CreateAt", mlog.Int64("reply_create_at", reply.CreateAt), mlog.Int64("parent_create_at", post.CreateAt))
reply.CreateAt = post.CreateAt reply.CreateAt = post.CreateAt
} }
if replyData.Type != nil { if replyData.Type != nil {
@ -1215,7 +1215,7 @@ func (a *App) importReplies(c request.CTX, data []imports.ReplyImportData, post
reply.EditAt = *replyData.EditAt reply.EditAt = *replyData.EditAt
} }
fileIDs := a.uploadAttachments(c, replyData.Attachments, reply, teamID) fileIDs := a.uploadAttachments(rctx, replyData.Attachments, reply, teamID)
for _, fileID := range reply.FileIds { for _, fileID := range reply.FileIds {
if _, ok := fileIDs[fileID]; !ok { if _, ok := fileIDs[fileID]; !ok {
a.Srv().Store().FileInfo().PermanentDelete(fileID) a.Srv().Store().FileInfo().PermanentDelete(fileID)
@ -1254,13 +1254,13 @@ func (a *App) importReplies(c request.CTX, data []imports.ReplyImportData, post
} }
for _, postWithData := range postsWithData { for _, postWithData := range postsWithData {
a.updateFileInfoWithPostId(postWithData.post) a.updateFileInfoWithPostId(rctx, postWithData.post)
} }
return nil return nil
} }
func (a *App) importAttachment(c request.CTX, data *imports.AttachmentImportData, post *model.Post, teamID string) (*model.FileInfo, *model.AppError) { func (a *App) importAttachment(rctx request.CTX, data *imports.AttachmentImportData, post *model.Post, teamID string) (*model.FileInfo, *model.AppError) {
var ( var (
name string name string
file io.Reader file io.Reader
@ -1274,7 +1274,7 @@ func (a *App) importAttachment(c request.CTX, data *imports.AttachmentImportData
name = data.Data.Name name = data.Data.Name
file = zipFile.(io.Reader) file = zipFile.(io.Reader)
c.Logger().Info("Preparing file upload from ZIP", mlog.String("file_name", name), mlog.Uint64("file_size", data.Data.UncompressedSize64)) rctx.Logger().Info("Preparing file upload from ZIP", mlog.String("file_name", name), mlog.Uint64("file_size", data.Data.UncompressedSize64))
} else { } else {
realFile, err := os.Open(*data.Path) realFile, err := os.Open(*data.Path)
if err != nil { if err != nil {
@ -1288,7 +1288,7 @@ func (a *App) importAttachment(c request.CTX, data *imports.AttachmentImportData
if info, err := realFile.Stat(); err != nil { if info, err := realFile.Stat(); err != nil {
fields = append(fields, mlog.Int64("file_size", info.Size())) fields = append(fields, mlog.Int64("file_size", info.Size()))
} }
c.Logger().Info("Preparing file upload from file system", fields...) rctx.Logger().Info("Preparing file upload from file system", fields...)
} }
timestamp := utils.TimeFromMillis(post.CreateAt) timestamp := utils.TimeFromMillis(post.CreateAt)
@ -1300,7 +1300,7 @@ func (a *App) importAttachment(c request.CTX, data *imports.AttachmentImportData
// Go over existing files in the post and see if there already exists a file with the same name, size and hash. If so - skip it // Go over existing files in the post and see if there already exists a file with the same name, size and hash. If so - skip it
if post.Id != "" { if post.Id != "" {
oldFiles, err := a.getFileInfosForPostIgnoreCloudLimit(post.Id, true, false) oldFiles, err := a.getFileInfosForPostIgnoreCloudLimit(rctx, post.Id, true, false)
if err != nil { if err != nil {
return nil, model.NewAppError("BulkImport", "app.import.attachment.file_upload.error", map[string]any{"FilePath": *data.Path}, "", http.StatusBadRequest) return nil, model.NewAppError("BulkImport", "app.import.attachment.file_upload.error", map[string]any{"FilePath": *data.Path}, "", http.StatusBadRequest)
} }
@ -1311,29 +1311,29 @@ func (a *App) importAttachment(c request.CTX, data *imports.AttachmentImportData
// check sha1 // check sha1
newHash := sha1.Sum(fileData) newHash := sha1.Sum(fileData)
oldFileData, err := a.getFileIgnoreCloudLimit(oldFile.Id) oldFileData, err := a.getFileIgnoreCloudLimit(rctx, oldFile.Id)
if err != nil { if err != nil {
return nil, model.NewAppError("BulkImport", "app.import.attachment.file_upload.error", map[string]any{"FilePath": *data.Path}, "", http.StatusBadRequest) return nil, model.NewAppError("BulkImport", "app.import.attachment.file_upload.error", map[string]any{"FilePath": *data.Path}, "", http.StatusBadRequest)
} }
oldHash := sha1.Sum(oldFileData) oldHash := sha1.Sum(oldFileData)
if bytes.Equal(oldHash[:], newHash[:]) { if bytes.Equal(oldHash[:], newHash[:]) {
mlog.Info("Skipping uploading of file because name already exists", mlog.String("file_name", name)) rctx.Logger().Info("Skipping uploading of file because name already exists", mlog.String("file_name", name))
return oldFile, nil return oldFile, nil
} }
} }
} }
mlog.Info("Uploading file with name", mlog.String("file_name", name)) rctx.Logger().Info("Uploading file with name", mlog.String("file_name", name))
fileInfo, appErr := a.DoUploadFile(c, timestamp, teamID, post.ChannelId, post.UserId, name, fileData) fileInfo, appErr := a.DoUploadFile(rctx, timestamp, teamID, post.ChannelId, post.UserId, name, fileData)
if appErr != nil { if appErr != nil {
mlog.Error("Failed to upload file", mlog.Err(appErr), mlog.String("file_name", name)) rctx.Logger().Error("Failed to upload file", mlog.Err(appErr), mlog.String("file_name", name))
return nil, appErr return nil, appErr
} }
if fileInfo.IsImage() && !fileInfo.IsSvg() { if fileInfo.IsImage() && !fileInfo.IsSvg() {
a.HandleImages([]string{fileInfo.PreviewPath}, []string{fileInfo.ThumbnailPath}, [][]byte{fileData}) a.HandleImages(rctx, []string{fileInfo.PreviewPath}, []string{fileInfo.ThumbnailPath}, [][]byte{fileData})
} }
return fileInfo, nil return fileInfo, nil
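
The duplicate check in importAttachment above compares an incoming file against attachments already on the post by name, size, and SHA-1 digest, and skips the upload on a match. A rough, self-contained sketch of that check (the types and helper names are illustrative, not Mattermost's store API):

package main

import (
    "bytes"
    "crypto/sha1"
    "fmt"
)

// attachment is a stand-in for an existing file attached to a post.
type attachment struct {
    Name string
    Data []byte
}

// alreadyAttached reports whether newData duplicates an existing attachment:
// same name, same size, and same SHA-1 digest.
func alreadyAttached(existing []attachment, name string, newData []byte) bool {
    newHash := sha1.Sum(newData)
    for _, old := range existing {
        if old.Name != name || len(old.Data) != len(newData) {
            continue
        }
        oldHash := sha1.Sum(old.Data)
        if bytes.Equal(oldHash[:], newHash[:]) {
            return true // identical content already attached; skip the upload
        }
    }
    return false
}

func main() {
    existing := []attachment{{Name: "logo.png", Data: []byte("png-bytes")}}
    fmt.Println(alreadyAttached(existing, "logo.png", []byte("png-bytes"))) // true
    fmt.Println(alreadyAttached(existing, "logo.png", []byte("different"))) // false
}
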
@ -1421,12 +1421,12 @@ func getPostStrID(post *model.Post) string {
// importMultiplePostLines will return an error and the line that // importMultiplePostLines will return an error and the line that
// caused it whenever possible // caused it whenever possible
func (a *App) importMultiplePostLines(c request.CTX, lines []imports.LineImportWorkerData, dryRun bool) (int, *model.AppError) { func (a *App) importMultiplePostLines(rctx request.CTX, lines []imports.LineImportWorkerData, dryRun bool) (int, *model.AppError) {
if len(lines) == 0 { if len(lines) == 0 {
return 0, nil return 0, nil
} }
c.Logger().Info("Validating post lines", mlog.Int("count", len(lines)), mlog.Int("first_line", lines[0].LineNumber)) rctx.Logger().Info("Validating post lines", mlog.Int("count", len(lines)), mlog.Int("first_line", lines[0].LineNumber))
for _, line := range lines { for _, line := range lines {
if err := imports.ValidatePostImportData(line.Post, a.MaxPostSize()); err != nil { if err := imports.ValidatePostImportData(line.Post, a.MaxPostSize()); err != nil {
@ -1439,7 +1439,7 @@ func (a *App) importMultiplePostLines(c request.CTX, lines []imports.LineImportW
return 0, nil return 0, nil
} }
c.Logger().Info("Importing post lines", mlog.Int("count", len(lines)), mlog.Int("first_line", lines[0].LineNumber)) rctx.Logger().Info("Importing post lines", mlog.Int("count", len(lines)), mlog.Int("first_line", lines[0].LineNumber))
usernames := []string{} usernames := []string{}
teamNames := make([]string, len(lines)) teamNames := make([]string, len(lines))
@ -1518,7 +1518,7 @@ func (a *App) importMultiplePostLines(c request.CTX, lines []imports.LineImportW
post.IsPinned = *line.Post.IsPinned post.IsPinned = *line.Post.IsPinned
} }
fileIDs := a.uploadAttachments(c, line.Post.Attachments, post, team.Id) fileIDs := a.uploadAttachments(rctx, line.Post.Attachments, post, team.Id)
for _, fileID := range post.FileIds { for _, fileID := range post.FileIds {
if _, ok := fileIDs[fileID]; !ok { if _, ok := fileIDs[fileID]; !ok {
a.Srv().Store().FileInfo().PermanentDelete(fileID) a.Srv().Store().FileInfo().PermanentDelete(fileID)
@ -1606,33 +1606,33 @@ func (a *App) importMultiplePostLines(c request.CTX, lines []imports.LineImportW
} }
if postWithData.postData.Replies != nil && len(*postWithData.postData.Replies) > 0 { if postWithData.postData.Replies != nil && len(*postWithData.postData.Replies) > 0 {
err := a.importReplies(c, *postWithData.postData.Replies, postWithData.post, postWithData.team.Id) err := a.importReplies(rctx, *postWithData.postData.Replies, postWithData.post, postWithData.team.Id)
if err != nil { if err != nil {
return postWithData.lineNumber, err return postWithData.lineNumber, err
} }
} }
a.updateFileInfoWithPostId(postWithData.post) a.updateFileInfoWithPostId(rctx, postWithData.post)
} }
return 0, nil return 0, nil
} }
// uploadAttachments imports new attachments and returns current attachments of the post as a map // uploadAttachments imports new attachments and returns current attachments of the post as a map
func (a *App) uploadAttachments(c request.CTX, attachments *[]imports.AttachmentImportData, post *model.Post, teamID string) map[string]bool { func (a *App) uploadAttachments(rctx request.CTX, attachments *[]imports.AttachmentImportData, post *model.Post, teamID string) map[string]bool {
if attachments == nil { if attachments == nil {
return nil return nil
} }
fileIDs := make(map[string]bool) fileIDs := make(map[string]bool)
for _, attachment := range *attachments { for _, attachment := range *attachments {
attachment := attachment attachment := attachment
fileInfo, err := a.importAttachment(c, &attachment, post, teamID) fileInfo, err := a.importAttachment(rctx, &attachment, post, teamID)
if err != nil { if err != nil {
if attachment.Path != nil { if attachment.Path != nil {
mlog.Warn( rctx.Logger().Warn(
"failed to import attachment", "failed to import attachment",
mlog.String("path", *attachment.Path), mlog.String("path", *attachment.Path),
mlog.String("error", err.Error())) mlog.String("error", err.Error()))
} else { } else {
mlog.Warn("failed to import attachment; path was nil", rctx.Logger().Warn("failed to import attachment; path was nil",
mlog.String("error", err.Error())) mlog.String("error", err.Error()))
} }
continue continue
@ -1642,14 +1642,14 @@ func (a *App) uploadAttachments(c request.CTX, attachments *[]imports.Attachment
return fileIDs return fileIDs
} }
func (a *App) updateFileInfoWithPostId(post *model.Post) { func (a *App) updateFileInfoWithPostId(rctx request.CTX, post *model.Post) {
for _, fileID := range post.FileIds { for _, fileID := range post.FileIds {
if err := a.Srv().Store().FileInfo().AttachToPost(fileID, post.Id, post.ChannelId, post.UserId); err != nil { if err := a.Srv().Store().FileInfo().AttachToPost(fileID, post.Id, post.ChannelId, post.UserId); err != nil {
mlog.Error("Error attaching files to post.", mlog.String("post_id", post.Id), mlog.Array("post_file_ids", post.FileIds), mlog.Err(err)) rctx.Logger().Error("Error attaching files to post.", mlog.String("post_id", post.Id), mlog.Array("post_file_ids", post.FileIds), mlog.Err(err))
} }
} }
} }
func (a *App) importDirectChannel(c request.CTX, data *imports.DirectChannelImportData, dryRun bool) *model.AppError { func (a *App) importDirectChannel(rctx request.CTX, data *imports.DirectChannelImportData, dryRun bool) *model.AppError {
var err *model.AppError var err *model.AppError
if err = imports.ValidateDirectChannelImportData(data); err != nil { if err = imports.ValidateDirectChannelImportData(data); err != nil {
return err return err
@ -1672,13 +1672,13 @@ func (a *App) importDirectChannel(c request.CTX, data *imports.DirectChannelImpo
var channel *model.Channel var channel *model.Channel
if len(userIDs) == 2 { if len(userIDs) == 2 {
ch, err := a.createDirectChannel(c, userIDs[0], userIDs[1]) ch, err := a.createDirectChannel(rctx, userIDs[0], userIDs[1])
if err != nil && err.Id != store.ChannelExistsError { if err != nil && err.Id != store.ChannelExistsError {
return model.NewAppError("BulkImport", "app.import.import_direct_channel.create_direct_channel.error", nil, "", http.StatusBadRequest).Wrap(err) return model.NewAppError("BulkImport", "app.import.import_direct_channel.create_direct_channel.error", nil, "", http.StatusBadRequest).Wrap(err)
} }
channel = ch channel = ch
} else { } else {
ch, err := a.createGroupChannel(c, userIDs) ch, err := a.createGroupChannel(rctx, userIDs)
if err != nil && err.Id != store.ChannelExistsError { if err != nil && err.Id != store.ChannelExistsError {
return model.NewAppError("BulkImport", "app.import.import_direct_channel.create_group_channel.error", nil, "", http.StatusBadRequest).Wrap(err) return model.NewAppError("BulkImport", "app.import.import_direct_channel.create_group_channel.error", nil, "", http.StatusBadRequest).Wrap(err)
} }
@ -1730,7 +1730,7 @@ func (a *App) importDirectChannel(c request.CTX, data *imports.DirectChannelImpo
// importMultipleDirectPostLines will return an error and the line // importMultipleDirectPostLines will return an error and the line
// that caused it whenever possible // that caused it whenever possible
func (a *App) importMultipleDirectPostLines(c request.CTX, lines []imports.LineImportWorkerData, dryRun bool) (int, *model.AppError) { func (a *App) importMultipleDirectPostLines(rctx request.CTX, lines []imports.LineImportWorkerData, dryRun bool) (int, *model.AppError) {
if len(lines) == 0 { if len(lines) == 0 {
return 0, nil return 0, nil
} }
@ -1779,13 +1779,13 @@ func (a *App) importMultipleDirectPostLines(c request.CTX, lines []imports.LineI
var channel *model.Channel var channel *model.Channel
var ch *model.Channel var ch *model.Channel
if len(userIDs) == 2 { if len(userIDs) == 2 {
ch, err = a.GetOrCreateDirectChannel(c, userIDs[0], userIDs[1]) ch, err = a.GetOrCreateDirectChannel(rctx, userIDs[0], userIDs[1])
if err != nil && err.Id != store.ChannelExistsError { if err != nil && err.Id != store.ChannelExistsError {
return line.LineNumber, model.NewAppError("BulkImport", "app.import.import_direct_post.create_direct_channel.error", nil, "", http.StatusBadRequest).Wrap(err) return line.LineNumber, model.NewAppError("BulkImport", "app.import.import_direct_post.create_direct_channel.error", nil, "", http.StatusBadRequest).Wrap(err)
} }
channel = ch channel = ch
} else { } else {
ch, err = a.createGroupChannel(c, userIDs) ch, err = a.createGroupChannel(rctx, userIDs)
if err != nil && err.Id != store.ChannelExistsError { if err != nil && err.Id != store.ChannelExistsError {
return line.LineNumber, model.NewAppError("BulkImport", "app.import.import_direct_post.create_group_channel.error", nil, "", http.StatusBadRequest).Wrap(err) return line.LineNumber, model.NewAppError("BulkImport", "app.import.import_direct_post.create_group_channel.error", nil, "", http.StatusBadRequest).Wrap(err)
} }
@ -1831,7 +1831,7 @@ func (a *App) importMultipleDirectPostLines(c request.CTX, lines []imports.LineI
post.IsPinned = *line.DirectPost.IsPinned post.IsPinned = *line.DirectPost.IsPinned
} }
fileIDs := a.uploadAttachments(c, line.DirectPost.Attachments, post, "noteam") fileIDs := a.uploadAttachments(rctx, line.DirectPost.Attachments, post, "noteam")
for _, fileID := range post.FileIds { for _, fileID := range post.FileIds {
if _, ok := fileIDs[fileID]; !ok { if _, ok := fileIDs[fileID]; !ok {
a.Srv().Store().FileInfo().PermanentDelete(fileID) a.Srv().Store().FileInfo().PermanentDelete(fileID)
@ -1917,27 +1917,27 @@ func (a *App) importMultipleDirectPostLines(c request.CTX, lines []imports.LineI
} }
if postWithData.directPostData.Replies != nil { if postWithData.directPostData.Replies != nil {
if err := a.importReplies(c, *postWithData.directPostData.Replies, postWithData.post, "noteam"); err != nil { if err := a.importReplies(rctx, *postWithData.directPostData.Replies, postWithData.post, "noteam"); err != nil {
return postWithData.lineNumber, err return postWithData.lineNumber, err
} }
} }
a.updateFileInfoWithPostId(postWithData.post) a.updateFileInfoWithPostId(rctx, postWithData.post)
} }
return 0, nil return 0, nil
} }
func (a *App) importEmoji(c request.CTX, data *imports.EmojiImportData, dryRun bool) *model.AppError { func (a *App) importEmoji(rctx request.CTX, data *imports.EmojiImportData, dryRun bool) *model.AppError {
var fields []logr.Field var fields []logr.Field
if data != nil && data.Name != nil { if data != nil && data.Name != nil {
fields = append(fields, mlog.String("emoji_name", *data.Name)) fields = append(fields, mlog.String("emoji_name", *data.Name))
} }
c.Logger().Info("Validating emoji", fields...) rctx.Logger().Info("Validating emoji", fields...)
aerr := imports.ValidateEmojiImportData(data) aerr := imports.ValidateEmojiImportData(data)
if aerr != nil { if aerr != nil {
if aerr.Id == "model.emoji.system_emoji_name.app_error" { if aerr.Id == "model.emoji.system_emoji_name.app_error" {
mlog.Warn("Skipping emoji import due to name conflict with system emoji", mlog.String("emoji_name", *data.Name)) rctx.Logger().Warn("Skipping emoji import due to name conflict with system emoji", mlog.String("emoji_name", *data.Name))
return nil return nil
} }
return aerr return aerr
@ -1948,11 +1948,11 @@ func (a *App) importEmoji(c request.CTX, data *imports.EmojiImportData, dryRun b
return nil return nil
} }
c.Logger().Info("Importing emoji", fields...) rctx.Logger().Info("Importing emoji", fields...)
var emoji *model.Emoji var emoji *model.Emoji
emoji, err := a.Srv().Store().Emoji().GetByName(c, *data.Name, true) emoji, err := a.Srv().Store().Emoji().GetByName(rctx, *data.Name, true)
if err != nil { if err != nil {
var nfErr *store.ErrNotFound var nfErr *store.ErrNotFound
if !errors.As(err, &nfErr) { if !errors.As(err, &nfErr) {
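
All of the import* functions in this file follow the same shape: validate the record with the request-scoped logger, stop early on a dry run, then log again and apply the change. A condensed stand-in sketch of that flow (simplified signatures, not the actual Mattermost ones):

package main

import (
    "errors"
    "log/slog"
)

// teamImportData is a stand-in for imports.TeamImportData.
type teamImportData struct{ Name *string }

// importTeam mirrors the validate / dry-run / apply structure used by the
// bulk-import functions; the logger is the one carried by the request context.
func importTeam(logger *slog.Logger, data *teamImportData, dryRun bool) error {
    if data == nil || data.Name == nil {
        return errors.New("team name is required")
    }
    logger.Info("Validating team", "team_name", *data.Name)

    if dryRun {
        // Dry runs stop after validation so nothing is written to the database.
        return nil
    }

    logger.Info("Importing team", "team_name", *data.Name)
    // ... create or update the team here ...
    return nil
}

func main() {
    name := "engineering"
    logger := slog.Default().With("import_job", "job-1")
    _ = importTeam(logger, &teamImportData{Name: &name}, true) // dry run: validate only
}
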

View File

@ -3049,7 +3049,7 @@ func (a *OpenTracingAppLayer) DeleteCommand(commandID string) *model.AppError {
return resultVar0 return resultVar0
} }
func (a *OpenTracingAppLayer) DeleteDraft(userID string, channelID string, rootID string, connectionID string) (*model.Draft, *model.AppError) { func (a *OpenTracingAppLayer) DeleteDraft(rctx request.CTX, userID string, channelID string, rootID string, connectionID string) (*model.Draft, *model.AppError) {
origCtx := a.ctx origCtx := a.ctx
span, newCtx := tracing.StartSpanWithParentByContext(a.ctx, "app.DeleteDraft") span, newCtx := tracing.StartSpanWithParentByContext(a.ctx, "app.DeleteDraft")
@ -3061,7 +3061,7 @@ func (a *OpenTracingAppLayer) DeleteDraft(userID string, channelID string, rootI
}() }()
defer span.Finish() defer span.Finish()
resultVar0, resultVar1 := a.app.DeleteDraft(userID, channelID, rootID, connectionID) resultVar0, resultVar1 := a.app.DeleteDraft(rctx, userID, channelID, rootID, connectionID)
if resultVar1 != nil { if resultVar1 != nil {
span.LogFields(spanlog.Error(resultVar1)) span.LogFields(spanlog.Error(resultVar1))
@ -6098,7 +6098,7 @@ func (a *OpenTracingAppLayer) GetDraft(userID string, channelID string, rootID s
return resultVar0, resultVar1 return resultVar0, resultVar1
} }
func (a *OpenTracingAppLayer) GetDraftsForUser(userID string, teamID string) ([]*model.Draft, *model.AppError) { func (a *OpenTracingAppLayer) GetDraftsForUser(rctx request.CTX, userID string, teamID string) ([]*model.Draft, *model.AppError) {
origCtx := a.ctx origCtx := a.ctx
span, newCtx := tracing.StartSpanWithParentByContext(a.ctx, "app.GetDraftsForUser") span, newCtx := tracing.StartSpanWithParentByContext(a.ctx, "app.GetDraftsForUser")
@ -6110,7 +6110,7 @@ func (a *OpenTracingAppLayer) GetDraftsForUser(userID string, teamID string) ([]
}() }()
defer span.Finish() defer span.Finish()
resultVar0, resultVar1 := a.app.GetDraftsForUser(userID, teamID) resultVar0, resultVar1 := a.app.GetDraftsForUser(rctx, userID, teamID)
if resultVar1 != nil { if resultVar1 != nil {
span.LogFields(spanlog.Error(resultVar1)) span.LogFields(spanlog.Error(resultVar1))
@ -6269,7 +6269,7 @@ func (a *OpenTracingAppLayer) GetEnvironmentConfig(filter func(reflect.StructFie
return resultVar0 return resultVar0
} }
func (a *OpenTracingAppLayer) GetFile(fileID string) ([]byte, *model.AppError) { func (a *OpenTracingAppLayer) GetFile(rctx request.CTX, fileID string) ([]byte, *model.AppError) {
origCtx := a.ctx origCtx := a.ctx
span, newCtx := tracing.StartSpanWithParentByContext(a.ctx, "app.GetFile") span, newCtx := tracing.StartSpanWithParentByContext(a.ctx, "app.GetFile")
@ -6281,7 +6281,7 @@ func (a *OpenTracingAppLayer) GetFile(fileID string) ([]byte, *model.AppError) {
}() }()
defer span.Finish() defer span.Finish()
resultVar0, resultVar1 := a.app.GetFile(fileID) resultVar0, resultVar1 := a.app.GetFile(rctx, fileID)
if resultVar1 != nil { if resultVar1 != nil {
span.LogFields(spanlog.Error(resultVar1)) span.LogFields(spanlog.Error(resultVar1))
@ -6291,7 +6291,7 @@ func (a *OpenTracingAppLayer) GetFile(fileID string) ([]byte, *model.AppError) {
return resultVar0, resultVar1 return resultVar0, resultVar1
} }
func (a *OpenTracingAppLayer) GetFileInfo(fileID string) (*model.FileInfo, *model.AppError) { func (a *OpenTracingAppLayer) GetFileInfo(rctx request.CTX, fileID string) (*model.FileInfo, *model.AppError) {
origCtx := a.ctx origCtx := a.ctx
span, newCtx := tracing.StartSpanWithParentByContext(a.ctx, "app.GetFileInfo") span, newCtx := tracing.StartSpanWithParentByContext(a.ctx, "app.GetFileInfo")
@ -6303,7 +6303,7 @@ func (a *OpenTracingAppLayer) GetFileInfo(fileID string) (*model.FileInfo, *mode
}() }()
defer span.Finish() defer span.Finish()
resultVar0, resultVar1 := a.app.GetFileInfo(fileID) resultVar0, resultVar1 := a.app.GetFileInfo(rctx, fileID)
if resultVar1 != nil { if resultVar1 != nil {
span.LogFields(spanlog.Error(resultVar1)) span.LogFields(spanlog.Error(resultVar1))
@ -6313,7 +6313,7 @@ func (a *OpenTracingAppLayer) GetFileInfo(fileID string) (*model.FileInfo, *mode
return resultVar0, resultVar1 return resultVar0, resultVar1
} }
func (a *OpenTracingAppLayer) GetFileInfos(page int, perPage int, opt *model.GetFileInfosOptions) ([]*model.FileInfo, *model.AppError) { func (a *OpenTracingAppLayer) GetFileInfos(rctx request.CTX, page int, perPage int, opt *model.GetFileInfosOptions) ([]*model.FileInfo, *model.AppError) {
origCtx := a.ctx origCtx := a.ctx
span, newCtx := tracing.StartSpanWithParentByContext(a.ctx, "app.GetFileInfos") span, newCtx := tracing.StartSpanWithParentByContext(a.ctx, "app.GetFileInfos")
@ -6325,7 +6325,7 @@ func (a *OpenTracingAppLayer) GetFileInfos(page int, perPage int, opt *model.Get
}() }()
defer span.Finish() defer span.Finish()
resultVar0, resultVar1 := a.app.GetFileInfos(page, perPage, opt) resultVar0, resultVar1 := a.app.GetFileInfos(rctx, page, perPage, opt)
if resultVar1 != nil { if resultVar1 != nil {
span.LogFields(spanlog.Error(resultVar1)) span.LogFields(spanlog.Error(resultVar1))
@ -6335,7 +6335,7 @@ func (a *OpenTracingAppLayer) GetFileInfos(page int, perPage int, opt *model.Get
return resultVar0, resultVar1 return resultVar0, resultVar1
} }
func (a *OpenTracingAppLayer) GetFileInfosForPost(postID string, fromMaster bool, includeDeleted bool) ([]*model.FileInfo, int64, *model.AppError) { func (a *OpenTracingAppLayer) GetFileInfosForPost(rctx request.CTX, postID string, fromMaster bool, includeDeleted bool) ([]*model.FileInfo, int64, *model.AppError) {
origCtx := a.ctx origCtx := a.ctx
span, newCtx := tracing.StartSpanWithParentByContext(a.ctx, "app.GetFileInfosForPost") span, newCtx := tracing.StartSpanWithParentByContext(a.ctx, "app.GetFileInfosForPost")
@ -6347,7 +6347,7 @@ func (a *OpenTracingAppLayer) GetFileInfosForPost(postID string, fromMaster bool
}() }()
defer span.Finish() defer span.Finish()
resultVar0, resultVar1, resultVar2 := a.app.GetFileInfosForPost(postID, fromMaster, includeDeleted) resultVar0, resultVar1, resultVar2 := a.app.GetFileInfosForPost(rctx, postID, fromMaster, includeDeleted)
if resultVar2 != nil { if resultVar2 != nil {
span.LogFields(spanlog.Error(resultVar2)) span.LogFields(spanlog.Error(resultVar2))
@ -6357,7 +6357,7 @@ func (a *OpenTracingAppLayer) GetFileInfosForPost(postID string, fromMaster bool
return resultVar0, resultVar1, resultVar2 return resultVar0, resultVar1, resultVar2
} }
func (a *OpenTracingAppLayer) GetFileInfosForPostWithMigration(postID string, includeDeleted bool) ([]*model.FileInfo, *model.AppError) { func (a *OpenTracingAppLayer) GetFileInfosForPostWithMigration(rctx request.CTX, postID string, includeDeleted bool) ([]*model.FileInfo, *model.AppError) {
origCtx := a.ctx origCtx := a.ctx
span, newCtx := tracing.StartSpanWithParentByContext(a.ctx, "app.GetFileInfosForPostWithMigration") span, newCtx := tracing.StartSpanWithParentByContext(a.ctx, "app.GetFileInfosForPostWithMigration")
@ -6369,7 +6369,7 @@ func (a *OpenTracingAppLayer) GetFileInfosForPostWithMigration(postID string, in
}() }()
defer span.Finish() defer span.Finish()
resultVar0, resultVar1 := a.app.GetFileInfosForPostWithMigration(postID, includeDeleted) resultVar0, resultVar1 := a.app.GetFileInfosForPostWithMigration(rctx, postID, includeDeleted)
if resultVar1 != nil { if resultVar1 != nil {
span.LogFields(spanlog.Error(resultVar1)) span.LogFields(spanlog.Error(resultVar1))
@ -11332,7 +11332,7 @@ func (a *OpenTracingAppLayer) HandleCommandWebhook(c request.CTX, hookID string,
return resultVar0 return resultVar0
} }
func (a *OpenTracingAppLayer) HandleImages(previewPathList []string, thumbnailPathList []string, fileData [][]byte) { func (a *OpenTracingAppLayer) HandleImages(rctx request.CTX, previewPathList []string, thumbnailPathList []string, fileData [][]byte) {
origCtx := a.ctx origCtx := a.ctx
span, newCtx := tracing.StartSpanWithParentByContext(a.ctx, "app.HandleImages") span, newCtx := tracing.StartSpanWithParentByContext(a.ctx, "app.HandleImages")
@ -11344,7 +11344,7 @@ func (a *OpenTracingAppLayer) HandleImages(previewPathList []string, thumbnailPa
}() }()
defer span.Finish() defer span.Finish()
a.app.HandleImages(previewPathList, thumbnailPathList, fileData) a.app.HandleImages(rctx, previewPathList, thumbnailPathList, fileData)
} }
func (a *OpenTracingAppLayer) HandleIncomingWebhook(c request.CTX, hookID string, req *model.IncomingWebhookRequest) *model.AppError { func (a *OpenTracingAppLayer) HandleIncomingWebhook(c request.CTX, hookID string, req *model.IncomingWebhookRequest) *model.AppError {
@ -12542,7 +12542,7 @@ func (a *OpenTracingAppLayer) MentionsToTeamMembers(c request.CTX, message strin
return resultVar0 return resultVar0
} }
func (a *OpenTracingAppLayer) MigrateFilenamesToFileInfos(post *model.Post) []*model.FileInfo { func (a *OpenTracingAppLayer) MigrateFilenamesToFileInfos(rctx request.CTX, post *model.Post) []*model.FileInfo {
origCtx := a.ctx origCtx := a.ctx
span, newCtx := tracing.StartSpanWithParentByContext(a.ctx, "app.MigrateFilenamesToFileInfos") span, newCtx := tracing.StartSpanWithParentByContext(a.ctx, "app.MigrateFilenamesToFileInfos")
@ -12554,7 +12554,7 @@ func (a *OpenTracingAppLayer) MigrateFilenamesToFileInfos(post *model.Post) []*m
}() }()
defer span.Finish() defer span.Finish()
resultVar0 := a.app.MigrateFilenamesToFileInfos(post) resultVar0 := a.app.MigrateFilenamesToFileInfos(rctx, post)
return resultVar0 return resultVar0
} }
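
The OpenTracingAppLayer methods above are generated wrappers: each one opens a span named after the App method, delegates to the inner App (now forwarding rctx as well), records any error on the span, and finishes it. A hand-written stand-in of that wrapper shape, with a fake span type instead of the real tracing package:

package main

import (
    "fmt"
    "log/slog"
)

// span is a stand-in for a tracing span; the real layer uses OpenTracing.
type span struct{ op string }

func startSpan(op string) *span    { fmt.Println("start:", op); return &span{op: op} }
func (s *span) Finish()            { fmt.Println("finish:", s.op) }
func (s *span) LogError(err error) { fmt.Println("error in", s.op, ":", err) }

// app is a stand-in for the real App implementation.
type app struct{}

func (a *app) GetFileInfo(logger *slog.Logger, fileID string) (string, error) {
    logger.Info("loading file info", "file_id", fileID)
    return "info:" + fileID, nil
}

// tracingLayer mirrors the generated wrappers: open a span, call through with
// the request-scoped logger, record failures, finish the span.
type tracingLayer struct{ inner *app }

func (t *tracingLayer) GetFileInfo(logger *slog.Logger, fileID string) (string, error) {
    sp := startSpan("app.GetFileInfo")
    defer sp.Finish()

    info, err := t.inner.GetFileInfo(logger, fileID)
    if err != nil {
        sp.LogError(err)
    }
    return info, err
}

func main() {
    layer := &tracingLayer{inner: &app{}}
    if _, err := layer.GetFileInfo(slog.Default(), "file-1"); err != nil {
        fmt.Println("failed:", err)
    }
}

Because this layer is regenerated from the App method set, signature changes such as the new rctx parameter show up here in lockstep with the hand-written App code.
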

View File

@ -763,7 +763,7 @@ func (api *PluginAPI) CopyFileInfos(userID string, fileIDs []string) ([]string,
} }
func (api *PluginAPI) GetFileInfo(fileID string) (*model.FileInfo, *model.AppError) { func (api *PluginAPI) GetFileInfo(fileID string) (*model.FileInfo, *model.AppError) {
return api.app.GetFileInfo(fileID) return api.app.GetFileInfo(api.ctx, fileID)
} }
func (api *PluginAPI) SetFileSearchableContent(fileID string, content string) *model.AppError { func (api *PluginAPI) SetFileSearchableContent(fileID string, content string) *model.AppError {
@ -771,7 +771,7 @@ func (api *PluginAPI) SetFileSearchableContent(fileID string, content string) *m
} }
func (api *PluginAPI) GetFileInfos(page, perPage int, opt *model.GetFileInfosOptions) ([]*model.FileInfo, *model.AppError) { func (api *PluginAPI) GetFileInfos(page, perPage int, opt *model.GetFileInfosOptions) ([]*model.FileInfo, *model.AppError) {
return api.app.GetFileInfos(page, perPage, opt) return api.app.GetFileInfos(api.ctx, page, perPage, opt)
} }
func (api *PluginAPI) GetFileLink(fileID string) (string, *model.AppError) { func (api *PluginAPI) GetFileLink(fileID string) (string, *model.AppError) {
@ -779,7 +779,7 @@ func (api *PluginAPI) GetFileLink(fileID string) (string, *model.AppError) {
return "", model.NewAppError("GetFileLink", "plugin_api.get_file_link.disabled.app_error", nil, "", http.StatusNotImplemented) return "", model.NewAppError("GetFileLink", "plugin_api.get_file_link.disabled.app_error", nil, "", http.StatusNotImplemented)
} }
info, err := api.app.GetFileInfo(fileID) info, err := api.app.GetFileInfo(api.ctx, fileID)
if err != nil { if err != nil {
return "", err return "", err
} }
@ -796,7 +796,7 @@ func (api *PluginAPI) ReadFile(path string) ([]byte, *model.AppError) {
} }
func (api *PluginAPI) GetFile(fileID string) ([]byte, *model.AppError) { func (api *PluginAPI) GetFile(fileID string) ([]byte, *model.AppError) {
return api.app.GetFile(fileID) return api.app.GetFile(api.ctx, fileID)
} }
func (api *PluginAPI) UploadFile(data []byte, channelID string, filename string) (*model.FileInfo, *model.AppError) { func (api *PluginAPI) UploadFile(data []byte, channelID string, filename string) (*model.FileInfo, *model.AppError) {

View File

@ -611,7 +611,7 @@ func TestHookFileWillBeUploaded(t *testing.T) {
assert.NotNil(t, response) assert.NotNil(t, response)
fileID := response.Id fileID := response.Id
fileInfo, err := th.App.GetFileInfo(fileID) fileInfo, err := th.App.GetFileInfo(th.Context, fileID)
assert.Nil(t, err) assert.Nil(t, err)
assert.NotNil(t, fileInfo) assert.NotNil(t, fileInfo)
assert.Equal(t, "testhook.txt", fileInfo.Name) assert.Equal(t, "testhook.txt", fileInfo.Name)
@ -682,7 +682,7 @@ func TestHookFileWillBeUploaded(t *testing.T) {
assert.NotNil(t, response) assert.NotNil(t, response)
fileID := response.Id fileID := response.Id
fileInfo, err := th.App.GetFileInfo(fileID) fileInfo, err := th.App.GetFileInfo(th.Context, fileID)
assert.Nil(t, err) assert.Nil(t, err)
assert.NotNil(t, fileInfo) assert.NotNil(t, fileInfo)
assert.Equal(t, "modifiedinfo", fileInfo.Name) assert.Equal(t, "modifiedinfo", fileInfo.Name)

View File

@ -569,7 +569,7 @@ func (a *App) SendEphemeralPost(c request.CTX, userID string, post *model.Post)
sanitizedPost, appErr := a.SanitizePostMetadataForUser(c, post, userID) sanitizedPost, appErr := a.SanitizePostMetadataForUser(c, post, userID)
if appErr != nil { if appErr != nil {
mlog.Error("Failed to sanitize post metadata for user", mlog.String("user_id", userID), mlog.Err(appErr)) c.Logger().Error("Failed to sanitize post metadata for user", mlog.String("user_id", userID), mlog.Err(appErr))
// If we failed to sanitize the post, we still want to remove the metadata. // If we failed to sanitize the post, we still want to remove the metadata.
sanitizedPost = post.Clone() sanitizedPost = post.Clone()
@ -1662,7 +1662,7 @@ func (a *App) SearchPostsForUser(c request.CTX, terms string, userID string, tea
return postSearchResults, nil return postSearchResults, nil
} }
func (a *App) GetFileInfosForPostWithMigration(postID string, includeDeleted bool) ([]*model.FileInfo, *model.AppError) { func (a *App) GetFileInfosForPostWithMigration(rctx request.CTX, postID string, includeDeleted bool) ([]*model.FileInfo, *model.AppError) {
pchan := make(chan store.StoreResult, 1) pchan := make(chan store.StoreResult, 1)
go func() { go func() {
post, err := a.Srv().Store().Post().GetSingle(postID, includeDeleted) post, err := a.Srv().Store().Post().GetSingle(postID, includeDeleted)
@ -1670,7 +1670,7 @@ func (a *App) GetFileInfosForPostWithMigration(postID string, includeDeleted boo
close(pchan) close(pchan)
}() }()
infos, firstInaccessibleFileTime, err := a.GetFileInfosForPost(postID, false, includeDeleted) infos, firstInaccessibleFileTime, err := a.GetFileInfosForPost(rctx, postID, false, includeDeleted)
if err != nil { if err != nil {
return nil, err return nil, err
} }
@ -1693,7 +1693,7 @@ func (a *App) GetFileInfosForPostWithMigration(postID string, includeDeleted boo
a.Srv().Store().FileInfo().InvalidateFileInfosForPostCache(postID, false) a.Srv().Store().FileInfo().InvalidateFileInfosForPostCache(postID, false)
a.Srv().Store().FileInfo().InvalidateFileInfosForPostCache(postID, true) a.Srv().Store().FileInfo().InvalidateFileInfosForPostCache(postID, true)
// The post has Filenames that need to be replaced with FileInfos // The post has Filenames that need to be replaced with FileInfos
infos = a.MigrateFilenamesToFileInfos(post) infos = a.MigrateFilenamesToFileInfos(rctx, post)
} }
} }
@ -1701,7 +1701,7 @@ func (a *App) GetFileInfosForPostWithMigration(postID string, includeDeleted boo
} }
// GetFileInfosForPost also returns firstInaccessibleFileTime based on cloud plan's limit. // GetFileInfosForPost also returns firstInaccessibleFileTime based on cloud plan's limit.
func (a *App) GetFileInfosForPost(postID string, fromMaster bool, includeDeleted bool) ([]*model.FileInfo, int64, *model.AppError) { func (a *App) GetFileInfosForPost(rctx request.CTX, postID string, fromMaster bool, includeDeleted bool) ([]*model.FileInfo, int64, *model.AppError) {
fileInfos, err := a.Srv().Store().FileInfo().GetForPost(postID, fromMaster, includeDeleted, true) fileInfos, err := a.Srv().Store().FileInfo().GetForPost(postID, fromMaster, includeDeleted, true)
if err != nil { if err != nil {
return nil, 0, model.NewAppError("GetFileInfosForPost", "app.file_info.get_for_post.app_error", nil, "", http.StatusInternalServerError).Wrap(err) return nil, 0, model.NewAppError("GetFileInfosForPost", "app.file_info.get_for_post.app_error", nil, "", http.StatusInternalServerError).Wrap(err)
@ -1712,18 +1712,18 @@ func (a *App) GetFileInfosForPost(postID string, fromMaster bool, includeDeleted
return nil, 0, appErr return nil, 0, appErr
} }
a.generateMiniPreviewForInfos(fileInfos) a.generateMiniPreviewForInfos(rctx, fileInfos)
return fileInfos, firstInaccessibleFileTime, nil return fileInfos, firstInaccessibleFileTime, nil
} }
func (a *App) getFileInfosForPostIgnoreCloudLimit(postID string, fromMaster bool, includeDeleted bool) ([]*model.FileInfo, *model.AppError) { func (a *App) getFileInfosForPostIgnoreCloudLimit(rctx request.CTX, postID string, fromMaster bool, includeDeleted bool) ([]*model.FileInfo, *model.AppError) {
fileInfos, err := a.Srv().Store().FileInfo().GetForPost(postID, fromMaster, includeDeleted, true) fileInfos, err := a.Srv().Store().FileInfo().GetForPost(postID, fromMaster, includeDeleted, true)
if err != nil { if err != nil {
return nil, model.NewAppError("getFileInfosForPostIgnoreCloudLimit", "app.file_info.get_for_post.app_error", nil, "", http.StatusInternalServerError).Wrap(err) return nil, model.NewAppError("getFileInfosForPostIgnoreCloudLimit", "app.file_info.get_for_post.app_error", nil, "", http.StatusInternalServerError).Wrap(err)
} }
a.generateMiniPreviewForInfos(fileInfos) a.generateMiniPreviewForInfos(rctx, fileInfos)
return fileInfos, nil return fileInfos, nil
} }
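
GetFileInfosForPostWithMigration above fetches the post in a goroutine while the file infos load, and only when a post has no FileInfos but still carries legacy Filenames does it migrate them on the fly. A simplified, self-contained sketch of that shape (stand-in types, not the store API):

package main

import "fmt"

// post and fileInfo are stand-ins for the model types.
type post struct {
    ID        string
    Filenames []string // legacy field used before FileInfos existed
}

type fileInfo struct{ ID string }

func fetchPost(id string) *post                { return &post{ID: id, Filenames: []string{"old.png"}} }
func fetchFileInfos(postID string) []*fileInfo { return nil } // nothing stored yet

func migrateFilenames(p *post) []*fileInfo {
    infos := make([]*fileInfo, 0, len(p.Filenames))
    for i := range p.Filenames {
        infos = append(infos, &fileInfo{ID: fmt.Sprintf("%s-file-%d", p.ID, i)})
    }
    return infos
}

// fileInfosWithMigration loads the file infos while the post is fetched
// concurrently; legacy Filenames are migrated only when no FileInfos exist.
func fileInfosWithMigration(postID string) []*fileInfo {
    pchan := make(chan *post, 1)
    go func() {
        pchan <- fetchPost(postID)
        close(pchan)
    }()

    infos := fetchFileInfos(postID)
    if len(infos) == 0 {
        if p := <-pchan; p != nil && len(p.Filenames) > 0 {
            infos = migrateFilenames(p)
        }
    }
    return infos
}

func main() {
    for _, info := range fileInfosWithMigration("post-1") {
        fmt.Println(info.ID)
    }
}
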

View File

@ -126,15 +126,15 @@ func (a *App) PreparePostForClient(c request.CTX, originalPost *model.Post, isNe
// Emojis and reaction counts // Emojis and reaction counts
if emojis, reactions, err := a.getEmojisAndReactionsForPost(c, post); err != nil { if emojis, reactions, err := a.getEmojisAndReactionsForPost(c, post); err != nil {
mlog.Warn("Failed to get emojis and reactions for a post", mlog.String("post_id", post.Id), mlog.Err(err)) c.Logger().Warn("Failed to get emojis and reactions for a post", mlog.String("post_id", post.Id), mlog.Err(err))
} else { } else {
post.Metadata.Emojis = emojis post.Metadata.Emojis = emojis
post.Metadata.Reactions = reactions post.Metadata.Reactions = reactions
} }
// Files // Files
if fileInfos, _, err := a.getFileMetadataForPost(post, isNewPost || isEditPost); err != nil { if fileInfos, _, err := a.getFileMetadataForPost(c, post, isNewPost || isEditPost); err != nil {
mlog.Warn("Failed to get files for a post", mlog.String("post_id", post.Id), mlog.Err(err)) c.Logger().Warn("Failed to get files for a post", mlog.String("post_id", post.Id), mlog.Err(err))
} else { } else {
post.Metadata.Files = fileInfos post.Metadata.Files = fileInfos
} }
@ -142,14 +142,14 @@ func (a *App) PreparePostForClient(c request.CTX, originalPost *model.Post, isNe
if includePriority && a.IsPostPriorityEnabled() && post.RootId == "" { if includePriority && a.IsPostPriorityEnabled() && post.RootId == "" {
// Post's Priority if any // Post's Priority if any
if priority, err := a.GetPriorityForPost(post.Id); err != nil { if priority, err := a.GetPriorityForPost(post.Id); err != nil {
mlog.Warn("Failed to get post priority for a post", mlog.String("post_id", post.Id), mlog.Err(err)) c.Logger().Warn("Failed to get post priority for a post", mlog.String("post_id", post.Id), mlog.Err(err))
} else { } else {
post.Metadata.Priority = priority post.Metadata.Priority = priority
} }
// Post's acknowledgements if any // Post's acknowledgements if any
if acknowledgements, err := a.GetAcknowledgementsForPost(post.Id); err != nil { if acknowledgements, err := a.GetAcknowledgementsForPost(post.Id); err != nil {
mlog.Warn("Failed to get post acknowledgements for a post", mlog.String("post_id", post.Id), mlog.Err(err)) c.Logger().Warn("Failed to get post acknowledgements for a post", mlog.String("post_id", post.Id), mlog.Err(err))
} else { } else {
post.Metadata.Acknowledgements = acknowledgements post.Metadata.Acknowledgements = acknowledgements
} }
@ -246,12 +246,12 @@ func (a *App) SanitizePostListMetadataForUser(c request.CTX, postList *model.Pos
return clonedPostList, nil return clonedPostList, nil
} }
func (a *App) getFileMetadataForPost(post *model.Post, fromMaster bool) ([]*model.FileInfo, int64, *model.AppError) { func (a *App) getFileMetadataForPost(rctx request.CTX, post *model.Post, fromMaster bool) ([]*model.FileInfo, int64, *model.AppError) {
if len(post.FileIds) == 0 { if len(post.FileIds) == 0 {
return nil, 0, nil return nil, 0, nil
} }
return a.GetFileInfosForPost(post.Id, fromMaster, false) return a.GetFileInfosForPost(rctx, post.Id, fromMaster, false)
} }
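
PreparePostForClient above treats each metadata lookup as best-effort: a failed emoji, file, priority, or acknowledgement fetch is logged through the request's logger and the remaining metadata is still assembled. A stand-in sketch of that pattern:

package main

import "log/slog"

// postMetadata is a stand-in for the metadata attached to a post.
type postMetadata struct {
    Files  []string
    Emojis []string
}

func loadFiles(postID string) ([]string, error)  { return []string{"a.png"}, nil }
func loadEmojis(postID string) ([]string, error) { return nil, nil }

// prepareMetadata assembles metadata best-effort: each failed lookup is logged
// with the request-scoped logger and skipped instead of aborting the whole call.
func prepareMetadata(logger *slog.Logger, postID string) postMetadata {
    var md postMetadata

    if files, err := loadFiles(postID); err != nil {
        logger.Warn("Failed to get files for a post", "post_id", postID, "err", err)
    } else {
        md.Files = files
    }

    if emojis, err := loadEmojis(postID); err != nil {
        logger.Warn("Failed to get emojis for a post", "post_id", postID, "err", err)
    } else {
        md.Emojis = emojis
    }
    return md
}

func main() {
    logger := slog.Default().With("request_id", "r-123")
    _ = prepareMetadata(logger, "post-1")
}
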
func (a *App) getEmojisAndReactionsForPost(c request.CTX, post *model.Post) ([]*model.Emoji, []*model.Reaction, *model.AppError) { func (a *App) getEmojisAndReactionsForPost(c request.CTX, post *model.Post) ([]*model.Emoji, []*model.Reaction, *model.AppError) {

View File

@ -221,7 +221,7 @@ func TestAttachFilesToPost(t *testing.T) {
appErr := th.App.attachFilesToPost(post) appErr := th.App.attachFilesToPost(post)
assert.Nil(t, appErr) assert.Nil(t, appErr)
infos, _, appErr := th.App.GetFileInfosForPost(post.Id, false, false) infos, _, appErr := th.App.GetFileInfosForPost(th.Context, post.Id, false, false)
assert.Nil(t, appErr) assert.Nil(t, appErr)
assert.Len(t, infos, 2) assert.Len(t, infos, 2)
}) })
@ -249,7 +249,7 @@ func TestAttachFilesToPost(t *testing.T) {
appErr := th.App.attachFilesToPost(post) appErr := th.App.attachFilesToPost(post)
assert.Nil(t, appErr) assert.Nil(t, appErr)
infos, _, appErr := th.App.GetFileInfosForPost(post.Id, false, false) infos, _, appErr := th.App.GetFileInfosForPost(th.Context, post.Id, false, false)
assert.Nil(t, appErr) assert.Nil(t, appErr)
assert.Len(t, infos, 1) assert.Len(t, infos, 1)
assert.Equal(t, info2.Id, infos[0].Id) assert.Equal(t, info2.Id, infos[0].Id)
@ -814,7 +814,7 @@ func TestDeletePostWithFileAttachments(t *testing.T) {
time.Sleep(time.Millisecond * 100) time.Sleep(time.Millisecond * 100)
// Check that the file can no longer be reached. // Check that the file can no longer be reached.
_, err = th.App.GetFileInfo(info1.Id) _, err = th.App.GetFileInfo(th.Context, info1.Id)
assert.NotNil(t, err) assert.NotNil(t, err)
} }

View File

@ -41,7 +41,7 @@ func (a *App) SlackImport(c request.CTX, fileData multipart.File, fileSize int64
InvalidateAllCaches: func() { a.ch.srv.InvalidateAllCaches() }, InvalidateAllCaches: func() { a.ch.srv.InvalidateAllCaches() },
MaxPostSize: func() int { return a.ch.srv.platform.MaxPostSize() }, MaxPostSize: func() int { return a.ch.srv.platform.MaxPostSize() },
PrepareImage: func(fileData []byte) (image.Image, string, func(), error) { PrepareImage: func(fileData []byte) (image.Image, string, func(), error) {
img, imgType, release, err := prepareImage(a.ch.imgDecoder, bytes.NewReader(fileData)) img, imgType, release, err := prepareImage(c, a.ch.imgDecoder, bytes.NewReader(fileData))
if err != nil { if err != nil {
return nil, "", nil, err return nil, "", nil, err
} }

View File

@ -91,7 +91,7 @@ func (a *App) runPluginsHook(c request.CTX, info *model.FileInfo, file io.Reader
written, err := a.WriteFile(r, tmpPath) written, err := a.WriteFile(r, tmpPath)
if err != nil { if err != nil {
if fileErr := a.RemoveFile(tmpPath); fileErr != nil { if fileErr := a.RemoveFile(tmpPath); fileErr != nil {
mlog.Warn("Failed to remove file", mlog.Err(fileErr)) c.Logger().Warn("Failed to remove file", mlog.Err(fileErr))
} }
r.CloseWithError(err) // always returns nil r.CloseWithError(err) // always returns nil
return err return err
@ -99,10 +99,10 @@ func (a *App) runPluginsHook(c request.CTX, info *model.FileInfo, file io.Reader
if err = <-errChan; err != nil { if err = <-errChan; err != nil {
if fileErr := a.RemoveFile(info.Path); fileErr != nil { if fileErr := a.RemoveFile(info.Path); fileErr != nil {
mlog.Warn("Failed to remove file", mlog.Err(fileErr)) c.Logger().Warn("Failed to remove file", mlog.Err(fileErr))
} }
if fileErr := a.RemoveFile(tmpPath); fileErr != nil { if fileErr := a.RemoveFile(tmpPath); fileErr != nil {
mlog.Warn("Failed to remove file", mlog.Err(fileErr)) c.Logger().Warn("Failed to remove file", mlog.Err(fileErr))
} }
return err return err
} }
@@ -296,7 +296,7 @@ func (a *App) UploadData(c request.CTX, us *model.UploadSession, rd io.Reader) (
if fileErr != nil { if fileErr != nil {
return nil, fileErr return nil, fileErr
} }
a.HandleImages([]string{info.PreviewPath}, []string{info.ThumbnailPath}, [][]byte{imgData}) a.HandleImages(c, []string{info.PreviewPath}, []string{info.ThumbnailPath}, [][]byte{imgData})
} }
if us.Type == model.UploadTypeImport { if us.Type == model.UploadTypeImport {

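The two hunks above show the core pattern this commit applies throughout: a function that already receives a request.CTX logs through c.Logger() instead of the package-level mlog, and passes the context on to helpers such as HandleImages so the resulting entries stay request-scoped. A minimal sketch of that pattern, with a hypothetical helper name (removeTempFile is illustrative and not part of this diff):

func (a *App) removeTempFile(c request.CTX, path string) {
	if err := a.RemoveFile(path); err != nil {
		// The request-scoped logger carries the request's fields, so this
		// warning can be correlated with the originating API call.
		c.Logger().Warn("Failed to remove file", mlog.String("path", path), mlog.Err(err))
	}
}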
View File

@@ -1105,7 +1105,7 @@ func TestPermanentDeleteUser(t *testing.T) {
require.False(t, res, "File was not deleted on FS. err=%v", err) require.False(t, res, "File was not deleted on FS. err=%v", err)
finfo, err = th.App.GetFileInfo(finfo.Id) finfo, err = th.App.GetFileInfo(th.Context, finfo.Id)
require.Nil(t, finfo, "Unable to find finfo. err=%v", err) require.Nil(t, finfo, "Unable to find finfo. err=%v", err)

View File

@@ -91,8 +91,8 @@ type Actions struct {
CreateGroupChannel func(request.CTX, []string) (*model.Channel, *model.AppError) CreateGroupChannel func(request.CTX, []string) (*model.Channel, *model.AppError)
CreateChannel func(*model.Channel, bool) (*model.Channel, *model.AppError) CreateChannel func(*model.Channel, bool) (*model.Channel, *model.AppError)
DoUploadFile func(time.Time, string, string, string, string, []byte) (*model.FileInfo, *model.AppError) DoUploadFile func(time.Time, string, string, string, string, []byte) (*model.FileInfo, *model.AppError)
GenerateThumbnailImage func(image.Image, string, string) GenerateThumbnailImage func(request.CTX, image.Image, string, string)
GeneratePreviewImage func(image.Image, string, string) GeneratePreviewImage func(request.CTX, image.Image, string, string)
InvalidateAllCaches func() InvalidateAllCaches func()
MaxPostSize func() int MaxPostSize func() int
PrepareImage func(fileData []byte) (image.Image, string, func(), error) PrepareImage func(fileData []byte) (image.Image, string, func(), error)
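With the signature change above, GenerateThumbnailImage and GeneratePreviewImage now receive the request context as their first argument, so the app layer can forward its own request.CTX when it wires up the importer. A sketch of what that wiring could look like; only the field types come from this diff, while the closure bodies and the app-layer method names are illustrative assumptions:

actions := slackimport.Actions{
	GenerateThumbnailImage: func(rctx request.CTX, img image.Image, imgType, targetPath string) {
		// hypothetical app-layer helper that now also takes the request context
		a.generateThumbnailImage(rctx, img, imgType, targetPath)
	},
	GeneratePreviewImage: func(rctx request.CTX, img image.Image, imgType, targetPath string) {
		a.generatePreviewImage(rctx, img, imgType, targetPath)
	},
}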
@@ -115,7 +115,7 @@ func New(store store.Store, actions Actions, config *model.Config) *SlackImporte
} }
} }
func (si *SlackImporter) SlackImport(c request.CTX, fileData multipart.File, fileSize int64, teamID string) (*model.AppError, *bytes.Buffer) { func (si *SlackImporter) SlackImport(rctx request.CTX, fileData multipart.File, fileSize int64, teamID string) (*model.AppError, *bytes.Buffer) {
// Create log file // Create log file
log := bytes.NewBufferString(i18n.T("api.slackimport.slack_import.log")) log := bytes.NewBufferString(i18n.T("api.slackimport.slack_import.log"))
@@ -199,13 +199,13 @@ func (si *SlackImporter) SlackImport(c request.CTX, fileData multipart.File, fil
posts = slackConvertChannelMentions(channels, posts) posts = slackConvertChannelMentions(channels, posts)
posts = slackConvertPostsMarkup(posts) posts = slackConvertPostsMarkup(posts)
addedUsers := si.slackAddUsers(teamID, users, log) addedUsers := si.slackAddUsers(rctx, teamID, users, log)
botUser := si.slackAddBotUser(teamID, log) botUser := si.slackAddBotUser(rctx, teamID, log)
si.slackAddChannels(c, teamID, channels, posts, addedUsers, uploads, botUser, log) si.slackAddChannels(rctx, teamID, channels, posts, addedUsers, uploads, botUser, log)
if botUser != nil { if botUser != nil {
si.deactivateSlackBotUser(botUser) si.deactivateSlackBotUser(rctx, botUser)
} }
si.actions.InvalidateAllCaches() si.actions.InvalidateAllCaches()
@@ -228,7 +228,7 @@ func truncateRunes(s string, i int) string {
return s return s
} }
func (si *SlackImporter) slackAddUsers(teamId string, slackusers []slackUser, importerLog *bytes.Buffer) map[string]*model.User { func (si *SlackImporter) slackAddUsers(rctx request.CTX, teamId string, slackusers []slackUser, importerLog *bytes.Buffer) map[string]*model.User {
// Log header // Log header
importerLog.WriteString(i18n.T("api.slackimport.slack_add_users.created")) importerLog.WriteString(i18n.T("api.slackimport.slack_add_users.created"))
importerLog.WriteString("===============\r\n\r\n") importerLog.WriteString("===============\r\n\r\n")
@@ -249,7 +249,7 @@ func (si *SlackImporter) slackAddUsers(teamId string, slackusers []slackUser, im
if email == "" { if email == "" {
email = sUser.Username + "@example.com" email = sUser.Username + "@example.com"
importerLog.WriteString(i18n.T("api.slackimport.slack_add_users.missing_email_address", map[string]any{"Email": email, "Username": sUser.Username})) importerLog.WriteString(i18n.T("api.slackimport.slack_add_users.missing_email_address", map[string]any{"Email": email, "Username": sUser.Username}))
mlog.Warn("Slack Import: User does not have an email address in the Slack export. Used username as a placeholder. The user should update their email address once logged in to the system.", mlog.String("user_email", email), mlog.String("user_name", sUser.Username)) rctx.Logger().Warn("Slack Import: User does not have an email address in the Slack export. Used username as a placeholder. The user should update their email address once logged in to the system.", mlog.String("user_email", email), mlog.String("user_name", sUser.Username))
} }
password := model.NewId() password := model.NewId()
@@ -274,7 +274,7 @@ func (si *SlackImporter) slackAddUsers(teamId string, slackusers []slackUser, im
Password: password, Password: password,
} }
mUser := si.oldImportUser(team, &newUser) mUser := si.oldImportUser(rctx, team, &newUser)
if mUser == nil { if mUser == nil {
importerLog.WriteString(i18n.T("api.slackimport.slack_add_users.unable_import", map[string]any{"Username": sUser.Username})) importerLog.WriteString(i18n.T("api.slackimport.slack_add_users.unable_import", map[string]any{"Username": sUser.Username}))
continue continue
@@ -286,7 +286,7 @@ func (si *SlackImporter) slackAddUsers(teamId string, slackusers []slackUser, im
return addedUsers return addedUsers
} }
func (si *SlackImporter) slackAddBotUser(teamId string, log *bytes.Buffer) *model.User { func (si *SlackImporter) slackAddBotUser(rctx request.CTX, teamId string, log *bytes.Buffer) *model.User {
team, err := si.store.Team().Get(teamId) team, err := si.store.Team().Get(teamId)
if err != nil { if err != nil {
log.WriteString(i18n.T("api.slackimport.slack_import.team_fail")) log.WriteString(i18n.T("api.slackimport.slack_import.team_fail"))
@@ -305,7 +305,7 @@ func (si *SlackImporter) slackAddBotUser(teamId string, log *bytes.Buffer) *mode
Password: password, Password: password,
} }
mUser := si.oldImportUser(team, &botUser) mUser := si.oldImportUser(rctx, team, &botUser)
if mUser == nil { if mUser == nil {
log.WriteString(i18n.T("api.slackimport.slack_add_bot_user.unable_import", map[string]any{"Username": username})) log.WriteString(i18n.T("api.slackimport.slack_add_bot_user.unable_import", map[string]any{"Username": username}))
return nil return nil
@@ -315,7 +315,7 @@ func (si *SlackImporter) slackAddBotUser(teamId string, log *bytes.Buffer) *mode
return mUser return mUser
} }
func (si *SlackImporter) slackAddPosts(teamId string, channel *model.Channel, posts []slackPost, users map[string]*model.User, uploads map[string]*zip.File, botUser *model.User) { func (si *SlackImporter) slackAddPosts(rctx request.CTX, teamId string, channel *model.Channel, posts []slackPost, users map[string]*model.User, uploads map[string]*zip.File, botUser *model.User) {
sort.Slice(posts, func(i, j int) bool { sort.Slice(posts, func(i, j int) bool {
return slackConvertTimeStamp(posts[i].TimeStamp) < slackConvertTimeStamp(posts[j].TimeStamp) return slackConvertTimeStamp(posts[i].TimeStamp) < slackConvertTimeStamp(posts[j].TimeStamp)
}) })
@@ -324,11 +324,11 @@ func (si *SlackImporter) slackAddPosts(teamId string, channel *model.Channel, po
switch { switch {
case sPost.Type == "message" && (sPost.SubType == "" || sPost.SubType == "file_share"): case sPost.Type == "message" && (sPost.SubType == "" || sPost.SubType == "file_share"):
if sPost.User == "" { if sPost.User == "" {
mlog.Debug("Slack Import: Unable to import the message as the user field is missing.") rctx.Logger().Debug("Slack Import: Unable to import the message as the user field is missing.")
continue continue
} }
if users[sPost.User] == nil { if users[sPost.User] == nil {
mlog.Debug("Slack Import: Unable to add the message as the Slack user does not exist in Mattermost.", mlog.String("user", sPost.User)) rctx.Logger().Debug("Slack Import: Unable to add the message as the Slack user does not exist in Mattermost.", mlog.String("user", sPost.User))
continue continue
} }
newPost := model.Post{ newPost := model.Post{
@@ -339,12 +339,12 @@ func (si *SlackImporter) slackAddPosts(teamId string, channel *model.Channel, po
} }
if sPost.Upload { if sPost.Upload {
if sPost.File != nil { if sPost.File != nil {
if fileInfo, ok := si.slackUploadFile(sPost.File, uploads, teamId, newPost.ChannelId, newPost.UserId, sPost.TimeStamp); ok { if fileInfo, ok := si.slackUploadFile(rctx, sPost.File, uploads, teamId, newPost.ChannelId, newPost.UserId, sPost.TimeStamp); ok {
newPost.FileIds = append(newPost.FileIds, fileInfo.Id) newPost.FileIds = append(newPost.FileIds, fileInfo.Id)
} }
} else if sPost.Files != nil { } else if sPost.Files != nil {
for _, file := range sPost.Files { for _, file := range sPost.Files {
if fileInfo, ok := si.slackUploadFile(file, uploads, teamId, newPost.ChannelId, newPost.UserId, sPost.TimeStamp); ok { if fileInfo, ok := si.slackUploadFile(rctx, file, uploads, teamId, newPost.ChannelId, newPost.UserId, sPost.TimeStamp); ok {
newPost.FileIds = append(newPost.FileIds, fileInfo.Id) newPost.FileIds = append(newPost.FileIds, fileInfo.Id)
} }
} }
@@ -354,22 +354,22 @@ func (si *SlackImporter) slackAddPosts(teamId string, channel *model.Channel, po
if sPost.ThreadTS != "" && sPost.ThreadTS != sPost.TimeStamp { if sPost.ThreadTS != "" && sPost.ThreadTS != sPost.TimeStamp {
newPost.RootId = threads[sPost.ThreadTS] newPost.RootId = threads[sPost.ThreadTS]
} }
postId := si.oldImportPost(&newPost) postId := si.oldImportPost(rctx, &newPost)
// If post is thread starter // If post is thread starter
if sPost.ThreadTS == sPost.TimeStamp { if sPost.ThreadTS == sPost.TimeStamp {
threads[sPost.ThreadTS] = postId threads[sPost.ThreadTS] = postId
} }
case sPost.Type == "message" && sPost.SubType == "file_comment": case sPost.Type == "message" && sPost.SubType == "file_comment":
if sPost.Comment == nil { if sPost.Comment == nil {
mlog.Debug("Slack Import: Unable to import the message as it has no comments.") rctx.Logger().Debug("Slack Import: Unable to import the message as it has no comments.")
continue continue
} }
if sPost.Comment.User == "" { if sPost.Comment.User == "" {
mlog.Debug("Slack Import: Unable to import the message as the user field is missing.") rctx.Logger().Debug("Slack Import: Unable to import the message as the user field is missing.")
continue continue
} }
if users[sPost.Comment.User] == nil { if users[sPost.Comment.User] == nil {
mlog.Debug("Slack Import: Unable to add the message as the Slack user does not exist in Mattermost.", mlog.String("user", sPost.User)) rctx.Logger().Debug("Slack Import: Unable to add the message as the Slack user does not exist in Mattermost.", mlog.String("user", sPost.User))
continue continue
} }
newPost := model.Post{ newPost := model.Post{
@@ -378,14 +378,14 @@ func (si *SlackImporter) slackAddPosts(teamId string, channel *model.Channel, po
Message: sPost.Comment.Comment, Message: sPost.Comment.Comment,
CreateAt: slackConvertTimeStamp(sPost.TimeStamp), CreateAt: slackConvertTimeStamp(sPost.TimeStamp),
} }
si.oldImportPost(&newPost) si.oldImportPost(rctx, &newPost)
case sPost.Type == "message" && sPost.SubType == "bot_message": case sPost.Type == "message" && sPost.SubType == "bot_message":
if botUser == nil { if botUser == nil {
mlog.Warn("Slack Import: Unable to import the bot message as the bot user does not exist.") rctx.Logger().Warn("Slack Import: Unable to import the bot message as the bot user does not exist.")
continue continue
} }
if sPost.BotId == "" { if sPost.BotId == "" {
mlog.Warn("Slack Import: Unable to import bot message as the BotId field is missing.") rctx.Logger().Warn("Slack Import: Unable to import bot message as the BotId field is missing.")
continue continue
} }
@@ -403,18 +403,18 @@ func (si *SlackImporter) slackAddPosts(teamId string, channel *model.Channel, po
Type: model.PostTypeSlackAttachment, Type: model.PostTypeSlackAttachment,
} }
postId := si.oldImportIncomingWebhookPost(post, props) postId := si.oldImportIncomingWebhookPost(rctx, post, props)
// If post is thread starter // If post is thread starter
if sPost.ThreadTS == sPost.TimeStamp { if sPost.ThreadTS == sPost.TimeStamp {
threads[sPost.ThreadTS] = postId threads[sPost.ThreadTS] = postId
} }
case sPost.Type == "message" && (sPost.SubType == "channel_join" || sPost.SubType == "channel_leave"): case sPost.Type == "message" && (sPost.SubType == "channel_join" || sPost.SubType == "channel_leave"):
if sPost.User == "" { if sPost.User == "" {
mlog.Debug("Slack Import: Unable to import the message as the user field is missing.") rctx.Logger().Debug("Slack Import: Unable to import the message as the user field is missing.")
continue continue
} }
if users[sPost.User] == nil { if users[sPost.User] == nil {
mlog.Debug("Slack Import: Unable to add the message as the Slack user does not exist in Mattermost.", mlog.String("user", sPost.User)) rctx.Logger().Debug("Slack Import: Unable to add the message as the Slack user does not exist in Mattermost.", mlog.String("user", sPost.User))
continue continue
} }
@@ -435,14 +435,14 @@ func (si *SlackImporter) slackAddPosts(teamId string, channel *model.Channel, po
"username": users[sPost.User].Username, "username": users[sPost.User].Username,
}, },
} }
si.oldImportPost(&newPost) si.oldImportPost(rctx, &newPost)
case sPost.Type == "message" && sPost.SubType == "me_message": case sPost.Type == "message" && sPost.SubType == "me_message":
if sPost.User == "" { if sPost.User == "" {
mlog.Debug("Slack Import: Unable to import the message as the user field is missing.") rctx.Logger().Debug("Slack Import: Unable to import the message as the user field is missing.")
continue continue
} }
if users[sPost.User] == nil { if users[sPost.User] == nil {
mlog.Debug("Slack Import: Unable to add the message as the Slack user does not exist in Mattermost.", mlog.String("user", sPost.User)) rctx.Logger().Debug("Slack Import: Unable to add the message as the Slack user does not exist in Mattermost.", mlog.String("user", sPost.User))
continue continue
} }
newPost := model.Post{ newPost := model.Post{
@@ -451,18 +451,18 @@ func (si *SlackImporter) slackAddPosts(teamId string, channel *model.Channel, po
Message: "*" + sPost.Text + "*", Message: "*" + sPost.Text + "*",
CreateAt: slackConvertTimeStamp(sPost.TimeStamp), CreateAt: slackConvertTimeStamp(sPost.TimeStamp),
} }
postId := si.oldImportPost(&newPost) postId := si.oldImportPost(rctx, &newPost)
// If post is thread starter // If post is thread starter
if sPost.ThreadTS == sPost.TimeStamp { if sPost.ThreadTS == sPost.TimeStamp {
threads[sPost.ThreadTS] = postId threads[sPost.ThreadTS] = postId
} }
case sPost.Type == "message" && sPost.SubType == "channel_topic": case sPost.Type == "message" && sPost.SubType == "channel_topic":
if sPost.User == "" { if sPost.User == "" {
mlog.Debug("Slack Import: Unable to import the message as the user field is missing.") rctx.Logger().Debug("Slack Import: Unable to import the message as the user field is missing.")
continue continue
} }
if users[sPost.User] == nil { if users[sPost.User] == nil {
mlog.Debug("Slack Import: Unable to add the message as the Slack user does not exist in Mattermost.", mlog.String("user", sPost.User)) rctx.Logger().Debug("Slack Import: Unable to add the message as the Slack user does not exist in Mattermost.", mlog.String("user", sPost.User))
continue continue
} }
newPost := model.Post{ newPost := model.Post{
@@ -472,14 +472,14 @@ func (si *SlackImporter) slackAddPosts(teamId string, channel *model.Channel, po
CreateAt: slackConvertTimeStamp(sPost.TimeStamp), CreateAt: slackConvertTimeStamp(sPost.TimeStamp),
Type: model.PostTypeHeaderChange, Type: model.PostTypeHeaderChange,
} }
si.oldImportPost(&newPost) si.oldImportPost(rctx, &newPost)
case sPost.Type == "message" && sPost.SubType == "channel_purpose": case sPost.Type == "message" && sPost.SubType == "channel_purpose":
if sPost.User == "" { if sPost.User == "" {
mlog.Debug("Slack Import: Unable to import the message as the user field is missing.") rctx.Logger().Debug("Slack Import: Unable to import the message as the user field is missing.")
continue continue
} }
if users[sPost.User] == nil { if users[sPost.User] == nil {
mlog.Debug("Slack Import: Unable to add the message as the Slack user does not exist in Mattermost.", mlog.String("user", sPost.User)) rctx.Logger().Debug("Slack Import: Unable to add the message as the Slack user does not exist in Mattermost.", mlog.String("user", sPost.User))
continue continue
} }
newPost := model.Post{ newPost := model.Post{
@@ -489,14 +489,14 @@ func (si *SlackImporter) slackAddPosts(teamId string, channel *model.Channel, po
CreateAt: slackConvertTimeStamp(sPost.TimeStamp), CreateAt: slackConvertTimeStamp(sPost.TimeStamp),
Type: model.PostTypePurposeChange, Type: model.PostTypePurposeChange,
} }
si.oldImportPost(&newPost) si.oldImportPost(rctx, &newPost)
case sPost.Type == "message" && sPost.SubType == "channel_name": case sPost.Type == "message" && sPost.SubType == "channel_name":
if sPost.User == "" { if sPost.User == "" {
mlog.Debug("Slack Import: Unable to import the message as the user field is missing.") rctx.Logger().Debug("Slack Import: Unable to import the message as the user field is missing.")
continue continue
} }
if users[sPost.User] == nil { if users[sPost.User] == nil {
mlog.Debug("Slack Import: Unable to add the message as the Slack user does not exist in Mattermost.", mlog.String("user", sPost.User)) rctx.Logger().Debug("Slack Import: Unable to add the message as the Slack user does not exist in Mattermost.", mlog.String("user", sPost.User))
continue continue
} }
newPost := model.Post{ newPost := model.Post{
@@ -506,9 +506,9 @@ func (si *SlackImporter) slackAddPosts(teamId string, channel *model.Channel, po
CreateAt: slackConvertTimeStamp(sPost.TimeStamp), CreateAt: slackConvertTimeStamp(sPost.TimeStamp),
Type: model.PostTypeDisplaynameChange, Type: model.PostTypeDisplaynameChange,
} }
si.oldImportPost(&newPost) si.oldImportPost(rctx, &newPost)
default: default:
mlog.Warn( rctx.Logger().Warn(
"Slack Import: Unable to import the message as its type is not supported", "Slack Import: Unable to import the message as its type is not supported",
mlog.String("post_type", sPost.Type), mlog.String("post_type", sPost.Type),
mlog.String("post_subtype", sPost.SubType), mlog.String("post_subtype", sPost.SubType),
@@ -517,77 +517,77 @@ func (si *SlackImporter) slackAddPosts(teamId string, channel *model.Channel, po
} }
} }
func (si *SlackImporter) slackUploadFile(slackPostFile *slackFile, uploads map[string]*zip.File, teamId string, channelId string, userId string, slackTimestamp string) (*model.FileInfo, bool) { func (si *SlackImporter) slackUploadFile(rctx request.CTX, slackPostFile *slackFile, uploads map[string]*zip.File, teamId string, channelId string, userId string, slackTimestamp string) (*model.FileInfo, bool) {
if slackPostFile == nil { if slackPostFile == nil {
mlog.Warn("Slack Import: Unable to attach the file to the post as the latter has no file section present in Slack export.") rctx.Logger().Warn("Slack Import: Unable to attach the file to the post as the latter has no file section present in Slack export.")
return nil, false return nil, false
} }
file, ok := uploads[slackPostFile.Id] file, ok := uploads[slackPostFile.Id]
if !ok { if !ok {
mlog.Warn("Slack Import: Unable to import file as the file is missing from the Slack export zip file.", mlog.String("file_id", slackPostFile.Id)) rctx.Logger().Warn("Slack Import: Unable to import file as the file is missing from the Slack export zip file.", mlog.String("file_id", slackPostFile.Id))
return nil, false return nil, false
} }
openFile, err := file.Open() openFile, err := file.Open()
if err != nil { if err != nil {
mlog.Warn("Slack Import: Unable to open the file from the Slack export.", mlog.String("file_id", slackPostFile.Id), mlog.Err(err)) rctx.Logger().Warn("Slack Import: Unable to open the file from the Slack export.", mlog.String("file_id", slackPostFile.Id), mlog.Err(err))
return nil, false return nil, false
} }
defer openFile.Close() defer openFile.Close()
timestamp := utils.TimeFromMillis(slackConvertTimeStamp(slackTimestamp)) timestamp := utils.TimeFromMillis(slackConvertTimeStamp(slackTimestamp))
uploadedFile, err := si.oldImportFile(timestamp, openFile, teamId, channelId, userId, filepath.Base(file.Name)) uploadedFile, err := si.oldImportFile(rctx, timestamp, openFile, teamId, channelId, userId, filepath.Base(file.Name))
if err != nil { if err != nil {
mlog.Warn("Slack Import: An error occurred when uploading file.", mlog.String("file_id", slackPostFile.Id), mlog.Err(err)) rctx.Logger().Warn("Slack Import: An error occurred when uploading file.", mlog.String("file_id", slackPostFile.Id), mlog.Err(err))
return nil, false return nil, false
} }
return uploadedFile, true return uploadedFile, true
} }
func (si *SlackImporter) deactivateSlackBotUser(user *model.User) { func (si *SlackImporter) deactivateSlackBotUser(rctx request.CTX, user *model.User) {
if _, err := si.actions.UpdateActive(user, false); err != nil { if _, err := si.actions.UpdateActive(user, false); err != nil {
mlog.Warn("Slack Import: Unable to deactivate the user account used for the bot.") rctx.Logger().Warn("Slack Import: Unable to deactivate the user account used for the bot.")
} }
} }
func (si *SlackImporter) addSlackUsersToChannel(c request.CTX, members []string, users map[string]*model.User, channel *model.Channel, log *bytes.Buffer) { func (si *SlackImporter) addSlackUsersToChannel(rctx request.CTX, members []string, users map[string]*model.User, channel *model.Channel, log *bytes.Buffer) {
for _, member := range members { for _, member := range members {
user, ok := users[member] user, ok := users[member]
if !ok { if !ok {
log.WriteString(i18n.T("api.slackimport.slack_add_channels.failed_to_add_user", map[string]any{"Username": "?"})) log.WriteString(i18n.T("api.slackimport.slack_add_channels.failed_to_add_user", map[string]any{"Username": "?"}))
continue continue
} }
if _, err := si.actions.AddUserToChannel(c, user, channel, false); err != nil { if _, err := si.actions.AddUserToChannel(rctx, user, channel, false); err != nil {
log.WriteString(i18n.T("api.slackimport.slack_add_channels.failed_to_add_user", map[string]any{"Username": user.Username})) log.WriteString(i18n.T("api.slackimport.slack_add_channels.failed_to_add_user", map[string]any{"Username": user.Username}))
} }
} }
} }
func slackSanitiseChannelProperties(channel model.Channel) model.Channel { func slackSanitiseChannelProperties(rctx request.CTX, channel model.Channel) model.Channel {
if utf8.RuneCountInString(channel.DisplayName) > model.ChannelDisplayNameMaxRunes { if utf8.RuneCountInString(channel.DisplayName) > model.ChannelDisplayNameMaxRunes {
mlog.Warn("Slack Import: Channel display name exceeds the maximum length. It will be truncated when imported.", mlog.String("channel_display_name", channel.DisplayName)) rctx.Logger().Warn("Slack Import: Channel display name exceeds the maximum length. It will be truncated when imported.", mlog.String("channel_display_name", channel.DisplayName))
channel.DisplayName = truncateRunes(channel.DisplayName, model.ChannelDisplayNameMaxRunes) channel.DisplayName = truncateRunes(channel.DisplayName, model.ChannelDisplayNameMaxRunes)
} }
if len(channel.Name) > model.ChannelNameMaxLength { if len(channel.Name) > model.ChannelNameMaxLength {
mlog.Warn("Slack Import: Channel handle exceeds the maximum length. It will be truncated when imported.", mlog.String("channel_display_name", channel.DisplayName)) rctx.Logger().Warn("Slack Import: Channel handle exceeds the maximum length. It will be truncated when imported.", mlog.String("channel_display_name", channel.DisplayName))
channel.Name = channel.Name[0:model.ChannelNameMaxLength] channel.Name = channel.Name[0:model.ChannelNameMaxLength]
} }
if utf8.RuneCountInString(channel.Purpose) > model.ChannelPurposeMaxRunes { if utf8.RuneCountInString(channel.Purpose) > model.ChannelPurposeMaxRunes {
mlog.Warn("Slack Import: Channel purpose exceeds the maximum length. It will be truncated when imported.", mlog.String("channel_display_name", channel.DisplayName)) rctx.Logger().Warn("Slack Import: Channel purpose exceeds the maximum length. It will be truncated when imported.", mlog.String("channel_display_name", channel.DisplayName))
channel.Purpose = truncateRunes(channel.Purpose, model.ChannelPurposeMaxRunes) channel.Purpose = truncateRunes(channel.Purpose, model.ChannelPurposeMaxRunes)
} }
if utf8.RuneCountInString(channel.Header) > model.ChannelHeaderMaxRunes { if utf8.RuneCountInString(channel.Header) > model.ChannelHeaderMaxRunes {
mlog.Warn("Slack Import: Channel header exceeds the maximum length. It will be truncated when imported.", mlog.String("channel_display_name", channel.DisplayName)) rctx.Logger().Warn("Slack Import: Channel header exceeds the maximum length. It will be truncated when imported.", mlog.String("channel_display_name", channel.DisplayName))
channel.Header = truncateRunes(channel.Header, model.ChannelHeaderMaxRunes) channel.Header = truncateRunes(channel.Header, model.ChannelHeaderMaxRunes)
} }
return channel return channel
} }
func (si *SlackImporter) slackAddChannels(c request.CTX, teamId string, slackchannels []slackChannel, posts map[string][]slackPost, users map[string]*model.User, uploads map[string]*zip.File, botUser *model.User, importerLog *bytes.Buffer) map[string]*model.Channel { func (si *SlackImporter) slackAddChannels(rctx request.CTX, teamId string, slackchannels []slackChannel, posts map[string][]slackPost, users map[string]*model.User, uploads map[string]*zip.File, botUser *model.User, importerLog *bytes.Buffer) map[string]*model.Channel {
// Write Header // Write Header
importerLog.WriteString(i18n.T("api.slackimport.slack_add_channels.added")) importerLog.WriteString(i18n.T("api.slackimport.slack_add_channels.added"))
importerLog.WriteString("=================\r\n\r\n") importerLog.WriteString("=================\r\n\r\n")
@@ -608,7 +608,7 @@ func (si *SlackImporter) slackAddChannels(c request.CTX, teamId string, slackcha
sChannel.Name = sChannel.Id sChannel.Name = sChannel.Id
} }
newChannel = slackSanitiseChannelProperties(newChannel) newChannel = slackSanitiseChannelProperties(rctx, newChannel)
var mChannel *model.Channel var mChannel *model.Channel
var err error var err error
@@ -618,14 +618,14 @@ func (si *SlackImporter) slackAddChannels(c request.CTX, teamId string, slackcha
} else if _, nErr := si.store.Channel().GetDeletedByName(teamId, sChannel.Name); nErr == nil { } else if _, nErr := si.store.Channel().GetDeletedByName(teamId, sChannel.Name); nErr == nil {
// The channel already exists but has been deleted. Generate a random string for the handle instead. // The channel already exists but has been deleted. Generate a random string for the handle instead.
newChannel.Name = model.NewId() newChannel.Name = model.NewId()
newChannel = slackSanitiseChannelProperties(newChannel) newChannel = slackSanitiseChannelProperties(rctx, newChannel)
} }
if mChannel == nil { if mChannel == nil {
// Haven't found an existing channel to merge with. Try importing it as a new one. // Haven't found an existing channel to merge with. Try importing it as a new one.
mChannel = si.oldImportChannel(c, &newChannel, sChannel, users) mChannel = si.oldImportChannel(rctx, &newChannel, sChannel, users)
if mChannel == nil { if mChannel == nil {
mlog.Warn("Slack Import: Unable to import Slack channel.", mlog.String("channel_display_name", newChannel.DisplayName)) rctx.Logger().Warn("Slack Import: Unable to import Slack channel.", mlog.String("channel_display_name", newChannel.DisplayName))
importerLog.WriteString(i18n.T("api.slackimport.slack_add_channels.import_failed", map[string]any{"DisplayName": newChannel.DisplayName})) importerLog.WriteString(i18n.T("api.slackimport.slack_add_channels.import_failed", map[string]any{"DisplayName": newChannel.DisplayName}))
continue continue
} }
@@ -633,11 +633,11 @@ func (si *SlackImporter) slackAddChannels(c request.CTX, teamId string, slackcha
// Members for direct and group channels are added during the creation of the channel in the oldImportChannel function // Members for direct and group channels are added during the creation of the channel in the oldImportChannel function
if sChannel.Type == model.ChannelTypeOpen || sChannel.Type == model.ChannelTypePrivate { if sChannel.Type == model.ChannelTypeOpen || sChannel.Type == model.ChannelTypePrivate {
si.addSlackUsersToChannel(c, sChannel.Members, users, mChannel, importerLog) si.addSlackUsersToChannel(rctx, sChannel.Members, users, mChannel, importerLog)
} }
importerLog.WriteString(newChannel.DisplayName + "\r\n") importerLog.WriteString(newChannel.DisplayName + "\r\n")
addedChannels[sChannel.Id] = mChannel addedChannels[sChannel.Id] = mChannel
si.slackAddPosts(teamId, mChannel, posts[sChannel.Name], users, uploads, botUser) si.slackAddPosts(rctx, teamId, mChannel, posts[sChannel.Name], users, uploads, botUser)
} }
return addedChannels return addedChannels
@@ -649,7 +649,7 @@ func (si *SlackImporter) slackAddChannels(c request.CTX, teamId string, slackcha
// some of the usual checks. (IsValid is still run) // some of the usual checks. (IsValid is still run)
// //
func (si *SlackImporter) oldImportPost(post *model.Post) string { func (si *SlackImporter) oldImportPost(rctx request.CTX, post *model.Post) string {
// Workaround for empty messages, which may be the case if they are webhook posts. // Workaround for empty messages, which may be the case if they are webhook posts.
firstIteration := true firstIteration := true
firstPostId := "" firstPostId := ""
@@ -672,7 +672,7 @@ func (si *SlackImporter) oldImportPost(post *model.Post) string {
_, err := si.store.Post().Save(post) _, err := si.store.Post().Save(post)
if err != nil { if err != nil {
mlog.Debug("Error saving post.", mlog.String("user_id", post.UserId), mlog.String("message", post.Message)) rctx.Logger().Debug("Error saving post.", mlog.String("user_id", post.UserId), mlog.String("message", post.Message))
} }
if firstIteration { if firstIteration {
@@ -681,7 +681,7 @@ func (si *SlackImporter) oldImportPost(post *model.Post) string {
} }
for _, fileId := range post.FileIds { for _, fileId := range post.FileIds {
if err := si.store.FileInfo().AttachToPost(fileId, post.Id, post.ChannelId, post.UserId); err != nil { if err := si.store.FileInfo().AttachToPost(fileId, post.Id, post.ChannelId, post.UserId); err != nil {
mlog.Error( rctx.Logger().Error(
"Error attaching files to post.", "Error attaching files to post.",
mlog.String("post_id", post.Id), mlog.String("post_id", post.Id),
mlog.String("file_ids", strings.Join(post.FileIds, ",")), mlog.String("file_ids", strings.Join(post.FileIds, ",")),
@@ -701,29 +701,29 @@ func (si *SlackImporter) oldImportPost(post *model.Post) string {
return firstPostId return firstPostId
} }
func (si *SlackImporter) oldImportUser(team *model.Team, user *model.User) *model.User { func (si *SlackImporter) oldImportUser(rctx request.CTX, team *model.Team, user *model.User) *model.User {
user.MakeNonNil() user.MakeNonNil()
user.Roles = model.SystemUserRoleId user.Roles = model.SystemUserRoleId
ruser, nErr := si.store.User().Save(user) ruser, nErr := si.store.User().Save(user)
if nErr != nil { if nErr != nil {
mlog.Debug("Error saving user.", mlog.Err(nErr)) rctx.Logger().Debug("Error saving user.", mlog.Err(nErr))
return nil return nil
} }
if _, err := si.store.User().VerifyEmail(ruser.Id, ruser.Email); err != nil { if _, err := si.store.User().VerifyEmail(ruser.Id, ruser.Email); err != nil {
mlog.Warn("Failed to set email verified.", mlog.Err(err)) rctx.Logger().Warn("Failed to set email verified.", mlog.Err(err))
} }
if _, err := si.actions.JoinUserToTeam(team, user, ""); err != nil { if _, err := si.actions.JoinUserToTeam(team, user, ""); err != nil {
mlog.Warn("Failed to join team when importing.", mlog.Err(err)) rctx.Logger().Warn("Failed to join team when importing.", mlog.Err(err))
} }
return ruser return ruser
} }
func (si *SlackImporter) oldImportChannel(c request.CTX, channel *model.Channel, sChannel slackChannel, users map[string]*model.User) *model.Channel { func (si *SlackImporter) oldImportChannel(rctx request.CTX, channel *model.Channel, sChannel slackChannel, users map[string]*model.User) *model.Channel {
switch { switch {
case channel.Type == model.ChannelTypeDirect: case channel.Type == model.ChannelTypeDirect:
if len(sChannel.Members) < 2 { if len(sChannel.Members) < 2 {
@@ -732,10 +732,10 @@ func (si *SlackImporter) oldImportChannel(c request.CTX, channel *model.Channel,
u1 := users[sChannel.Members[0]] u1 := users[sChannel.Members[0]]
u2 := users[sChannel.Members[1]] u2 := users[sChannel.Members[1]]
if u1 == nil || u2 == nil { if u1 == nil || u2 == nil {
mlog.Warn("Either or both of user ids not found in users.json. Ignoring.", mlog.String("id1", sChannel.Members[0]), mlog.String("id2", sChannel.Members[1])) rctx.Logger().Warn("Either or both of user ids not found in users.json. Ignoring.", mlog.String("id1", sChannel.Members[0]), mlog.String("id2", sChannel.Members[1]))
return nil return nil
} }
sc, err := si.actions.CreateDirectChannel(c, u1.Id, u2.Id) sc, err := si.actions.CreateDirectChannel(rctx, u1.Id, u2.Id)
if err != nil { if err != nil {
return nil return nil
} }
@@ -748,7 +748,7 @@ func (si *SlackImporter) oldImportChannel(c request.CTX, channel *model.Channel,
for i := range sChannel.Members { for i := range sChannel.Members {
u := users[sChannel.Members[i]] u := users[sChannel.Members[i]]
if u == nil { if u == nil {
mlog.Warn("User not found in users.json. Ignoring.", mlog.String("id", sChannel.Members[i])) rctx.Logger().Warn("User not found in users.json. Ignoring.", mlog.String("id", sChannel.Members[i]))
continue continue
} }
members[i] = u.Id members[i] = u.Id
@@ -758,7 +758,7 @@ func (si *SlackImporter) oldImportChannel(c request.CTX, channel *model.Channel,
if creator == nil { if creator == nil {
return nil return nil
} }
sc, err := si.actions.CreateGroupChannel(c, members) sc, err := si.actions.CreateGroupChannel(rctx, members)
if err != nil { if err != nil {
return nil return nil
} }
@@ -782,7 +782,7 @@ func (si *SlackImporter) oldImportChannel(c request.CTX, channel *model.Channel,
return sc return sc
} }
func (si *SlackImporter) oldImportFile(timestamp time.Time, file io.Reader, teamId string, channelId string, userId string, fileName string) (*model.FileInfo, error) { func (si *SlackImporter) oldImportFile(rctx request.CTX, timestamp time.Time, file io.Reader, teamId string, channelId string, userId string, fileName string) (*model.FileInfo, error) {
buf := bytes.NewBuffer(nil) buf := bytes.NewBuffer(nil)
io.Copy(buf, file) io.Copy(buf, file)
data := buf.Bytes() data := buf.Bytes()
@@ -798,14 +798,14 @@ func (si *SlackImporter) oldImportFile(timestamp time.Time, file io.Reader, team
return nil, err return nil, err
} }
defer release() defer release()
si.actions.GenerateThumbnailImage(img, imgType, fileInfo.ThumbnailPath) si.actions.GenerateThumbnailImage(rctx, img, imgType, fileInfo.ThumbnailPath)
si.actions.GeneratePreviewImage(img, imgType, fileInfo.PreviewPath) si.actions.GeneratePreviewImage(rctx, img, imgType, fileInfo.PreviewPath)
} }
return fileInfo, nil return fileInfo, nil
} }
func (si *SlackImporter) oldImportIncomingWebhookPost(post *model.Post, props model.StringInterface) string { func (si *SlackImporter) oldImportIncomingWebhookPost(rctx request.CTX, post *model.Post, props model.StringInterface) string {
linkWithTextRegex := regexp.MustCompile(`<([^<\|]+)\|([^>]+)>`) linkWithTextRegex := regexp.MustCompile(`<([^<\|]+)\|([^>]+)>`)
post.Message = linkWithTextRegex.ReplaceAllString(post.Message, "[${2}](${1})") post.Message = linkWithTextRegex.ReplaceAllString(post.Message, "[${2}](${1})")
@@ -827,5 +827,5 @@ func (si *SlackImporter) oldImportIncomingWebhookPost(post *model.Post, props mo
} }
} }
return si.oldImportPost(post) return si.oldImportPost(rctx, post)
} }
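Every SlackImporter helper above now takes the request context as its first parameter and logs through rctx.Logger(), so a single import run produces log entries tied to the request that triggered it. A minimal sketch of the call shape from a caller that already holds a request.CTX (the error log line is illustrative and not part of this diff):

appErr, importLog := si.SlackImport(rctx, fileData, fileSize, teamID)
if appErr != nil {
	// rctx.Logger() is the same request-scoped logger the importer used internally.
	rctx.Logger().Error("Slack import failed", mlog.Err(appErr))
}
_ = importLog // the importer also returns its human-readable log buffer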

View File

@@ -196,6 +196,8 @@ func TestSlackParseMultipleAttachments(t *testing.T) {
} }
func TestSlackSanitiseChannelProperties(t *testing.T) { func TestSlackSanitiseChannelProperties(t *testing.T) {
rctx := request.TestContext(t)
c1 := model.Channel{ c1 := model.Channel{
DisplayName: "display-name", DisplayName: "display-name",
Name: "name", Name: "name",
@@ -203,7 +205,7 @@ func TestSlackSanitiseChannelProperties(t *testing.T) {
Header: "The channel header", Header: "The channel header",
} }
c1s := slackSanitiseChannelProperties(c1) c1s := slackSanitiseChannelProperties(rctx, c1)
assert.Equal(t, c1, c1s) assert.Equal(t, c1, c1s)
c2 := model.Channel{ c2 := model.Channel{
@@ -213,7 +215,7 @@ func TestSlackSanitiseChannelProperties(t *testing.T) {
Header: strings.Repeat("0123456789", 120), Header: strings.Repeat("0123456789", 120),
} }
c2s := slackSanitiseChannelProperties(c2) c2s := slackSanitiseChannelProperties(rctx, c2)
assert.Equal(t, model.Channel{ assert.Equal(t, model.Channel{
DisplayName: strings.Repeat("abcdefghij", 6) + "abcd", DisplayName: strings.Repeat("abcdefghij", 6) + "abcd",
Name: strings.Repeat("abcdefghij", 6) + "abcd", Name: strings.Repeat("abcdefghij", 6) + "abcd",
@@ -338,7 +340,7 @@ func TestOldImportChannel(t *testing.T) {
store := &mocks.Store{} store := &mocks.Store{}
config := &model.Config{} config := &model.Config{}
config.SetDefaults() config.SetDefaults()
ctx := request.TestContext(t) rctx := request.TestContext(t)
t.Run("No panic on direct channel", func(t *testing.T) { t.Run("No panic on direct channel", func(t *testing.T) {
// ch := th.CreateDmChannel(u1) // ch := th.CreateDmChannel(u1)
@@ -358,7 +360,7 @@ func TestOldImportChannel(t *testing.T) {
actions := Actions{} actions := Actions{}
importer := New(store, actions, config) importer := New(store, actions, config)
_ = importer.oldImportChannel(ctx, ch, sCh, users) _ = importer.oldImportChannel(rctx, ch, sCh, users)
}) })
t.Run("No panic on direct channel with 1 member", func(t *testing.T) { t.Run("No panic on direct channel with 1 member", func(t *testing.T) {
@@ -378,7 +380,7 @@ func TestOldImportChannel(t *testing.T) {
actions := Actions{} actions := Actions{}
importer := New(store, actions, config) importer := New(store, actions, config)
_ = importer.oldImportChannel(ctx, ch, sCh, users) _ = importer.oldImportChannel(rctx, ch, sCh, users)
}) })
t.Run("No panic on group channel", func(t *testing.T) { t.Run("No panic on group channel", func(t *testing.T) {
@@ -397,6 +399,6 @@ func TestOldImportChannel(t *testing.T) {
actions := Actions{} actions := Actions{}
importer := New(store, actions, config) importer := New(store, actions, config)
_ = importer.oldImportChannel(ctx, ch, sCh, users) _ = importer.oldImportChannel(rctx, ch, sCh, users)
}) })
} }
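On the test side the adjustment is mechanical: each test builds a throwaway context with request.TestContext(t) and passes it wherever a request.CTX is now required, as the hunks above do for slackSanitiseChannelProperties and oldImportChannel. A sketch under that assumption (the test name is hypothetical):

func TestSanitisePattern(t *testing.T) {
	rctx := request.TestContext(t)
	ch := model.Channel{DisplayName: "display-name", Name: "name"}
	// the sanitiser now needs the context so its truncation warnings go to the test logger
	_ = slackSanitiseChannelProperties(rctx, ch)
}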