[MM-26812] Add support for resumable file uploads (#15252)

* Implement AppendFile for FileBackend

* Split test into subtests

* [MM-26812] Add support for resumable file uploads (#15252)

* Implement UploadSession

* Implement UploadSessionStore

* Add error strings

* Implement resumable file uploads

* Add UploadType

* Fix retry layer tests

* Regenerate store layers

* Fix store error handling

* Use base for filename

* Prevent concurrent uploads on the same upload session

* Fix erroneous error string

* Improve error handling

Co-authored-by: Mattermod <mattermod@users.noreply.github.com>

* Fix translations

Co-authored-by: Mattermod <mattermod@users.noreply.github.com>
This commit is contained in:
Claudio Costa
2020-09-15 21:28:25 +02:00
committed by GitHub
parent 6a58834f34
commit 9c272f0b20
40 changed files with 2523 additions and 18 deletions

View File

@@ -57,6 +57,9 @@ type Routes struct {
Files *mux.Router // 'api/v4/files'
File *mux.Router // 'api/v4/files/{file_id:[A-Za-z0-9]+}'
Uploads *mux.Router // 'api/v4/uploads'
Upload *mux.Router // 'api/v4/uploads/{upload_id:[A-Za-z0-9]+}'
Plugins *mux.Router // 'api/v4/plugins'
Plugin *mux.Router // 'api/v4/plugins/{plugin_id:[A-Za-z0-9\\_\\-\\.]+}'
@@ -173,6 +176,9 @@ func Init(configservice configservice.ConfigService, globalOptionsFunc app.AppOp
api.BaseRoutes.File = api.BaseRoutes.Files.PathPrefix("/{file_id:[A-Za-z0-9]+}").Subrouter()
api.BaseRoutes.PublicFile = api.BaseRoutes.Root.PathPrefix("/files/{file_id:[A-Za-z0-9]+}/public").Subrouter()
api.BaseRoutes.Uploads = api.BaseRoutes.ApiRoot.PathPrefix("/uploads").Subrouter()
api.BaseRoutes.Upload = api.BaseRoutes.Uploads.PathPrefix("/{upload_id:[A-Za-z0-9]+}").Subrouter()
api.BaseRoutes.Plugins = api.BaseRoutes.ApiRoot.PathPrefix("/plugins").Subrouter()
api.BaseRoutes.Plugin = api.BaseRoutes.Plugins.PathPrefix("/{plugin_id:[A-Za-z0-9\\_\\-\\.]+}").Subrouter()
@@ -227,6 +233,7 @@ func Init(configservice configservice.ConfigService, globalOptionsFunc app.AppOp
api.InitChannel()
api.InitPost()
api.InitFile()
api.InitUpload()
api.InitSystem()
api.InitLicense()
api.InitConfig()

123
api4/upload.go Normal file
View File

@@ -0,0 +1,123 @@
// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved.
// See LICENSE.txt for license information.
package api4
import (
"net/http"
"github.com/mattermost/mattermost-server/v5/audit"
"github.com/mattermost/mattermost-server/v5/model"
)
// InitUpload registers the upload session endpoints on the API router.
// All routes require an authenticated session.
func (api *API) InitUpload() {
	api.BaseRoutes.Uploads.Handle("", api.ApiSessionRequired(createUpload)).Methods("POST")
	api.BaseRoutes.Upload.Handle("", api.ApiSessionRequired(getUpload)).Methods("GET")
	api.BaseRoutes.Upload.Handle("", api.ApiSessionRequired(uploadData)).Methods("POST")
}
// createUpload creates a new upload session for a file attachment in a
// channel. The client supplies ChannelId, Filename and FileSize in the
// request body; the server assigns the session id, type and owner.
// Responds 201 Created with the stored session on success.
func createUpload(c *Context, w http.ResponseWriter, r *http.Request) {
	if !*c.App.Config().FileSettings.EnableFileAttachments {
		c.Err = model.NewAppError("createUpload",
			"api.file.attachments.disabled.app_error",
			nil, "", http.StatusNotImplemented)
		return
	}
	us := model.UploadSessionFromJson(r.Body)
	if us == nil {
		c.SetInvalidParam("upload")
		return
	}
	// Audit record defaults to failure; flipped to success only after the
	// session is persisted.
	auditRec := c.MakeAuditRecord("createUpload", audit.Fail)
	defer c.LogAuditRec(auditRec)
	auditRec.AddMeta("upload", us)
	if !c.App.SessionHasPermissionToChannel(*c.App.Session(), us.ChannelId, model.PERMISSION_UPLOAD_FILE) {
		c.SetPermissionError(model.PERMISSION_UPLOAD_FILE)
		return
	}
	// Server-controlled fields: never trust id/type/owner from the client.
	us.Id = model.NewId()
	us.Type = model.UploadTypeAttachment
	us.UserId = c.App.Session().UserId
	us, err := c.App.CreateUploadSession(us)
	if err != nil {
		c.Err = err
		return
	}
	auditRec.Success()
	w.WriteHeader(http.StatusCreated)
	w.Write([]byte(us.ToJson()))
}
// getUpload returns the upload session identified by the upload_id URL
// parameter. Only the user who created the session may fetch it.
func getUpload(c *Context, w http.ResponseWriter, r *http.Request) {
	if c.RequireUploadId(); c.Err != nil {
		return
	}
	session, appErr := c.App.GetUploadSession(c.Params.UploadId)
	if appErr != nil {
		c.Err = appErr
		return
	}
	// Upload sessions are private to their creator.
	if session.UserId != c.App.Session().UserId {
		c.Err = model.NewAppError("getUpload", "api.upload.get_upload.forbidden.app_error", nil, "", http.StatusForbidden)
		return
	}
	w.Write([]byte(session.ToJson()))
}
// uploadData writes a chunk of data to the given upload session. It is
// used both to upload a full file at once and to resume a previously
// interrupted upload. On completion it responds with the resulting
// FileInfo; while the upload is still partial it responds 204 No Content.
func uploadData(c *Context, w http.ResponseWriter, r *http.Request) {
	if !*c.App.Config().FileSettings.EnableFileAttachments {
		c.Err = model.NewAppError("uploadData", "api.file.attachments.disabled.app_error",
			nil, "", http.StatusNotImplemented)
		return
	}
	c.RequireUploadId()
	if c.Err != nil {
		return
	}
	auditRec := c.MakeAuditRecord("uploadData", audit.Fail)
	defer c.LogAuditRec(auditRec)
	auditRec.AddMeta("upload_id", c.Params.UploadId)
	us, err := c.App.GetUploadSession(c.Params.UploadId)
	if err != nil {
		c.Err = err
		return
	}
	// Reject requests that would write past the expected file size. A
	// ContentLength of -1 (unknown length) passes this check; the app
	// layer bounds the read server-side.
	// NOTE(review): this validation runs before the ownership/permission
	// check below, so a non-owner can distinguish "too large" from
	// "forbidden" for someone else's session — confirm this ordering is
	// intended (the existing tests depend on it).
	if r.ContentLength > (us.FileSize - us.FileOffset) {
		c.Err = model.NewAppError("uploadData", "api.upload.upload_data.invalid_content_length",
			nil, "", http.StatusBadRequest)
		return
	}
	// Only the session owner with upload permission on the channel may
	// append data.
	if us.UserId != c.App.Session().UserId || !c.App.SessionHasPermissionToChannel(*c.App.Session(), us.ChannelId, model.PERMISSION_UPLOAD_FILE) {
		c.SetPermissionError(model.PERMISSION_UPLOAD_FILE)
		return
	}
	info, err := c.App.UploadData(us, r.Body)
	if err != nil {
		c.Err = err
		return
	}
	auditRec.Success()
	// A nil FileInfo with no error means the upload is still incomplete.
	if info == nil {
		w.WriteHeader(http.StatusNoContent)
		return
	}
	w.Write([]byte(info.ToJson()))
}

242
api4/upload_test.go Normal file
View File

@@ -0,0 +1,242 @@
// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved.
// See LICENSE.txt for license information.
package api4
import (
"bytes"
"io"
"net/http"
"testing"
"github.com/mattermost/mattermost-server/v5/model"
"github.com/stretchr/testify/require"
)
// TestCreateUpload exercises the POST /uploads endpoint: disabled
// attachments, missing channel permission, and the success path.
func TestCreateUpload(t *testing.T) {
	th := Setup(t).InitBasic()
	defer th.TearDown()
	// Base session shared by the subtests; ChannelId is mutated per
	// subtest, so the subtests are order-dependent.
	us := &model.UploadSession{
		ChannelId: th.BasicChannel.Id,
		Filename:  "upload",
		FileSize:  8 * 1024 * 1024,
	}
	t.Run("file attachments disabled", func(t *testing.T) {
		th.App.UpdateConfig(func(cfg *model.Config) { *cfg.FileSettings.EnableFileAttachments = false })
		defer th.App.UpdateConfig(func(cfg *model.Config) { *cfg.FileSettings.EnableFileAttachments = true })
		u, resp := th.Client.CreateUpload(us)
		require.Nil(t, u)
		require.Error(t, resp.Error)
		require.Equal(t, "api.file.attachments.disabled.app_error", resp.Error.Id)
		require.Equal(t, http.StatusNotImplemented, resp.StatusCode)
	})
	t.Run("no permissions", func(t *testing.T) {
		// BasicUser is not a member of BasicPrivateChannel2.
		us.ChannelId = th.BasicPrivateChannel2.Id
		u, resp := th.Client.CreateUpload(us)
		require.Nil(t, u)
		require.Error(t, resp.Error)
		require.Equal(t, "api.context.permissions.app_error", resp.Error.Id)
		require.Equal(t, http.StatusForbidden, resp.StatusCode)
	})
	t.Run("valid", func(t *testing.T) {
		us.ChannelId = th.BasicChannel.Id
		u, resp := th.Client.CreateUpload(us)
		require.Nil(t, resp.Error)
		require.NotEmpty(t, u)
		require.Equal(t, http.StatusCreated, resp.StatusCode)
	})
}
// TestGetUpload exercises GET /uploads/{upload_id}: unknown id, fetching
// another user's session, and fetching one's own session.
func TestGetUpload(t *testing.T) {
	th := Setup(t).InitBasic()
	defer th.TearDown()
	// Session owned by BasicUser2 while th.Client is logged in as
	// BasicUser — used to verify the ownership check.
	us := &model.UploadSession{
		Id:        model.NewId(),
		Type:      model.UploadTypeAttachment,
		CreateAt:  model.GetMillis(),
		UserId:    th.BasicUser2.Id,
		ChannelId: th.BasicChannel.Id,
		Filename:  "upload",
		FileSize:  8 * 1024 * 1024,
	}
	us, err := th.App.CreateUploadSession(us)
	require.Nil(t, err)
	require.NotNil(t, us)
	require.NotEmpty(t, us)
	t.Run("upload not found", func(t *testing.T) {
		u, resp := th.Client.GetUpload(model.NewId())
		require.Nil(t, u)
		require.Error(t, resp.Error)
		require.Equal(t, "app.upload.get.app_error", resp.Error.Id)
		require.Equal(t, http.StatusNotFound, resp.StatusCode)
	})
	t.Run("no permissions", func(t *testing.T) {
		u, resp := th.Client.GetUpload(us.Id)
		require.Nil(t, u)
		require.Error(t, resp.Error)
		require.Equal(t, "api.upload.get_upload.forbidden.app_error", resp.Error.Id)
	})
	t.Run("success", func(t *testing.T) {
		// Creating through the client reassigns ownership to BasicUser,
		// so the fetch below is allowed.
		expected, resp := th.Client.CreateUpload(us)
		require.Nil(t, resp.Error)
		require.NotEmpty(t, expected)
		require.Equal(t, http.StatusCreated, resp.StatusCode)
		u, resp := th.Client.GetUpload(expected.Id)
		require.Nil(t, resp.Error)
		require.NotEmpty(t, u)
		require.Equal(t, expected, u)
	})
}
// TestGetUploadsForUser exercises GET /users/{user_id}/uploads: listing
// someone else's sessions, an empty list, and listing one's own.
func TestGetUploadsForUser(t *testing.T) {
	th := Setup(t).InitBasic()
	defer th.TearDown()
	t.Run("no permissions", func(t *testing.T) {
		// Users may only list their own upload sessions.
		uss, resp := th.Client.GetUploadsForUser(th.BasicUser2.Id)
		require.Error(t, resp.Error)
		require.Equal(t, "api.user.get_uploads_for_user.forbidden.app_error", resp.Error.Id)
		require.Nil(t, uss)
	})
	t.Run("empty", func(t *testing.T) {
		uss, resp := th.Client.GetUploadsForUser(th.BasicUser.Id)
		require.Nil(t, resp.Error)
		require.Empty(t, uss)
	})
	t.Run("success", func(t *testing.T) {
		uploads := make([]*model.UploadSession, 4)
		for i := 0; i < len(uploads); i++ {
			us := &model.UploadSession{
				Id:        model.NewId(),
				Type:      model.UploadTypeAttachment,
				CreateAt:  model.GetMillis(),
				UserId:    th.BasicUser.Id,
				ChannelId: th.BasicChannel.Id,
				Filename:  "upload",
				FileSize:  8 * 1024 * 1024,
			}
			us, err := th.App.CreateUploadSession(us)
			require.Nil(t, err)
			require.NotNil(t, us)
			require.NotEmpty(t, us)
			// The API sanitizes Path out of the response, so clear it
			// before comparing.
			us.Path = ""
			uploads[i] = us
		}
		uss, resp := th.Client.GetUploadsForUser(th.BasicUser.Id)
		require.Nil(t, resp.Error)
		require.NotEmpty(t, uss)
		require.Len(t, uss, len(uploads))
		for i := range uploads {
			require.Contains(t, uss, uploads[i])
		}
	})
}
// TestUploadData exercises POST /uploads/{upload_id}: disabled
// attachments, unknown session, missing permission, oversized payload,
// a single-shot upload, and a resumed two-part upload.
func TestUploadData(t *testing.T) {
	th := Setup(t).InitBasic()
	defer th.TearDown()
	if *th.App.Config().FileSettings.DriverName == "" {
		t.Skip("skipping because no file driver is enabled")
	}
	// Session owned by BasicUser2; th.Client is BasicUser, so direct
	// uploads to us.Id hit the permission check.
	us := &model.UploadSession{
		Id:        model.NewId(),
		Type:      model.UploadTypeAttachment,
		CreateAt:  model.GetMillis(),
		UserId:    th.BasicUser2.Id,
		ChannelId: th.BasicChannel.Id,
		Filename:  "upload",
		FileSize:  8 * 1024 * 1024,
	}
	us, err := th.App.CreateUploadSession(us)
	require.Nil(t, err)
	require.NotNil(t, us)
	require.NotEmpty(t, us)
	data := randomBytes(t, int(us.FileSize))
	t.Run("file attachments disabled", func(t *testing.T) {
		th.App.UpdateConfig(func(cfg *model.Config) { *cfg.FileSettings.EnableFileAttachments = false })
		defer th.App.UpdateConfig(func(cfg *model.Config) { *cfg.FileSettings.EnableFileAttachments = true })
		info, resp := th.Client.UploadData(model.NewId(), bytes.NewReader(data))
		require.Nil(t, info)
		require.Error(t, resp.Error)
		require.Equal(t, "api.file.attachments.disabled.app_error", resp.Error.Id)
	})
	t.Run("upload not found", func(t *testing.T) {
		info, resp := th.Client.UploadData(model.NewId(), bytes.NewReader(data))
		require.Nil(t, info)
		require.Error(t, resp.Error)
		require.Equal(t, "app.upload.get.app_error", resp.Error.Id)
		require.Equal(t, http.StatusNotFound, resp.StatusCode)
	})
	t.Run("no permissions", func(t *testing.T) {
		info, resp := th.Client.UploadData(us.Id, bytes.NewReader(data))
		require.Nil(t, info)
		require.Error(t, resp.Error)
		require.Equal(t, "api.context.permissions.app_error", resp.Error.Id)
	})
	t.Run("bad content-length", func(t *testing.T) {
		// One byte more than FileSize; the content-length check runs
		// before the permission check in the handler.
		info, resp := th.Client.UploadData(us.Id, bytes.NewReader(append(data, 0x00)))
		require.Nil(t, info)
		require.Error(t, resp.Error)
		require.Equal(t, "api.upload.upload_data.invalid_content_length", resp.Error.Id)
	})
	t.Run("success", func(t *testing.T) {
		u, resp := th.Client.CreateUpload(us)
		require.Nil(t, resp.Error)
		require.NotEmpty(t, u)
		require.Equal(t, http.StatusCreated, resp.StatusCode)
		info, resp := th.Client.UploadData(u.Id, bytes.NewReader(data))
		require.Nil(t, resp.Error)
		require.NotEmpty(t, info)
		require.Equal(t, u.Filename, info.Name)
		file, resp := th.Client.GetFile(info.Id)
		require.Nil(t, resp.Error)
		require.Equal(t, file, data)
	})
	t.Run("resume success", func(t *testing.T) {
		u, resp := th.Client.CreateUpload(us)
		require.Nil(t, resp.Error)
		require.NotEmpty(t, u)
		require.Equal(t, http.StatusCreated, resp.StatusCode)
		// First request sends only 5MB of the 8MB file; the handler
		// replies 204 No Content for a partial upload.
		rd := &io.LimitedReader{
			R: bytes.NewReader(data),
			N: 5 * 1024 * 1024,
		}
		info, resp := th.Client.UploadData(u.Id, rd)
		require.Nil(t, resp.Error)
		require.Nil(t, info)
		require.Equal(t, http.StatusNoContent, resp.StatusCode)
		// Second request resumes from the 5MB offset and completes.
		info, resp = th.Client.UploadData(u.Id, bytes.NewReader(data[5*1024*1024:]))
		require.Nil(t, resp.Error)
		require.NotEmpty(t, info)
		require.Equal(t, u.Filename, info.Name)
		file, resp := th.Client.GetFile(info.Id)
		require.Nil(t, resp.Error)
		require.Equal(t, file, data)
	})
}

View File

@@ -89,6 +89,8 @@ func (api *API) InitUser() {
api.BaseRoutes.Users.Handle("/migrate_auth/ldap", api.ApiSessionRequired(migrateAuthToLDAP)).Methods("POST")
api.BaseRoutes.Users.Handle("/migrate_auth/saml", api.ApiSessionRequired(migrateAuthToSaml)).Methods("POST")
api.BaseRoutes.User.Handle("/uploads", api.ApiSessionRequired(getUploadsForUser)).Methods("GET")
}
func createUser(c *Context, w http.ResponseWriter, r *http.Request) {
@@ -2601,6 +2603,26 @@ func convertUserToBot(c *Context, w http.ResponseWriter, r *http.Request) {
w.Write(bot.ToJson())
}
// getUploadsForUser returns all pending upload sessions created by the
// requested user. Users may only list their own upload sessions.
func getUploadsForUser(c *Context, w http.ResponseWriter, r *http.Request) {
	if c.RequireUserId(); c.Err != nil {
		return
	}
	// Listing is restricted to the session owner.
	if c.Params.UserId != c.App.Session().UserId {
		c.Err = model.NewAppError("getUploadsForUser", "api.user.get_uploads_for_user.forbidden.app_error", nil, "", http.StatusForbidden)
		return
	}
	sessions, appErr := c.App.GetUploadSessionsForUser(c.Params.UserId)
	if appErr != nil {
		c.Err = appErr
		return
	}
	w.Write([]byte(model.UploadSessionsToJson(sessions)))
}
func migrateAuthToLDAP(c *Context, w http.ResponseWriter, r *http.Request) {
props := model.StringInterfaceFromJson(r.Body)
from, ok := props["from"].(string)

View File

@@ -360,6 +360,7 @@ type AppIface interface {
AddUserToTeamByToken(userId string, tokenId string) (*model.Team, *model.AppError)
AdjustImage(file io.Reader) (*bytes.Buffer, *model.AppError)
AllowOAuthAppAccessToUser(userId string, authRequest *model.AuthorizeRequest) (string, *model.AppError)
AppendFile(fr io.Reader, path string) (int64, *model.AppError)
AsymmetricSigningKey() *ecdsa.PrivateKey
AttachDeviceId(sessionId string, deviceId string, expiresAt int64) *model.AppError
AttachSessionCookies(w http.ResponseWriter, r *http.Request)
@@ -427,6 +428,7 @@ type AppIface interface {
CreateTeam(team *model.Team) (*model.Team, *model.AppError)
CreateTeamWithUser(team *model.Team, userId string) (*model.Team, *model.AppError)
CreateTermsOfService(text, userId string) (*model.TermsOfService, *model.AppError)
CreateUploadSession(us *model.UploadSession) (*model.UploadSession, *model.AppError)
CreateUserAccessToken(token *model.UserAccessToken) (*model.UserAccessToken, *model.AppError)
CreateUserAsAdmin(user *model.User, redirect string) (*model.User, *model.AppError)
CreateUserFromSignup(user *model.User, redirect string) (*model.User, *model.AppError)
@@ -670,6 +672,8 @@ type AppIface interface {
GetTeamsForUser(userId string) ([]*model.Team, *model.AppError)
GetTeamsUnreadForUser(excludeTeamId string, userId string) ([]*model.TeamUnread, *model.AppError)
GetTermsOfService(id string) (*model.TermsOfService, *model.AppError)
GetUploadSession(uploadId string) (*model.UploadSession, *model.AppError)
GetUploadSessionsForUser(userId string) ([]*model.UploadSession, *model.AppError)
GetUser(userId string) (*model.User, *model.AppError)
GetUserAccessToken(tokenId string, sanitize bool) (*model.UserAccessToken, *model.AppError)
GetUserAccessTokens(page, perPage int) ([]*model.UserAccessToken, *model.AppError)
@@ -980,6 +984,7 @@ type AppIface interface {
UpdateUserAuth(userId string, userAuth *model.UserAuth) (*model.UserAuth, *model.AppError)
UpdateUserNotifyProps(userId string, props map[string]string) (*model.User, *model.AppError)
UpdateUserRoles(userId string, newRoles string, sendWebSocketEvent bool) (*model.User, *model.AppError)
UploadData(us *model.UploadSession, rd io.Reader) (*model.FileInfo, *model.AppError)
UploadEmojiImage(id string, imageData *multipart.FileHeader) *model.AppError
UploadMultipartFiles(teamId string, channelId string, userId string, fileHeaders []*multipart.FileHeader, clientIds []string, now time.Time) (*model.FileUploadResponse, *model.AppError)
UpsertGroupMember(groupID string, userID string) (*model.GroupMember, *model.AppError)

View File

@@ -124,7 +124,7 @@ func (a *App) UploadEmojiImage(id string, imageData *multipart.FileHeader) *mode
if config.Width > MaxEmojiWidth || config.Height > MaxEmojiHeight {
data := buf.Bytes()
newbuf := bytes.NewBuffer(nil)
info, err := model.GetInfoForBytes(imageData.Filename, data)
info, err := model.GetInfoForBytes(imageData.Filename, bytes.NewReader(data), len(data))
if err != nil {
return err
}

View File

@@ -118,6 +118,15 @@ func (a *App) WriteFile(fr io.Reader, path string) (int64, *model.AppError) {
return backend.WriteFile(fr, path)
}
// AppendFile appends the contents of fr to the file at path on the
// configured file backend, returning the number of bytes written.
func (a *App) AppendFile(fr io.Reader, path string) (int64, *model.AppError) {
	backend, appErr := a.FileBackend()
	if appErr != nil {
		return 0, appErr
	}
	return backend.AppendFile(fr, path)
}
func (a *App) RemoveFile(path string) *model.AppError {
backend, err := a.FileBackend()
if err != nil {
@@ -157,7 +166,7 @@ func (a *App) getInfoForFilename(post *model.Post, teamId, channelId, userId, ol
return nil
}
info, err := model.GetInfoForBytes(name, data)
info, err := model.GetInfoForBytes(name, bytes.NewReader(data), len(data))
if err != nil {
mlog.Warn(
"Unable to fully decode file info when migrating post to use FileInfos",
@@ -902,7 +911,7 @@ func (a *App) DoUploadFileExpectModification(now time.Time, rawTeamId string, ra
channelId := filepath.Base(rawChannelId)
userId := filepath.Base(rawUserId)
info, err := model.GetInfoForBytes(filename, data)
info, err := model.GetInfoForBytes(filename, bytes.NewReader(data), len(data))
if err != nil {
err.StatusCode = http.StatusBadRequest
return nil, data, err

View File

@@ -589,6 +589,28 @@ func (a *OpenTracingAppLayer) AllowOAuthAppAccessToUser(userId string, authReque
return resultVar0, resultVar1
}
// AppendFile wraps App.AppendFile in an OpenTracing span, propagating the
// span context into the store for the duration of the call.
// NOTE: generated-style tracing boilerplate; keep in sync with the layer
// generator rather than editing logic by hand.
func (a *OpenTracingAppLayer) AppendFile(fr io.Reader, path string) (int64, *model.AppError) {
	origCtx := a.ctx
	span, newCtx := tracing.StartSpanWithParentByContext(a.ctx, "app.AppendFile")
	a.ctx = newCtx
	a.app.Srv().Store.SetContext(newCtx)
	// Restore the previous context once the wrapped call returns.
	defer func() {
		a.app.Srv().Store.SetContext(origCtx)
		a.ctx = origCtx
	}()
	defer span.Finish()
	resultVar0, resultVar1 := a.app.AppendFile(fr, path)
	if resultVar1 != nil {
		// Mark the span as errored so tracing backends can surface it.
		span.LogFields(spanlog.Error(resultVar1))
		ext.Error.Set(span, true)
	}
	return resultVar0, resultVar1
}
func (a *OpenTracingAppLayer) AsymmetricSigningKey() *ecdsa.PrivateKey {
origCtx := a.ctx
span, newCtx := tracing.StartSpanWithParentByContext(a.ctx, "app.AsymmetricSigningKey")
@@ -2135,6 +2157,28 @@ func (a *OpenTracingAppLayer) CreateTermsOfService(text string, userId string) (
return resultVar0, resultVar1
}
// CreateUploadSession wraps App.CreateUploadSession in an OpenTracing
// span (generated-style tracing boilerplate).
func (a *OpenTracingAppLayer) CreateUploadSession(us *model.UploadSession) (*model.UploadSession, *model.AppError) {
	origCtx := a.ctx
	span, newCtx := tracing.StartSpanWithParentByContext(a.ctx, "app.CreateUploadSession")
	a.ctx = newCtx
	a.app.Srv().Store.SetContext(newCtx)
	// Restore the previous context once the wrapped call returns.
	defer func() {
		a.app.Srv().Store.SetContext(origCtx)
		a.ctx = origCtx
	}()
	defer span.Finish()
	resultVar0, resultVar1 := a.app.CreateUploadSession(us)
	if resultVar1 != nil {
		span.LogFields(spanlog.Error(resultVar1))
		ext.Error.Set(span, true)
	}
	return resultVar0, resultVar1
}
func (a *OpenTracingAppLayer) CreateUser(user *model.User) (*model.User, *model.AppError) {
origCtx := a.ctx
span, newCtx := tracing.StartSpanWithParentByContext(a.ctx, "app.CreateUser")
@@ -8270,6 +8314,50 @@ func (a *OpenTracingAppLayer) GetTotalUsersStats(viewRestrictions *model.ViewUse
return resultVar0, resultVar1
}
// GetUploadSession wraps App.GetUploadSession in an OpenTracing span
// (generated-style tracing boilerplate).
func (a *OpenTracingAppLayer) GetUploadSession(uploadId string) (*model.UploadSession, *model.AppError) {
	origCtx := a.ctx
	span, newCtx := tracing.StartSpanWithParentByContext(a.ctx, "app.GetUploadSession")
	a.ctx = newCtx
	a.app.Srv().Store.SetContext(newCtx)
	// Restore the previous context once the wrapped call returns.
	defer func() {
		a.app.Srv().Store.SetContext(origCtx)
		a.ctx = origCtx
	}()
	defer span.Finish()
	resultVar0, resultVar1 := a.app.GetUploadSession(uploadId)
	if resultVar1 != nil {
		span.LogFields(spanlog.Error(resultVar1))
		ext.Error.Set(span, true)
	}
	return resultVar0, resultVar1
}
// GetUploadSessionsForUser wraps App.GetUploadSessionsForUser in an
// OpenTracing span (generated-style tracing boilerplate).
func (a *OpenTracingAppLayer) GetUploadSessionsForUser(userId string) ([]*model.UploadSession, *model.AppError) {
	origCtx := a.ctx
	span, newCtx := tracing.StartSpanWithParentByContext(a.ctx, "app.GetUploadSessionsForUser")
	a.ctx = newCtx
	a.app.Srv().Store.SetContext(newCtx)
	// Restore the previous context once the wrapped call returns.
	defer func() {
		a.app.Srv().Store.SetContext(origCtx)
		a.ctx = origCtx
	}()
	defer span.Finish()
	resultVar0, resultVar1 := a.app.GetUploadSessionsForUser(userId)
	if resultVar1 != nil {
		span.LogFields(spanlog.Error(resultVar1))
		ext.Error.Set(span, true)
	}
	return resultVar0, resultVar1
}
func (a *OpenTracingAppLayer) GetUser(userId string) (*model.User, *model.AppError) {
origCtx := a.ctx
span, newCtx := tracing.StartSpanWithParentByContext(a.ctx, "app.GetUser")
@@ -15019,6 +15107,28 @@ func (a *OpenTracingAppLayer) UpdateWebConnUserActivity(session model.Session, a
a.app.UpdateWebConnUserActivity(session, activityAt)
}
// UploadData wraps App.UploadData in an OpenTracing span (generated-style
// tracing boilerplate).
func (a *OpenTracingAppLayer) UploadData(us *model.UploadSession, rd io.Reader) (*model.FileInfo, *model.AppError) {
	origCtx := a.ctx
	span, newCtx := tracing.StartSpanWithParentByContext(a.ctx, "app.UploadData")
	a.ctx = newCtx
	a.app.Srv().Store.SetContext(newCtx)
	// Restore the previous context once the wrapped call returns.
	defer func() {
		a.app.Srv().Store.SetContext(origCtx)
		a.ctx = origCtx
	}()
	defer span.Finish()
	resultVar0, resultVar1 := a.app.UploadData(us, rd)
	if resultVar1 != nil {
		span.LogFields(spanlog.Error(resultVar1))
		ext.Error.Set(span, true)
	}
	return resultVar0, resultVar1
}
func (a *OpenTracingAppLayer) UploadEmojiImage(id string, imageData *multipart.FileHeader) *model.AppError {
origCtx := a.ctx
span, newCtx := tracing.StartSpanWithParentByContext(a.ctx, "app.UploadEmojiImage")

View File

@@ -170,6 +170,12 @@ type Server struct {
CacheProvider cache.Provider
tracer *tracing.Tracer
// These are used to prevent concurrent upload requests
// for a given upload session which could cause inconsistencies
// and data corruption.
uploadLockMapMut sync.Mutex
uploadLockMap map[string]bool
}
func NewServer(options ...Option) (*Server, error) {
@@ -182,6 +188,7 @@ func NewServer(options ...Option) (*Server, error) {
LocalRouter: localRouter,
licenseListeners: map[string]func(*model.License, *model.License){},
hashSeed: maphash.MakeSeed(),
uploadLockMap: map[string]bool{},
}
for _, option := range options {

248
app/upload.go Normal file
View File

@@ -0,0 +1,248 @@
// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved.
// See LICENSE.txt for license information.
package app
import (
"errors"
"io"
"net/http"
"path/filepath"
"strings"
"time"
"github.com/mattermost/mattermost-server/v5/mlog"
"github.com/mattermost/mattermost-server/v5/model"
"github.com/mattermost/mattermost-server/v5/plugin"
"github.com/mattermost/mattermost-server/v5/store"
)
// minFirstPartSize is the smallest acceptable first chunk of an upload
// unless the chunk is also the whole file — presumably matching the S3
// multipart minimum part size; TODO confirm against the file backend.
const minFirstPartSize = 5 * 1024 * 1024 // 5MB

// CreateUploadSession validates and persists a new upload session.
// FileOffset, CreateAt and the storage Path are assigned server-side, so
// callers cannot control where the data will be written.
func (a *App) CreateUploadSession(us *model.UploadSession) (*model.UploadSession, *model.AppError) {
	if us.FileSize > *a.Config().FileSettings.MaxFileSize {
		return nil, model.NewAppError("CreateUploadSession", "app.upload.create.upload_too_large.app_error",
			map[string]interface{}{"channelId": us.ChannelId}, "", http.StatusRequestEntityTooLarge)
	}
	us.FileOffset = 0
	now := time.Now()
	us.CreateAt = model.GetMillisForTime(now)
	// filepath.Base strips any directory components from the client
	// provided filename, preventing path traversal in the storage path.
	us.Path = now.Format("20060102") + "/teams/noteam/channels/" + us.ChannelId + "/users/" + us.UserId + "/" + us.Id + "/" + filepath.Base(us.Filename)
	if err := us.IsValid(); err != nil {
		return nil, err
	}
	// The target channel must exist and must not have been deleted.
	channel, err := a.GetChannel(us.ChannelId)
	if err != nil {
		return nil, model.NewAppError("CreateUploadSession", "app.upload.create.incorrect_channel_id.app_error",
			map[string]interface{}{"channelId": us.ChannelId}, "", http.StatusBadRequest)
	}
	if channel.DeleteAt != 0 {
		return nil, model.NewAppError("CreateUploadSession", "app.upload.create.cannot_upload_to_deleted_channel.app_error",
			map[string]interface{}{"channelId": us.ChannelId}, "", http.StatusBadRequest)
	}
	us, storeErr := a.Srv().Store.UploadSession().Save(us)
	if storeErr != nil {
		return nil, model.NewAppError("CreateUploadSession", "app.upload.create.save.app_error", nil, storeErr.Error(), http.StatusInternalServerError)
	}
	return us, nil
}
// GetUploadSession retrieves the upload session with the given id,
// mapping store-level errors onto the appropriate HTTP status.
func (a *App) GetUploadSession(uploadId string) (*model.UploadSession, *model.AppError) {
	us, storeErr := a.Srv().Store.UploadSession().Get(uploadId)
	if storeErr == nil {
		return us, nil
	}
	// A missing session is a 404; anything else is a server error.
	var nfErr *store.ErrNotFound
	if errors.As(storeErr, &nfErr) {
		return nil, model.NewAppError("GetUpload", "app.upload.get.app_error",
			nil, nfErr.Error(), http.StatusNotFound)
	}
	return nil, model.NewAppError("GetUpload", "app.upload.get.app_error",
		nil, storeErr.Error(), http.StatusInternalServerError)
}
// GetUploadSessionsForUser returns all upload sessions created by the
// given user.
func (a *App) GetUploadSessionsForUser(userId string) ([]*model.UploadSession, *model.AppError) {
	sessions, storeErr := a.Srv().Store.UploadSession().GetForUser(userId)
	if storeErr == nil {
		return sessions, nil
	}
	return nil, model.NewAppError("GetUploadsForUser", "app.upload.get_for_user.app_error",
		nil, storeErr.Error(), http.StatusInternalServerError)
}
// UploadData writes the data read from rd into the file backed by the
// given upload session, starting at the session's current FileOffset.
// It returns (nil, nil) while the upload is still incomplete; once the
// final byte is written it runs the plugin upload hook and image
// post-processing, persists a FileInfo, deletes the session and returns
// the FileInfo.
func (a *App) UploadData(us *model.UploadSession, rd io.Reader) (*model.FileInfo, *model.AppError) {
	// prevent more than one caller to upload data at the same time for a given upload session.
	// This is to avoid possible inconsistencies.
	a.Srv().uploadLockMapMut.Lock()
	locked := a.Srv().uploadLockMap[us.Id]
	if locked {
		// session lock is already taken, return error.
		a.Srv().uploadLockMapMut.Unlock()
		return nil, model.NewAppError("UploadData", "app.upload.upload_data.concurrent.app_error",
			nil, "", http.StatusBadRequest)
	}
	// grab the session lock.
	a.Srv().uploadLockMap[us.Id] = true
	a.Srv().uploadLockMapMut.Unlock()
	// reset the session lock on exit.
	defer func() {
		a.Srv().uploadLockMapMut.Lock()
		delete(a.Srv().uploadLockMap, us.Id)
		a.Srv().uploadLockMapMut.Unlock()
	}()
	// make sure it's not possible to upload more data than what is expected.
	lr := &io.LimitedReader{
		R: rd,
		N: us.FileSize - us.FileOffset,
	}
	var err *model.AppError
	var written int64
	if us.FileOffset == 0 {
		// new upload
		written, err = a.WriteFile(lr, us.Path)
		if err != nil && written == 0 {
			return nil, err
		}
		// The first chunk must be at least minFirstPartSize unless it is
		// also the complete file.
		if written < minFirstPartSize && written != us.FileSize {
			a.RemoveFile(us.Path)
			var errStr string
			if err != nil {
				errStr = err.Error()
			}
			return nil, model.NewAppError("UploadData", "app.upload.upload_data.first_part_too_small.app_error",
				map[string]interface{}{"Size": minFirstPartSize}, errStr, http.StatusBadRequest)
		}
	} else if us.FileOffset < us.FileSize {
		// resume upload
		written, err = a.AppendFile(lr, us.Path)
	}
	// Persist the new offset even after a partial write so the client
	// can resume from where the write actually stopped.
	if written > 0 {
		us.FileOffset += written
		if storeErr := a.Srv().Store.UploadSession().Update(us); storeErr != nil {
			return nil, model.NewAppError("UploadData", "app.upload.upload_data.update.app_error", nil, storeErr.Error(), http.StatusInternalServerError)
		}
	}
	if err != nil {
		return nil, err
	}
	// upload is incomplete
	if us.FileOffset != us.FileSize {
		return nil, nil
	}
	// upload is done, create FileInfo
	file, err := a.FileReader(us.Path)
	if err != nil {
		return nil, model.NewAppError("UploadData", "app.upload.upload_data.read_file.app_error", nil, err.Error(), http.StatusInternalServerError)
	}
	info, err := model.GetInfoForBytes(us.Filename, file, int(us.FileSize))
	file.Close()
	if err != nil {
		return nil, err
	}
	info.CreatorId = us.UserId
	info.Path = us.Path
	// call plugins upload hook
	if pluginsEnvironment := a.GetPluginsEnvironment(); pluginsEnvironment != nil {
		// using a pipe to avoid loading the whole file content in memory.
		r, w := io.Pipe()
		errChan := make(chan *model.AppError, 1)
		go func() {
			defer w.Close()
			defer close(errChan)
			pluginContext := a.PluginContext()
			// NOTE(review): `file` was closed above before this hook runs —
			// verify the hook actually receives a readable stream.
			pluginsEnvironment.RunMultiPluginHook(func(hooks plugin.Hooks) bool {
				newInfo, rejStr := hooks.FileWillBeUploaded(pluginContext, info, file, w)
				if rejStr != "" {
					errChan <- model.NewAppError("UploadData", "File rejected by plugin. "+rejStr, nil, "", http.StatusBadRequest)
					return false
				}
				if newInfo != nil {
					info = newInfo
				}
				return true
			}, plugin.FileWillBeUploadedId)
		}()
		// Stream whatever the plugins wrote into a temporary file, then
		// swap it in (or discard it) depending on the outcome.
		var written int64
		tmpPath := us.Path + ".tmp"
		written, err = a.WriteFile(r, tmpPath)
		if err != nil {
			if fileErr := a.RemoveFile(tmpPath); fileErr != nil {
				mlog.Error("Failed to remove file", mlog.Err(fileErr))
			}
			return nil, err
		}
		if err = <-errChan; err != nil {
			// A plugin rejected the upload: remove both the original and
			// the temporary file.
			if fileErr := a.RemoveFile(us.Path); fileErr != nil {
				mlog.Error("Failed to remove file", mlog.Err(fileErr))
			}
			if fileErr := a.RemoveFile(tmpPath); fileErr != nil {
				mlog.Error("Failed to remove file", mlog.Err(fileErr))
			}
			return nil, err
		}
		if written > 0 {
			// A plugin replaced the file content.
			info.Size = written
			if fileErr := a.MoveFile(tmpPath, us.Path); fileErr != nil {
				mlog.Error("Failed to move file", mlog.Err(fileErr))
			}
		} else {
			if fileErr := a.RemoveFile(tmpPath); fileErr != nil {
				mlog.Error("Failed to remove file", mlog.Err(fileErr))
			}
		}
	}
	// image post-processing
	if info.IsImage() {
		// Check dimensions before loading the whole thing into memory later on
		// This casting is done to prevent overflow on 32 bit systems (not needed
		// in 64 bits systems because images can't have more than 32 bits height or
		// width)
		if int64(info.Width)*int64(info.Height) > MaxImageSize {
			return nil, model.NewAppError("uploadData", "app.upload.upload_data.large_image.app_error",
				map[string]interface{}{"Filename": us.Filename, "Width": info.Width, "Height": info.Height}, "", http.StatusBadRequest)
		}
		// NOTE(review): assumes info.Name contains a "." — strings.LastIndex
		// returns -1 otherwise and the slice would panic; confirm IsImage
		// guarantees an extension.
		nameWithoutExtension := info.Name[:strings.LastIndex(info.Name, ".")]
		info.PreviewPath = filepath.Dir(info.Path) + "/" + nameWithoutExtension + "_preview.jpg"
		info.ThumbnailPath = filepath.Dir(info.Path) + "/" + nameWithoutExtension + "_thumb.jpg"
		imgData, fileErr := a.ReadFile(us.Path)
		if fileErr != nil {
			return nil, fileErr
		}
		a.HandleImages([]string{info.PreviewPath}, []string{info.ThumbnailPath}, [][]byte{imgData})
	}
	var storeErr error
	if info, storeErr = a.Srv().Store.FileInfo().Save(info); storeErr != nil {
		var appErr *model.AppError
		switch {
		case errors.As(storeErr, &appErr):
			return nil, appErr
		default:
			return nil, model.NewAppError("uploadData", "app.upload.upload_data.save.app_error", nil, storeErr.Error(), http.StatusInternalServerError)
		}
	}
	// delete upload session
	if storeErr := a.Srv().Store.UploadSession().Delete(us.Id); storeErr != nil {
		mlog.Error("Failed to delete UploadSession", mlog.Err(storeErr))
	}
	return info, nil
}

308
app/upload_test.go Normal file
View File

@@ -0,0 +1,308 @@
// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved.
// See LICENSE.txt for license information.
package app
import (
"bytes"
"io"
"io/ioutil"
"math/rand"
"path/filepath"
"sync"
"sync/atomic"
"testing"
"github.com/mattermost/mattermost-server/v5/model"
"github.com/mattermost/mattermost-server/v5/utils/fileutils"
"github.com/stretchr/testify/require"
)
// TestCreateUploadSession exercises App.CreateUploadSession validation:
// size limit, invalid fields, missing/deleted channels, and success.
func TestCreateUploadSession(t *testing.T) {
	th := Setup(t).InitBasic()
	defer th.TearDown()
	// Shared session whose fields are mutated per subtest; the subtests
	// are order-dependent (e.g. Id is only set from "invalid UserId" on).
	us := &model.UploadSession{
		Type:      model.UploadTypeAttachment,
		UserId:    th.BasicUser.Id,
		ChannelId: th.BasicChannel.Id,
		Filename:  "upload",
		FileSize:  8 * 1024 * 1024,
	}
	t.Run("FileSize over limit", func(t *testing.T) {
		maxFileSize := *th.App.Config().FileSettings.MaxFileSize
		th.App.UpdateConfig(func(cfg *model.Config) { *cfg.FileSettings.MaxFileSize = us.FileSize - 1 })
		defer th.App.UpdateConfig(func(cfg *model.Config) { *cfg.FileSettings.MaxFileSize = maxFileSize })
		u, err := th.App.CreateUploadSession(us)
		require.NotNil(t, err)
		require.Equal(t, "app.upload.create.upload_too_large.app_error", err.Id)
		require.Nil(t, u)
	})
	t.Run("invalid Id", func(t *testing.T) {
		// us.Id is still empty here.
		u, err := th.App.CreateUploadSession(us)
		require.NotNil(t, err)
		require.Equal(t, "model.upload_session.is_valid.id.app_error", err.Id)
		require.Nil(t, u)
	})
	t.Run("invalid UserId", func(t *testing.T) {
		us.Id = model.NewId()
		us.UserId = ""
		u, err := th.App.CreateUploadSession(us)
		require.NotNil(t, err)
		require.Equal(t, "model.upload_session.is_valid.user_id.app_error", err.Id)
		require.Nil(t, u)
	})
	t.Run("invalid ChannelId", func(t *testing.T) {
		us.UserId = th.BasicUser.Id
		us.ChannelId = ""
		u, err := th.App.CreateUploadSession(us)
		require.NotNil(t, err)
		require.Equal(t, "model.upload_session.is_valid.channel_id.app_error", err.Id)
		require.Nil(t, u)
	})
	t.Run("non-existing channel", func(t *testing.T) {
		us.ChannelId = model.NewId()
		u, err := th.App.CreateUploadSession(us)
		require.NotNil(t, err)
		require.Equal(t, "app.upload.create.incorrect_channel_id.app_error", err.Id)
		require.Nil(t, u)
	})
	t.Run("deleted channel", func(t *testing.T) {
		ch := th.CreateChannel(th.BasicTeam)
		th.App.DeleteChannel(ch, th.BasicUser.Id)
		us.ChannelId = ch.Id
		u, err := th.App.CreateUploadSession(us)
		require.NotNil(t, err)
		require.Equal(t, "app.upload.create.cannot_upload_to_deleted_channel.app_error", err.Id)
		require.Nil(t, u)
	})
	t.Run("success", func(t *testing.T) {
		us.ChannelId = th.BasicChannel.Id
		u, err := th.App.CreateUploadSession(us)
		require.Nil(t, err)
		require.NotEmpty(t, u)
	})
}
// TestUploadData exercises App.UploadData: a write failure path, the
// minimum first-part size check, resuming an upload across two requests,
// uploading everything at once, small files below the minimum part size,
// and image post-processing. Subtests share and mutate `us` and `data`,
// so their order is significant.
func TestUploadData(t *testing.T) {
	th := Setup(t).InitBasic()
	defer th.TearDown()
	// 8MB session: large enough to be uploaded in multiple parts.
	us := &model.UploadSession{
		Id:        model.NewId(),
		Type:      model.UploadTypeAttachment,
		UserId:    th.BasicUser.Id,
		ChannelId: th.BasicChannel.Id,
		Filename:  "upload",
		FileSize:  8 * 1024 * 1024,
	}
	var err error
	us, err = th.App.CreateUploadSession(us)
	require.Nil(t, err)
	require.NotEmpty(t, us)
	// Random payload matching the declared FileSize.
	data := make([]byte, us.FileSize)
	_, err2 := rand.Read(data)
	require.NoError(t, err2)
	t.Run("write error", func(t *testing.T) {
		rd := &io.LimitedReader{
			R: bytes.NewReader(data),
			N: 1024 * 1024,
		}
		ok, err := th.App.FileExists(us.Path)
		require.False(t, ok)
		require.Nil(t, err)
		// Blank out the path on a copy to force a write failure that is
		// not the first-part-too-small error.
		u := *us
		u.Path = ""
		info, err := th.App.UploadData(&u, rd)
		require.Nil(t, info)
		require.NotNil(t, err)
		require.NotEqual(t, "app.upload.upload_data.first_part_too_small.app_error", err.Id)
	})
	t.Run("first part too small", func(t *testing.T) {
		// 1MB is below the required minimum first-part size, so nothing
		// should be persisted.
		rd := &io.LimitedReader{
			R: bytes.NewReader(data),
			N: 1024 * 1024,
		}
		ok, err := th.App.FileExists(us.Path)
		require.False(t, ok)
		require.Nil(t, err)
		info, err := th.App.UploadData(us, rd)
		require.Nil(t, info)
		require.NotNil(t, err)
		require.Equal(t, "app.upload.upload_data.first_part_too_small.app_error", err.Id)
		ok, err = th.App.FileExists(us.Path)
		require.False(t, ok)
		require.Nil(t, err)
	})
	t.Run("resume success", func(t *testing.T) {
		// First request uploads 5MB; info is nil because the upload is
		// not yet complete.
		rd := &io.LimitedReader{
			R: bytes.NewReader(data),
			N: 5 * 1024 * 1024,
		}
		info, err := th.App.UploadData(us, rd)
		require.Nil(t, info)
		require.Nil(t, err)
		// Second request resumes with the remaining 3MB and completes.
		rd = &io.LimitedReader{
			R: bytes.NewReader(data[5*1024*1024:]),
			N: 3 * 1024 * 1024,
		}
		info, err = th.App.UploadData(us, rd)
		require.Nil(t, err)
		require.NotEmpty(t, info)
		// The stored file must be byte-identical to the payload.
		d, err := th.App.ReadFile(us.Path)
		require.Nil(t, err)
		require.Equal(t, data, d)
	})
	t.Run("all at once success", func(t *testing.T) {
		us.Id = model.NewId()
		us, err = th.App.CreateUploadSession(us)
		require.Nil(t, err)
		require.NotEmpty(t, us)
		info, err := th.App.UploadData(us, bytes.NewReader(data))
		require.Nil(t, err)
		require.NotEmpty(t, info)
		d, err := th.App.ReadFile(us.Path)
		require.Nil(t, err)
		require.Equal(t, data, d)
	})
	t.Run("small file success", func(t *testing.T) {
		// A file smaller than the minimum part size uploads in one shot.
		us.Id = model.NewId()
		us.FileSize = 1024 * 1024
		us, err = th.App.CreateUploadSession(us)
		require.Nil(t, err)
		require.NotEmpty(t, us)
		rd := &io.LimitedReader{
			R: bytes.NewReader(data),
			N: 1024 * 1024,
		}
		info, err := th.App.UploadData(us, rd)
		require.Nil(t, err)
		require.NotEmpty(t, info)
		d, err := th.App.ReadFile(us.Path)
		require.Nil(t, err)
		require.Equal(t, data[:1024*1024], d)
	})
	t.Run("image processing", func(t *testing.T) {
		// Uploading an image should also produce dimensions, a thumbnail
		// and a preview.
		testDir, _ := fileutils.FindDir("tests")
		data, err := ioutil.ReadFile(filepath.Join(testDir, "test.png"))
		require.Nil(t, err)
		require.NotEmpty(t, data)
		us.Id = model.NewId()
		us.Filename = "test.png"
		us.FileSize = int64(len(data))
		us, err = th.App.CreateUploadSession(us)
		require.Nil(t, err)
		require.NotEmpty(t, us)
		info, err := th.App.UploadData(us, bytes.NewReader(data))
		require.Nil(t, err)
		require.NotEmpty(t, info)
		require.NotZero(t, info.Width)
		require.NotZero(t, info.Height)
		require.NotEmpty(t, info.ThumbnailPath)
		require.NotEmpty(t, info.PreviewPath)
	})
}
// TestUploadDataConcurrent verifies that concurrent requests on the same
// upload session are rejected: of n simultaneous UploadData calls, exactly
// one must proceed and the rest must fail with the "concurrent" app error.
// This is exercised twice — once for the initial 5MB part and once for the
// resuming 3MB part.
func TestUploadDataConcurrent(t *testing.T) {
	th := Setup(t).InitBasic()
	defer th.TearDown()
	us := &model.UploadSession{
		Id:        model.NewId(),
		Type:      model.UploadTypeAttachment,
		UserId:    th.BasicUser.Id,
		ChannelId: th.BasicChannel.Id,
		Filename:  "upload",
		FileSize:  8 * 1024 * 1024,
	}
	var err error
	us, err = th.App.CreateUploadSession(us)
	require.Nil(t, err)
	require.NotEmpty(t, us)
	data := make([]byte, us.FileSize)
	_, err2 := rand.Read(data)
	require.NoError(t, err2)
	// Count rejections atomically since they happen across goroutines.
	var nErrs int32
	var wg sync.WaitGroup
	n := 8
	wg.Add(n)
	for i := 0; i < n; i++ {
		go func() {
			defer wg.Done()
			rd := &io.LimitedReader{
				R: bytes.NewReader(data),
				N: 5 * 1024 * 1024,
			}
			// Each goroutine uses its own copy of the session value.
			u := *us
			_, err := th.App.UploadData(&u, rd)
			if err != nil && err.Id == "app.upload.upload_data.concurrent.app_error" {
				atomic.AddInt32(&nErrs, 1)
			}
		}()
	}
	wg.Wait()
	// Verify that only 1 request was able to perform the upload.
	require.Equal(t, int32(n-1), nErrs)
	wg.Add(n)
	for i := 0; i < n; i++ {
		go func() {
			defer wg.Done()
			rd := &io.LimitedReader{
				R: bytes.NewReader(data[5*1024*1024:]),
				N: 3 * 1024 * 1024,
			}
			u := *us
			// Resume from the 5MB written by the first round.
			u.FileOffset = 5 * 1024 * 1024
			_, err := th.App.UploadData(&u, rd)
			if err != nil && err.Id == "app.upload.upload_data.concurrent.app_error" {
				atomic.AddInt32(&nErrs, 1)
			}
		}()
	}
	wg.Wait()
	// Verify that only 1 request was able to finish the upload.
	require.Equal(t, int32(n*2-2), nErrs)
	// The final file content must match the payload exactly.
	d, err := th.App.ReadFile(us.Path)
	require.Nil(t, err)
	require.Equal(t, data, d)
}

View File

@@ -4,7 +4,7 @@ go 1.14
require (
github.com/jstemmer/go-junit-report v0.9.1 // indirect
github.com/mattermost/mattermost-utilities/mmgotool v0.0.0-20200828152206-519b99a4e51e // indirect
github.com/mattermost/mattermost-utilities/mmgotool v0.0.0-20200915114419-f4421bc07461 // indirect
github.com/philhofer/fwd v1.0.0 // indirect
github.com/reflog/struct2interface v0.6.1 // indirect
github.com/tinylib/msgp v1.1.2 // indirect

View File

@@ -66,6 +66,8 @@ github.com/mattermost/mattermost-utilities/mmgotool v0.0.0-20200825115902-a83581
github.com/mattermost/mattermost-utilities/mmgotool v0.0.0-20200825115902-a83581379d0a/go.mod h1:3gKozJI8n2Y/vW37GfnFWAdehGXe5yZlt+HykK6Y3DM=
github.com/mattermost/mattermost-utilities/mmgotool v0.0.0-20200828152206-519b99a4e51e h1:dQ+AQmmcn+kncwE/XfC3xVPyegR84d4dno/UOXI3eJs=
github.com/mattermost/mattermost-utilities/mmgotool v0.0.0-20200828152206-519b99a4e51e/go.mod h1:3gKozJI8n2Y/vW37GfnFWAdehGXe5yZlt+HykK6Y3DM=
github.com/mattermost/mattermost-utilities/mmgotool v0.0.0-20200915114419-f4421bc07461 h1:dn2/HZjzUY5PQmKDmH95vgwVqpbR84FiU/t060g7rqg=
github.com/mattermost/mattermost-utilities/mmgotool v0.0.0-20200915114419-f4421bc07461/go.mod h1:3gKozJI8n2Y/vW37GfnFWAdehGXe5yZlt+HykK6Y3DM=
github.com/matttproud/golang_protobuf_extensions v1.0.1/go.mod h1:D8He9yQNgCq6Z5Ld7szi9bcBfOoFv/3dc6xSMkL2PC0=
github.com/mitchellh/go-homedir v1.1.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0=
github.com/mitchellh/mapstructure v1.1.2/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y=

View File

@@ -1312,6 +1312,22 @@
"id": "api.emoji.upload.open.app_error",
"translation": "Unable to create the emoji. An error occurred when trying to open the attached image."
},
{
"id": "api.file.append_file.no_exist.app_error",
"translation": "File does not exist."
},
{
"id": "api.file.append_file.opening.app_error",
"translation": "Encountered an error opening the file."
},
{
"id": "api.file.append_file.s3.app_error",
"translation": "Encountered an error appending to a file in S3."
},
{
"id": "api.file.append_file.writing.app_error",
"translation": "Encountered an error writing to S3."
},
{
"id": "api.file.attachments.disabled.app_error",
"translation": "File attachments have been disabled on this server."
@@ -2686,6 +2702,14 @@
"id": "api.upgrade_to_enterprise_status.signature.app_error",
"translation": "Mattermost was unable to upgrade to Enterprise Edition. The digital signature of the downloaded binary file could not be verified."
},
{
"id": "api.upload.get_upload.forbidden.app_error",
"translation": "Failed to get upload."
},
{
"id": "api.upload.upload_data.invalid_content_length",
"translation": "Invalid Content-Length."
},
{
"id": "api.user.activate_mfa.email_and_ldap_only.app_error",
"translation": "MFA is not available for this account type."
@@ -2850,6 +2874,10 @@
"id": "api.user.get_authorization_code.unsupported.app_error",
"translation": "Unsupported OAuth service provider."
},
{
"id": "api.user.get_uploads_for_user.forbidden.app_error",
"translation": "Failed to get uploads."
},
{
"id": "api.user.get_user_by_email.permissions.app_error",
"translation": "Unable to get user by email."
@@ -4886,6 +4914,54 @@
"id": "app.terms_of_service.get.no_rows.app_error",
"translation": "No terms of service found."
},
{
"id": "app.upload.create.cannot_upload_to_deleted_channel.app_error",
"translation": "Cannot upload to a deleted channel."
},
{
"id": "app.upload.create.incorrect_channel_id.app_error",
"translation": "Cannot upload to the specified channel."
},
{
"id": "app.upload.create.save.app_error",
"translation": "Failed to save upload."
},
{
"id": "app.upload.create.upload_too_large.app_error",
"translation": "Unable to upload file. File is too large."
},
{
"id": "app.upload.get.app_error",
"translation": "Failed to get upload."
},
{
"id": "app.upload.get_for_user.app_error",
"translation": "Failed to get uploads for user."
},
{
"id": "app.upload.upload_data.concurrent.app_error",
"translation": "Unable to upload data from more than one request."
},
{
"id": "app.upload.upload_data.first_part_too_small.app_error",
"translation": "Failed to upload data. First part must be at least {{.Size}} Bytes."
},
{
"id": "app.upload.upload_data.large_image.app_error",
"translation": "{{.Filename}} dimensions ({{.Width}} by {{.Height}} pixels) exceed the limits."
},
{
"id": "app.upload.upload_data.read_file.app_error",
"translation": "Failed to read a file."
},
{
"id": "app.upload.upload_data.save.app_error",
"translation": "Failed to save file info."
},
{
"id": "app.upload.upload_data.update.app_error",
"translation": "Failed to update the upload session."
},
{
"id": "app.user.convert_bot_to_user.app_error",
"translation": "Unable to convert bot to user."
@@ -7074,6 +7150,42 @@
"id": "model.token.is_valid.size",
"translation": "Invalid token."
},
{
"id": "model.upload_session.is_valid.channel_id.app_error",
"translation": "Invalid value for ChannelId."
},
{
"id": "model.upload_session.is_valid.create_at.app_error",
"translation": "Invalid value for CreateAt."
},
{
"id": "model.upload_session.is_valid.file_offset.app_error",
"translation": "Invalid value for FileOffset."
},
{
"id": "model.upload_session.is_valid.file_size.app_error",
"translation": "Invalid value for FileSize."
},
{
"id": "model.upload_session.is_valid.filename.app_error",
"translation": "Invalid value for Filename."
},
{
"id": "model.upload_session.is_valid.id.app_error",
"translation": "Invalid value for Id."
},
{
"id": "model.upload_session.is_valid.path.app_error",
"translation": "Invalid value for Path."
},
{
"id": "model.upload_session.is_valid.type.app_error",
"translation": "Invalid value for Type."
},
{
"id": "model.upload_session.is_valid.user_id.app_error",
"translation": "Invalid value for UserId."
},
{
"id": "model.user.is_valid.auth_data.app_error",
"translation": "Invalid auth data."

View File

@@ -313,6 +313,14 @@ func (c *Client4) GetFileRoute(fileId string) string {
return fmt.Sprintf(c.GetFilesRoute()+"/%v", fileId)
}
// GetUploadsRoute returns the API route used to manage upload sessions.
func (c *Client4) GetUploadsRoute() string {
	return "/uploads"
}
// GetUploadRoute returns the API route for the upload session with the
// given id.
func (c *Client4) GetUploadRoute(uploadId string) string {
	return c.GetUploadsRoute() + "/" + uploadId
}
func (c *Client4) GetPluginsRoute() string {
return "/plugins"
}
@@ -5531,3 +5539,46 @@ func (c *Client4) CheckIntegrity() ([]IntegrityCheckResult, *Response) {
}
return results, BuildResponse(r)
}
// CreateUpload creates a new upload session on the server and returns it
// on success.
func (c *Client4) CreateUpload(us *UploadSession) (*UploadSession, *Response) {
	resp, err := c.DoApiPost(c.GetUploadsRoute(), us.ToJson())
	if err != nil {
		return nil, BuildErrorResponse(resp, err)
	}
	defer closeBody(resp)
	return UploadSessionFromJson(resp.Body), BuildResponse(resp)
}
// GetUpload returns the upload session for the specified uploadId.
func (c *Client4) GetUpload(uploadId string) (*UploadSession, *Response) {
	resp, err := c.DoApiGet(c.GetUploadRoute(uploadId), "")
	if err != nil {
		return nil, BuildErrorResponse(resp, err)
	}
	defer closeBody(resp)
	return UploadSessionFromJson(resp.Body), BuildResponse(resp)
}
// GetUploadsForUser returns the upload sessions created by the user with
// the specified userId.
func (c *Client4) GetUploadsForUser(userId string) ([]*UploadSession, *Response) {
	route := c.GetUserRoute(userId) + "/uploads"
	resp, err := c.DoApiGet(route, "")
	if err != nil {
		return nil, BuildErrorResponse(resp, err)
	}
	defer closeBody(resp)
	return UploadSessionsFromJson(resp.Body), BuildResponse(resp)
}
// UploadData streams data to the upload session identified by uploadId.
// On success it returns the FileInfo for the uploaded file.
func (c *Client4) UploadData(uploadId string, data io.Reader) (*FileInfo, *Response) {
	endpoint := c.ApiUrl + c.GetUploadRoute(uploadId)
	resp, err := c.doApiRequestReader("POST", endpoint, data, "")
	if err != nil {
		return nil, BuildErrorResponse(resp, err)
	}
	defer closeBody(resp)
	return FileInfoFromJson(resp.Body), BuildResponse(resp)
}

View File

@@ -4,7 +4,6 @@
package model
import (
"bytes"
"encoding/json"
"image"
"image/gif"
@@ -151,10 +150,10 @@ func NewInfo(name string) *FileInfo {
return info
}
func GetInfoForBytes(name string, data []byte) (*FileInfo, *AppError) {
func GetInfoForBytes(name string, data io.ReadSeeker, size int) (*FileInfo, *AppError) {
info := &FileInfo{
Name: name,
Size: int64(len(data)),
Size: int64(size),
}
var err *AppError
@@ -170,16 +169,17 @@ func GetInfoForBytes(name string, data []byte) (*FileInfo, *AppError) {
if info.IsImage() {
// Only set the width and height if it's actually an image that we can understand
if config, _, err := image.DecodeConfig(bytes.NewReader(data)); err == nil {
if config, _, err := image.DecodeConfig(data); err == nil {
info.Width = config.Width
info.Height = config.Height
if info.MimeType == "image/gif" {
// Just show the gif itself instead of a preview image for animated gifs
if gifConfig, err := gif.DecodeAll(bytes.NewReader(data)); err != nil {
data.Seek(0, io.SeekStart)
if gifConfig, err := gif.DecodeAll(data); err != nil {
// Still return the rest of the info even though it doesn't appear to be an actual gif
info.HasPreviewImage = true
return info, NewAppError("GetInfoForBytes", "model.file_info.get.gif.app_error", nil, "name="+name, http.StatusBadRequest)
return info, NewAppError("GetInfoForBytes", "model.file_info.get.gif.app_error", nil, err.Error(), http.StatusBadRequest)
} else {
info.HasPreviewImage = len(gifConfig.Image) == 1
}

View File

@@ -4,6 +4,7 @@
package model
import (
"bytes"
"encoding/base64"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
@@ -185,7 +186,7 @@ func TestGetInfoForFile(t *testing.T) {
for _, tc := range ttc {
t.Run(tc.testName, func(t *testing.T) {
info, errApp := GetInfoForBytes(tc.filename, tc.file)
info, errApp := GetInfoForBytes(tc.filename, bytes.NewReader(tc.file), len(tc.file))
require.Nil(t, errApp)
assert.Equalf(t, tc.filename, info.Name, "Got incorrect filename: %v", info.Name)

141
model/upload_session.go Normal file
View File

@@ -0,0 +1,141 @@
// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved.
// See LICENSE.txt for license information.
package model
import (
"encoding/json"
"fmt"
"io"
"net/http"
)
// UploadType defines the type of an upload.
type UploadType string

const (
	// UploadTypeAttachment is an upload destined to become a file
	// attachment in a channel (IsValid requires a ChannelId for it).
	UploadTypeAttachment UploadType = "attachment"
	// UploadTypeImport is an upload used for imports; no channel is
	// required.
	UploadTypeImport UploadType = "import"
)
// UploadSession contains information used to keep track of a file upload
// that may span multiple requests (resumable upload).
type UploadSession struct {
	// The unique identifier for the session.
	Id string `json:"id"`
	// The type of the upload (attachment or import).
	Type UploadType `json:"type"`
	// The timestamp of creation, in milliseconds.
	CreateAt int64 `json:"create_at"`
	// The id of the user performing the upload.
	UserId string `json:"user_id"`
	// The id of the channel to upload to. Only required for attachments.
	ChannelId string `json:"channel_id"`
	// The name of the file to upload.
	Filename string `json:"filename"`
	// The path where the file is stored. Server-side only: excluded from
	// JSON serialization.
	Path string `json:"-"`
	// The size of the file to upload.
	FileSize int64 `json:"file_size"`
	// The amount of received data in bytes. If equal to FileSize it means the
	// upload has finished.
	FileOffset int64 `json:"file_offset"`
}
// ToJson serializes the UploadSession into JSON and returns it as a
// string. Marshaling errors are ignored (the struct is always
// marshalable).
func (us *UploadSession) ToJson() string {
	data, _ := json.Marshal(us)
	return string(data)
}
// UploadSessionsToJson serializes a list of UploadSession into JSON and
// returns it as a string.
func UploadSessionsToJson(uss []*UploadSession) string {
	data, _ := json.Marshal(uss)
	return string(data)
}
// UploadSessionsFromJson deserializes a list of UploadSession from JSON
// data. It returns nil if decoding fails.
func UploadSessionsFromJson(data io.Reader) []*UploadSession {
	var sessions []*UploadSession
	if err := json.NewDecoder(data).Decode(&sessions); err != nil {
		return nil
	}
	return sessions
}
// UploadSessionFromJson deserializes an UploadSession from JSON data.
// It returns nil if decoding fails.
func UploadSessionFromJson(data io.Reader) *UploadSession {
	var session UploadSession
	if err := json.NewDecoder(data).Decode(&session); err != nil {
		return nil
	}
	return &session
}
// PreSave fills in the Id and CreateAt fields when they have not been set
// by the caller.
func (us *UploadSession) PreSave() {
	if len(us.Id) == 0 {
		us.Id = NewId()
	}
	if us.CreateAt == 0 {
		us.CreateAt = GetMillis()
	}
}
// IsValid validates an UploadType. It returns nil for a known type and an
// error otherwise.
//
// The original implementation had an empty `default:` clause with the
// error return placed after the switch; the error case now lives in the
// switch itself, with identical behavior.
func (t UploadType) IsValid() error {
	switch t {
	case UploadTypeAttachment, UploadTypeImport:
		return nil
	default:
		return fmt.Errorf("invalid UploadType %s", t)
	}
}
// IsValid validates an UploadSession. It returns an AppError describing
// the first failing field, or nil when the session is valid.
func (us *UploadSession) IsValid() *AppError {
	// All field errors share the same origin and status code.
	fieldError := func(errId, details string) *AppError {
		return NewAppError("UploadSession.IsValid", errId, nil, details, http.StatusBadRequest)
	}
	if !IsValidId(us.Id) {
		return fieldError("model.upload_session.is_valid.id.app_error", "")
	}
	if err := us.Type.IsValid(); err != nil {
		return fieldError("model.upload_session.is_valid.type.app_error", err.Error())
	}
	details := "id=" + us.Id
	if !IsValidId(us.UserId) {
		return fieldError("model.upload_session.is_valid.user_id.app_error", details)
	}
	// A channel is only required for attachment uploads.
	if us.Type == UploadTypeAttachment && !IsValidId(us.ChannelId) {
		return fieldError("model.upload_session.is_valid.channel_id.app_error", details)
	}
	if us.CreateAt == 0 {
		return fieldError("model.upload_session.is_valid.create_at.app_error", details)
	}
	if us.Filename == "" {
		return fieldError("model.upload_session.is_valid.filename.app_error", details)
	}
	if us.FileSize <= 0 {
		return fieldError("model.upload_session.is_valid.file_size.app_error", details)
	}
	// The offset can never exceed the declared file size.
	if us.FileOffset < 0 || us.FileOffset > us.FileSize {
		return fieldError("model.upload_session.is_valid.file_offset.app_error", details)
	}
	if us.Path == "" {
		return fieldError("model.upload_session.is_valid.path.app_error", details)
	}
	return nil
}

View File

@@ -0,0 +1,125 @@
// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved.
// See LICENSE.txt for license information.
package model
import (
"testing"
"github.com/stretchr/testify/require"
)
// TestUploadSessionIsValid verifies UploadSession.IsValid field by field:
// a fully populated session passes, and each invalid field produces the
// matching app error id. Later subtests copy the shared `session` value
// populated by the "valid session" subtest, so their order matters.
func TestUploadSessionIsValid(t *testing.T) {
	var session UploadSession
	t.Run("empty session should fail", func(t *testing.T) {
		err := session.IsValid()
		require.NotNil(t, err)
	})
	t.Run("valid session should succeed", func(t *testing.T) {
		// This assignment is reused as the baseline by every later subtest.
		session = UploadSession{
			Id:         NewId(),
			Type:       UploadTypeAttachment,
			CreateAt:   GetMillis(),
			UserId:     NewId(),
			ChannelId:  NewId(),
			Filename:   "test",
			Path:       "/tmp/test",
			FileSize:   1024,
			FileOffset: 0,
		}
		err := session.IsValid()
		require.Nil(t, err)
	})
	t.Run("invalid Id should fail", func(t *testing.T) {
		us := session
		us.Id = "invalid"
		err := us.IsValid()
		require.NotNil(t, err)
		require.Equal(t, "model.upload_session.is_valid.id.app_error", err.Id)
	})
	t.Run("invalid type should fail", func(t *testing.T) {
		us := session
		us.Type = "invalid"
		err := us.IsValid()
		require.NotNil(t, err)
		require.Equal(t, "model.upload_session.is_valid.type.app_error", err.Id)
	})
	t.Run("invalid CreateAt should fail", func(t *testing.T) {
		us := session
		us.CreateAt = 0
		err := us.IsValid()
		require.NotNil(t, err)
		require.Equal(t, "model.upload_session.is_valid.create_at.app_error", err.Id)
	})
	t.Run("invalid UserId should fail", func(t *testing.T) {
		us := session
		us.UserId = "invalid"
		err := us.IsValid()
		require.NotNil(t, err)
		require.Equal(t, "model.upload_session.is_valid.user_id.app_error", err.Id)
	})
	t.Run("invalid ChannelId should fail", func(t *testing.T) {
		us := session
		us.ChannelId = "invalid"
		err := us.IsValid()
		require.NotNil(t, err)
		require.Equal(t, "model.upload_session.is_valid.channel_id.app_error", err.Id)
	})
	t.Run("ChannelId is not validated if type is not attachment", func(t *testing.T) {
		us := session
		us.ChannelId = ""
		us.Type = UploadTypeImport
		err := us.IsValid()
		require.Nil(t, err)
	})
	t.Run("invalid Filename should fail", func(t *testing.T) {
		us := session
		us.Filename = ""
		err := us.IsValid()
		require.NotNil(t, err)
		require.Equal(t, "model.upload_session.is_valid.filename.app_error", err.Id)
	})
	t.Run("invalid Path should fail", func(t *testing.T) {
		us := session
		us.Path = ""
		err := us.IsValid()
		require.NotNil(t, err)
		require.Equal(t, "model.upload_session.is_valid.path.app_error", err.Id)
	})
	t.Run("invalid FileSize should fail", func(t *testing.T) {
		// Both zero and negative sizes are rejected.
		us := session
		us.FileSize = 0
		err := us.IsValid()
		require.NotNil(t, err)
		require.Equal(t, "model.upload_session.is_valid.file_size.app_error", err.Id)
		us.FileSize = -1
		err = us.IsValid()
		require.NotNil(t, err)
		require.Equal(t, "model.upload_session.is_valid.file_size.app_error", err.Id)
	})
	t.Run("invalid FileOffset should fail", func(t *testing.T) {
		// Offsets beyond FileSize and negative offsets are both invalid.
		us := session
		us.FileOffset = us.FileSize + 1
		err := us.IsValid()
		require.NotNil(t, err)
		require.Equal(t, "model.upload_session.is_valid.file_offset.app_error", err.Id)
		us.FileOffset = -1
		err = us.IsValid()
		require.NotNil(t, err)
		require.Equal(t, "model.upload_session.is_valid.file_offset.app_error", err.Id)
	})
}

View File

@@ -24,6 +24,7 @@ type FileBackend interface {
CopyFile(oldPath, newPath string) *model.AppError
MoveFile(oldPath, newPath string) *model.AppError
WriteFile(fr io.Reader, path string) (int64, *model.AppError)
AppendFile(fr io.Reader, path string) (int64, *model.AppError)
RemoveFile(path string) *model.AppError
ListDirectory(path string) (*[]string, *model.AppError)

View File

@@ -308,3 +308,56 @@ func (s *FileBackendTestSuite) TestRemoveDirectory() {
_, err = s.backend.ReadFile("tests2/asdf")
s.Error(err)
}
// TestAppendFile verifies FileBackend.AppendFile against the suite's
// backend: appending to a missing file fails, and successive appends to
// an existing file accumulate data in order.
func (s *FileBackendTestSuite) TestAppendFile() {
	s.Run("should fail if target file is missing", func() {
		path := "tests/" + model.NewId()
		b := make([]byte, 1024)
		written, err := s.backend.AppendFile(bytes.NewReader(b), path)
		s.Error(err)
		s.Zero(written)
	})
	s.Run("should correctly append the data", func() {
		// First part needs to be at least 5MB for the S3 implementation to work.
		size := 5 * 1024 * 1024
		b := make([]byte, size)
		for i := range b {
			b[i] = 'A'
		}
		path := "tests/" + model.NewId()
		written, err := s.backend.WriteFile(bytes.NewReader(b), path)
		s.Nil(err)
		s.EqualValues(len(b), written)
		defer s.backend.RemoveFile(path)
		// First append: 1KB of 'B' after the initial 'A' block.
		b2 := make([]byte, 1024)
		for i := range b2 {
			b2[i] = 'B'
		}
		written, err = s.backend.AppendFile(bytes.NewReader(b2), path)
		s.Nil(err)
		s.EqualValues(int64(len(b2)), written)
		read, err := s.backend.ReadFile(path)
		s.Nil(err)
		s.EqualValues(len(b)+len(b2), len(read))
		s.EqualValues(append(b, b2...), read)
		// Second append: another 1KB of 'C', confirming repeated appends work.
		b3 := make([]byte, 1024)
		for i := range b3 {
			b3[i] = 'C'
		}
		written, err = s.backend.AppendFile(bytes.NewReader(b3), path)
		s.Nil(err)
		s.EqualValues(int64(len(b3)), written)
		read, err = s.backend.ReadFile(path)
		s.Nil(err)
		s.EqualValues(len(b)+len(b2)+len(b3), len(read))
		s.EqualValues(append(append(b, b2...), b3...), read)
	})
}

View File

@@ -103,6 +103,23 @@ func writeFileLocally(fr io.Reader, path string) (int64, *model.AppError) {
return written, nil
}
// AppendFile appends the contents of fr to the existing file at path,
// returning the number of bytes written. The target file must already
// exist; appending to a missing file is an error.
func (b *LocalFileBackend) AppendFile(fr io.Reader, path string) (int64, *model.AppError) {
	target := filepath.Join(b.directory, path)
	if _, err := os.Stat(target); err != nil {
		return 0, model.NewAppError("AppendFile", "api.file.append_file.no_exist.app_error", nil, err.Error(), http.StatusInternalServerError)
	}
	f, err := os.OpenFile(target, os.O_WRONLY|os.O_APPEND, 0600)
	if err != nil {
		return 0, model.NewAppError("AppendFile", "api.file.append_file.opening.app_error", nil, err.Error(), http.StatusInternalServerError)
	}
	defer f.Close()
	n, err := io.Copy(f, fr)
	if err != nil {
		// Report the partial byte count alongside the write error.
		return n, model.NewAppError("AppendFile", "api.file.append_file.writing.app_error", nil, err.Error(), http.StatusInternalServerError)
	}
	return n, nil
}
func (b *LocalFileBackend) RemoveFile(path string) *model.AppError {
if err := os.Remove(filepath.Join(b.directory, path)); err != nil {
return model.NewAppError("RemoveFile", "utils.file.remove_file.local.app_error", nil, err.Error(), http.StatusInternalServerError)

View File

@@ -19,6 +19,29 @@ type FileBackend struct {
mock.Mock
}
// AppendFile provides a mock function with given fields: fr, path
// NOTE(review): this mock appears to be generated (mockery-style); manual
// edits will likely be overwritten on regeneration.
func (_m *FileBackend) AppendFile(fr io.Reader, path string) (int64, *model.AppError) {
	ret := _m.Called(fr, path)
	// First return value: bytes written, from a configured func or a
	// literal value.
	var r0 int64
	if rf, ok := ret.Get(0).(func(io.Reader, string) int64); ok {
		r0 = rf(fr, path)
	} else {
		r0 = ret.Get(0).(int64)
	}
	// Second return value: optional *model.AppError (nil stays nil).
	var r1 *model.AppError
	if rf, ok := ret.Get(1).(func(io.Reader, string) *model.AppError); ok {
		r1 = rf(fr, path)
	} else {
		if ret.Get(1) != nil {
			r1 = ret.Get(1).(*model.AppError)
		}
	}
	return r0, r1
}
// CopyFile provides a mock function with given fields: oldPath, newPath
func (_m *FileBackend) CopyFile(oldPath string, newPath string) *model.AppError {
ret := _m.Called(oldPath, newPath)

View File

@@ -4,7 +4,6 @@
package filesstore
import (
"bytes"
"context"
"errors"
"io"
@@ -218,12 +217,7 @@ func (b *S3FileBackend) WriteFile(fr io.Reader, path string) (int64, *model.AppE
}
options := s3PutOptions(b.encrypt, contentType)
var buf bytes.Buffer
_, err = buf.ReadFrom(fr)
if err != nil {
return 0, model.NewAppError("WriteFile", "api.file.write_file.s3.app_error", nil, err.Error(), http.StatusInternalServerError)
}
info, err := s3Clnt.PutObject(context.Background(), b.bucket, path, &buf, int64(buf.Len()), options)
info, err := s3Clnt.PutObject(context.Background(), b.bucket, path, fr, -1, options)
if err != nil {
return info.Size, model.NewAppError("WriteFile", "api.file.write_file.s3.app_error", nil, err.Error(), http.StatusInternalServerError)
}
@@ -231,6 +225,65 @@ func (b *S3FileBackend) WriteFile(fr io.Reader, path string) (int64, *model.AppE
return info.Size, nil
}
// AppendFile appends the data read from fr to the existing object at
// path. Since S3 has no native append, the new data is uploaded as a
// temporary ".part" object which is then server-side composed with the
// original object. On success it returns the number of appended bytes.
//
// Fix: the original consumed info.Size from PutObject before checking its
// error, so a failed put could still trigger the compose path; the error
// is now checked first.
func (b *S3FileBackend) AppendFile(fr io.Reader, path string) (int64, *model.AppError) {
	s3Clnt, err := b.s3New()
	if err != nil {
		return 0, model.NewAppError("AppendFile", "api.file.append_file.s3.app_error", nil, err.Error(), http.StatusInternalServerError)
	}
	fp := filepath.Join(b.pathPrefix, path)
	// The target object must already exist to be appended to.
	if _, err = s3Clnt.StatObject(context.Background(), b.bucket, fp, s3.StatObjectOptions{}); err != nil {
		return 0, model.NewAppError("AppendFile", "api.file.append_file.s3.app_error", nil, err.Error(), http.StatusInternalServerError)
	}
	var contentType string
	if ext := filepath.Ext(fp); model.IsFileExtImage(ext) {
		contentType = model.GetImageMimeType(ext)
	} else {
		contentType = "binary/octet-stream"
	}
	var sse encrypt.ServerSide
	if b.encrypt {
		sse = encrypt.NewSSE()
	}
	options := s3.PutObjectOptions{
		ContentType:          contentType,
		ServerSideEncryption: sse,
	}
	// Upload the new data as a temporary part object; it is removed once
	// the compose (or a failure) is done.
	partName := fp + ".part"
	info, err := s3Clnt.PutObject(context.Background(), b.bucket, partName, fr, -1, options)
	defer s3Clnt.RemoveObject(context.Background(), b.bucket, partName, s3.RemoveObjectOptions{})
	if err != nil {
		return 0, model.NewAppError("AppendFile", "api.file.append_file.s3.app_error", nil, err.Error(), http.StatusInternalServerError)
	}
	if info.Size == 0 {
		// Nothing was uploaded (e.g. an empty reader); keep the original
		// behavior of reporting this as an append failure.
		return 0, model.NewAppError("AppendFile", "api.file.append_file.s3.app_error", nil, "", http.StatusInternalServerError)
	}
	// Server-side concatenate the original object with the uploaded part.
	src1Opts := s3.CopySrcOptions{
		Bucket: b.bucket,
		Object: fp,
	}
	src2Opts := s3.CopySrcOptions{
		Bucket: b.bucket,
		Object: partName,
	}
	dstOpts := s3.CopyDestOptions{
		Bucket:     b.bucket,
		Object:     fp,
		Encryption: sse,
	}
	if _, err = s3Clnt.ComposeObject(context.Background(), dstOpts, src1Opts, src2Opts); err != nil {
		return 0, model.NewAppError("AppendFile", "api.file.append_file.s3.app_error", nil, err.Error(), http.StatusInternalServerError)
	}
	return info.Size, nil
}
func (b *S3FileBackend) RemoveFile(path string) *model.AppError {
s3Clnt, err := b.s3New()
if err != nil {

View File

@@ -45,6 +45,7 @@ type OpenTracingLayer struct {
TeamStore store.TeamStore
TermsOfServiceStore store.TermsOfServiceStore
TokenStore store.TokenStore
UploadSessionStore store.UploadSessionStore
UserStore store.UserStore
UserAccessTokenStore store.UserAccessTokenStore
UserTermsOfServiceStore store.UserTermsOfServiceStore
@@ -159,6 +160,10 @@ func (s *OpenTracingLayer) Token() store.TokenStore {
return s.TokenStore
}
// UploadSession returns the tracing-wrapped UploadSessionStore.
func (s *OpenTracingLayer) UploadSession() store.UploadSessionStore {
	return s.UploadSessionStore
}
func (s *OpenTracingLayer) User() store.UserStore {
return s.UserStore
}
@@ -310,6 +315,11 @@ type OpenTracingLayerTokenStore struct {
Root *OpenTracingLayer
}
// OpenTracingLayerUploadSessionStore wraps an UploadSessionStore, adding
// an OpenTracing span around each call.
type OpenTracingLayerUploadSessionStore struct {
	store.UploadSessionStore
	Root *OpenTracingLayer
}
type OpenTracingLayerUserStore struct {
store.UserStore
Root *OpenTracingLayer
@@ -7559,6 +7569,96 @@ func (s *OpenTracingLayerTokenStore) Save(recovery *model.Token) error {
return err
}
// Delete wraps UploadSessionStore.Delete in an OpenTracing span, marking
// the span as errored if the underlying call fails.
// NOTE(review): this layer appears to be generated ("Regenerate store
// layers"); manual edits will be overwritten on regeneration.
func (s *OpenTracingLayerUploadSessionStore) Delete(id string) error {
	origCtx := s.Root.Store.Context()
	span, newCtx := tracing.StartSpanWithParentByContext(s.Root.Store.Context(), "UploadSessionStore.Delete")
	// Install the span's context for the duration of the call and restore
	// the original afterwards.
	s.Root.Store.SetContext(newCtx)
	defer func() {
		s.Root.Store.SetContext(origCtx)
	}()
	defer span.Finish()
	err := s.UploadSessionStore.Delete(id)
	if err != nil {
		span.LogFields(spanlog.Error(err))
		ext.Error.Set(span, true)
	}
	return err
}
// Get wraps UploadSessionStore.Get in an OpenTracing span, marking the
// span as errored if the underlying call fails.
func (s *OpenTracingLayerUploadSessionStore) Get(id string) (*model.UploadSession, error) {
	origCtx := s.Root.Store.Context()
	span, newCtx := tracing.StartSpanWithParentByContext(s.Root.Store.Context(), "UploadSessionStore.Get")
	// Install the span's context for the duration of the call and restore
	// the original afterwards.
	s.Root.Store.SetContext(newCtx)
	defer func() {
		s.Root.Store.SetContext(origCtx)
	}()
	defer span.Finish()
	result, err := s.UploadSessionStore.Get(id)
	if err != nil {
		span.LogFields(spanlog.Error(err))
		ext.Error.Set(span, true)
	}
	return result, err
}
// GetForUser wraps UploadSessionStore.GetForUser in an OpenTracing span,
// marking the span as errored if the underlying call fails.
func (s *OpenTracingLayerUploadSessionStore) GetForUser(userId string) ([]*model.UploadSession, error) {
	origCtx := s.Root.Store.Context()
	span, newCtx := tracing.StartSpanWithParentByContext(s.Root.Store.Context(), "UploadSessionStore.GetForUser")
	// Install the span's context for the duration of the call and restore
	// the original afterwards.
	s.Root.Store.SetContext(newCtx)
	defer func() {
		s.Root.Store.SetContext(origCtx)
	}()
	defer span.Finish()
	result, err := s.UploadSessionStore.GetForUser(userId)
	if err != nil {
		span.LogFields(spanlog.Error(err))
		ext.Error.Set(span, true)
	}
	return result, err
}
// Save wraps UploadSessionStore.Save in an OpenTracing span, marking the
// span as errored if the underlying call fails.
func (s *OpenTracingLayerUploadSessionStore) Save(session *model.UploadSession) (*model.UploadSession, error) {
	origCtx := s.Root.Store.Context()
	span, newCtx := tracing.StartSpanWithParentByContext(s.Root.Store.Context(), "UploadSessionStore.Save")
	// Install the span's context for the duration of the call and restore
	// the original afterwards.
	s.Root.Store.SetContext(newCtx)
	defer func() {
		s.Root.Store.SetContext(origCtx)
	}()
	defer span.Finish()
	result, err := s.UploadSessionStore.Save(session)
	if err != nil {
		span.LogFields(spanlog.Error(err))
		ext.Error.Set(span, true)
	}
	return result, err
}
// Update wraps UploadSessionStore.Update in an OpenTracing span, marking
// the span as errored if the underlying call fails.
func (s *OpenTracingLayerUploadSessionStore) Update(session *model.UploadSession) error {
	origCtx := s.Root.Store.Context()
	span, newCtx := tracing.StartSpanWithParentByContext(s.Root.Store.Context(), "UploadSessionStore.Update")
	// Install the span's context for the duration of the call and restore
	// the original afterwards.
	s.Root.Store.SetContext(newCtx)
	defer func() {
		s.Root.Store.SetContext(origCtx)
	}()
	defer span.Finish()
	err := s.UploadSessionStore.Update(session)
	if err != nil {
		span.LogFields(spanlog.Error(err))
		ext.Error.Set(span, true)
	}
	return err
}
func (s *OpenTracingLayerUserStore) AnalyticsActiveCount(time int64, options model.UserCountOptions) (int64, *model.AppError) {
origCtx := s.Root.Store.Context()
span, newCtx := tracing.StartSpanWithParentByContext(s.Root.Store.Context(), "UserStore.AnalyticsActiveCount")
@@ -9494,6 +9594,7 @@ func New(childStore store.Store, ctx context.Context) *OpenTracingLayer {
newStore.TeamStore = &OpenTracingLayerTeamStore{TeamStore: childStore.Team(), Root: &newStore}
newStore.TermsOfServiceStore = &OpenTracingLayerTermsOfServiceStore{TermsOfServiceStore: childStore.TermsOfService(), Root: &newStore}
newStore.TokenStore = &OpenTracingLayerTokenStore{TokenStore: childStore.Token(), Root: &newStore}
newStore.UploadSessionStore = &OpenTracingLayerUploadSessionStore{UploadSessionStore: childStore.UploadSession(), Root: &newStore}
newStore.UserStore = &OpenTracingLayerUserStore{UserStore: childStore.User(), Root: &newStore}
newStore.UserAccessTokenStore = &OpenTracingLayerUserAccessTokenStore{UserAccessTokenStore: childStore.UserAccessToken(), Root: &newStore}
newStore.UserTermsOfServiceStore = &OpenTracingLayerUserTermsOfServiceStore{UserTermsOfServiceStore: childStore.UserTermsOfService(), Root: &newStore}

View File

@@ -47,6 +47,7 @@ type RetryLayer struct {
TeamStore store.TeamStore
TermsOfServiceStore store.TermsOfServiceStore
TokenStore store.TokenStore
UploadSessionStore store.UploadSessionStore
UserStore store.UserStore
UserAccessTokenStore store.UserAccessTokenStore
UserTermsOfServiceStore store.UserTermsOfServiceStore
@@ -161,6 +162,10 @@ func (s *RetryLayer) Token() store.TokenStore {
return s.TokenStore
}
// UploadSession returns the retry-wrapped UploadSessionStore.
func (s *RetryLayer) UploadSession() store.UploadSessionStore {
	return s.UploadSessionStore
}
func (s *RetryLayer) User() store.UserStore {
return s.UserStore
}
@@ -312,6 +317,11 @@ type RetryLayerTokenStore struct {
Root *RetryLayer
}
// RetryLayerUploadSessionStore wraps an UploadSessionStore and retries calls
// that fail with repeatable (transient transaction) errors.
type RetryLayerUploadSessionStore struct {
	store.UploadSessionStore
	Root *RetryLayer
}
type RetryLayerUserStore struct {
store.UserStore
Root *RetryLayer
@@ -6542,6 +6552,106 @@ func (s *RetryLayerTokenStore) Save(recovery *model.Token) error {
}
// Delete retries the wrapped Delete for up to 3 attempts when the error is a
// repeatable (transient transaction) failure.
// NOTE(review): this layer is code-generated; regenerate instead of editing.
func (s *RetryLayerUploadSessionStore) Delete(id string) error {
	tries := 0
	for {
		err := s.UploadSessionStore.Delete(id)
		if err == nil {
			return nil
		}
		// Non-repeatable errors are returned immediately.
		if !isRepeatableError(err) {
			return err
		}
		tries++
		if tries >= 3 {
			err = errors.Wrap(err, "giving up after 3 consecutive repeatable transaction failures")
			return err
		}
	}
}
// Get retries the wrapped Get for up to 3 attempts when the error is a
// repeatable (transient transaction) failure.
func (s *RetryLayerUploadSessionStore) Get(id string) (*model.UploadSession, error) {
	tries := 0
	for {
		result, err := s.UploadSessionStore.Get(id)
		if err == nil {
			return result, nil
		}
		if !isRepeatableError(err) {
			return result, err
		}
		tries++
		if tries >= 3 {
			err = errors.Wrap(err, "giving up after 3 consecutive repeatable transaction failures")
			return result, err
		}
	}
}
// GetForUser retries the wrapped GetForUser for up to 3 attempts when the
// error is a repeatable (transient transaction) failure.
func (s *RetryLayerUploadSessionStore) GetForUser(userId string) ([]*model.UploadSession, error) {
	tries := 0
	for {
		result, err := s.UploadSessionStore.GetForUser(userId)
		if err == nil {
			return result, nil
		}
		if !isRepeatableError(err) {
			return result, err
		}
		tries++
		if tries >= 3 {
			err = errors.Wrap(err, "giving up after 3 consecutive repeatable transaction failures")
			return result, err
		}
	}
}
// Save retries the wrapped Save for up to 3 attempts when the error is a
// repeatable (transient transaction) failure.
func (s *RetryLayerUploadSessionStore) Save(session *model.UploadSession) (*model.UploadSession, error) {
	tries := 0
	for {
		result, err := s.UploadSessionStore.Save(session)
		if err == nil {
			return result, nil
		}
		if !isRepeatableError(err) {
			return result, err
		}
		tries++
		if tries >= 3 {
			err = errors.Wrap(err, "giving up after 3 consecutive repeatable transaction failures")
			return result, err
		}
	}
}
// Update retries the wrapped Update for up to 3 attempts when the error is a
// repeatable (transient transaction) failure.
func (s *RetryLayerUploadSessionStore) Update(session *model.UploadSession) error {
	tries := 0
	for {
		err := s.UploadSessionStore.Update(session)
		if err == nil {
			return nil
		}
		if !isRepeatableError(err) {
			return err
		}
		tries++
		if tries >= 3 {
			err = errors.Wrap(err, "giving up after 3 consecutive repeatable transaction failures")
			return err
		}
	}
}
func (s *RetryLayerUserStore) AnalyticsActiveCount(time int64, options model.UserCountOptions) (int64, *model.AppError) {
return s.UserStore.AnalyticsActiveCount(time, options)
@@ -7782,6 +7892,7 @@ func New(childStore store.Store) *RetryLayer {
newStore.TeamStore = &RetryLayerTeamStore{TeamStore: childStore.Team(), Root: &newStore}
newStore.TermsOfServiceStore = &RetryLayerTermsOfServiceStore{TermsOfServiceStore: childStore.TermsOfService(), Root: &newStore}
newStore.TokenStore = &RetryLayerTokenStore{TokenStore: childStore.Token(), Root: &newStore}
newStore.UploadSessionStore = &RetryLayerUploadSessionStore{UploadSessionStore: childStore.UploadSession(), Root: &newStore}
newStore.UserStore = &RetryLayerUserStore{UserStore: childStore.User(), Root: &newStore}
newStore.UserAccessTokenStore = &RetryLayerUserAccessTokenStore{UserAccessTokenStore: childStore.UserAccessToken(), Root: &newStore}
newStore.UserTermsOfServiceStore = &RetryLayerUserTermsOfServiceStore{UserTermsOfServiceStore: childStore.UserTermsOfService(), Root: &newStore}

View File

@@ -25,6 +25,7 @@ func genStore() *mocks.Store {
mock.On("Compliance").Return(&mocks.ComplianceStore{})
mock.On("Emoji").Return(&mocks.EmojiStore{})
mock.On("FileInfo").Return(&mocks.FileInfoStore{})
mock.On("UploadSession").Return(&mocks.UploadSessionStore{})
mock.On("Group").Return(&mocks.GroupStore{})
mock.On("Job").Return(&mocks.JobStore{})
mock.On("License").Return(&mocks.LicenseStore{})

View File

@@ -92,6 +92,7 @@ type SqlStore interface {
Emoji() store.EmojiStore
Status() store.StatusStore
FileInfo() store.FileInfoStore
UploadSession() store.UploadSessionStore
Reaction() store.ReactionStore
Job() store.JobStore
Plugin() store.PluginStore

View File

@@ -89,6 +89,7 @@ type SqlSupplierStores struct {
emoji store.EmojiStore
status store.StatusStore
fileInfo store.FileInfoStore
uploadSession store.UploadSessionStore
reaction store.ReactionStore
job store.JobStore
userAccessToken store.UserAccessTokenStore
@@ -157,6 +158,7 @@ func NewSqlSupplier(settings model.SqlSettings, metrics einterfaces.MetricsInter
supplier.stores.emoji = newSqlEmojiStore(supplier, metrics)
supplier.stores.status = newSqlStatusStore(supplier)
supplier.stores.fileInfo = newSqlFileInfoStore(supplier, metrics)
supplier.stores.uploadSession = newSqlUploadSessionStore(supplier)
supplier.stores.job = newSqlJobStore(supplier)
supplier.stores.userAccessToken = newSqlUserAccessTokenStore(supplier)
supplier.stores.channelMemberHistory = newSqlChannelMemberHistoryStore(supplier)
@@ -202,6 +204,7 @@ func NewSqlSupplier(settings model.SqlSettings, metrics einterfaces.MetricsInter
supplier.stores.emoji.(*SqlEmojiStore).createIndexesIfNotExists()
supplier.stores.status.(*SqlStatusStore).createIndexesIfNotExists()
supplier.stores.fileInfo.(*SqlFileInfoStore).createIndexesIfNotExists()
supplier.stores.uploadSession.(*SqlUploadSessionStore).createIndexesIfNotExists()
supplier.stores.job.(*SqlJobStore).createIndexesIfNotExists()
supplier.stores.userAccessToken.(*SqlUserAccessTokenStore).createIndexesIfNotExists()
supplier.stores.plugin.(*SqlPluginStore).createIndexesIfNotExists()
@@ -1134,6 +1137,10 @@ func (ss *SqlSupplier) FileInfo() store.FileInfoStore {
return ss.stores.fileInfo
}
// UploadSession returns the SQL-backed UploadSessionStore.
func (ss *SqlSupplier) UploadSession() store.UploadSessionStore {
	return ss.stores.uploadSession
}
func (ss *SqlSupplier) Reaction() store.ReactionStore {
return ss.stores.reaction
}

View File

@@ -0,0 +1,133 @@
// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved.
// See LICENSE.txt for license information.
package sqlstore
import (
"database/sql"
"github.com/pkg/errors"
"github.com/mattermost/mattermost-server/v5/model"
"github.com/mattermost/mattermost-server/v5/store"
sq "github.com/Masterminds/squirrel"
)
// SqlUploadSessionStore is the SQL implementation of store.UploadSessionStore,
// persisting resumable upload sessions in the UploadSessions table.
type SqlUploadSessionStore struct {
	SqlStore
}
// newSqlUploadSessionStore registers the UploadSessions table mapping on every
// database connection and returns the SQL-backed UploadSessionStore.
func newSqlUploadSessionStore(sqlStore SqlStore) store.UploadSessionStore {
	st := &SqlUploadSessionStore{SqlStore: sqlStore}
	// Column size limits applied to the UploadSessions mapping on each connection.
	cols := []struct {
		name string
		size int
	}{
		{"Id", 26},
		{"Type", 32},
		{"UserId", 26},
		{"ChannelId", 26},
		{"Filename", 256},
		{"Path", 512},
	}
	for _, conn := range sqlStore.GetAllConns() {
		table := conn.AddTableWithName(model.UploadSession{}, "UploadSessions").SetKeys(false, "Id")
		for _, c := range cols {
			table.ColMap(c.name).SetMaxSize(c.size)
		}
	}
	return st
}
// createIndexesIfNotExists creates the secondary indexes used to look up
// upload sessions by type, creation time and owning user.
func (us SqlUploadSessionStore) createIndexesIfNotExists() {
	// The Type index previously reused the name idx_uploadsessions_user_id,
	// colliding with the UserId index below and silently preventing one of the
	// two from being created.
	us.CreateIndexIfNotExists("idx_uploadsessions_type", "UploadSessions", "Type")
	us.CreateIndexIfNotExists("idx_uploadsessions_create_at", "UploadSessions", "CreateAt")
	us.CreateIndexIfNotExists("idx_uploadsessions_user_id", "UploadSessions", "UserId")
}
// Save validates and inserts a new upload session, returning it after PreSave
// has populated its pre-save fields.
func (us SqlUploadSessionStore) Save(session *model.UploadSession) (*model.UploadSession, error) {
	if session == nil {
		return nil, errors.New("SqlUploadSessionStore.Save: session should not be nil")
	}
	session.PreSave()
	err := session.IsValid()
	if err != nil {
		return nil, errors.Wrap(err, "SqlUploadSessionStore.Save: validation failed")
	}
	err = us.GetMaster().Insert(session)
	if err != nil {
		return nil, errors.Wrap(err, "SqlUploadSessionStore.Save: failed to insert")
	}
	return session, nil
}
// Update validates the session and writes it back to the database, returning
// ErrNotFound when the driver reports sql.ErrNoRows.
// NOTE(review): the rows-affected count from Update is discarded; confirm the
// underlying layer actually returns sql.ErrNoRows for a missing row —
// otherwise updating a non-existing session may return nil instead of
// ErrNotFound.
func (us SqlUploadSessionStore) Update(session *model.UploadSession) error {
	if session == nil {
		return errors.New("SqlUploadSessionStore.Update: session should not be nil")
	}
	if err := session.IsValid(); err != nil {
		return errors.Wrap(err, "SqlUploadSessionStore.Update: validation failed")
	}
	if _, err := us.GetMaster().Update(session); err != nil {
		if err == sql.ErrNoRows {
			return store.NewErrNotFound("UploadSession", session.Id)
		}
		return errors.Wrapf(err, "SqlUploadSessionStore.Update: failed to update session with id=%s", session.Id)
	}
	return nil
}
// Get fetches the upload session with the given id from a replica, returning
// ErrNotFound when no row matches.
func (us SqlUploadSessionStore) Get(id string) (*model.UploadSession, error) {
	if !model.IsValidId(id) {
		return nil, errors.New("SqlUploadSessionStore.Get: id is not valid")
	}
	queryString, args, err := us.getQueryBuilder().
		Select("*").
		From("UploadSessions").
		Where(sq.Eq{"Id": id}).
		ToSql()
	if err != nil {
		return nil, errors.Wrap(err, "SqlUploadSessionStore.Get: failed to build query")
	}
	var session model.UploadSession
	err = us.GetReplica().SelectOne(&session, queryString, args...)
	switch {
	case err == sql.ErrNoRows:
		return nil, store.NewErrNotFound("UploadSession", id)
	case err != nil:
		return nil, errors.Wrapf(err, "SqlUploadSessionStore.Get: failed to select session with id=%s", id)
	}
	return &session, nil
}
// GetForUser returns all upload sessions owned by the given user, ordered by
// creation time ascending.
func (us SqlUploadSessionStore) GetForUser(userId string) ([]*model.UploadSession, error) {
	if !model.IsValidId(userId) {
		return nil, errors.New("SqlUploadSessionStore.GetForUser: userId is not valid")
	}
	queryString, args, err := us.getQueryBuilder().
		Select("*").
		From("UploadSessions").
		Where(sq.Eq{"UserId": userId}).
		OrderBy("CreateAt ASC").
		ToSql()
	if err != nil {
		return nil, errors.Wrap(err, "SqlUploadSessionStore.GetForUser: failed to build query")
	}
	var sessions []*model.UploadSession
	if _, err = us.GetReplica().Select(&sessions, queryString, args...); err != nil {
		return nil, errors.Wrap(err, "SqlUploadSessionStore.GetForUser: failed to select")
	}
	return sessions, nil
}
// Delete removes the upload session with the given id. The rows-affected
// count is ignored, so deleting an id that does not exist is not an error.
func (us SqlUploadSessionStore) Delete(id string) error {
	if !model.IsValidId(id) {
		return errors.New("SqlUploadSessionStore.Delete: id is not valid")
	}
	queryString, args, err := us.getQueryBuilder().
		Delete("UploadSessions").
		Where(sq.Eq{"Id": id}).
		ToSql()
	if err != nil {
		return errors.Wrap(err, "SqlUploadSessionStore.Delete: failed to build query")
	}
	_, err = us.GetMaster().Exec(queryString, args...)
	if err != nil {
		return errors.Wrap(err, "SqlUploadSessionStore.Delete: failed to delete")
	}
	return nil
}

View File

@@ -0,0 +1,14 @@
// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved.
// See LICENSE.txt for license information.
package sqlstore
import (
"testing"
"github.com/mattermost/mattermost-server/v5/store/storetest"
)
// TestUploadSessionStore runs the shared storetest UploadSessionStore suite
// against the SQL store implementation.
func TestUploadSessionStore(t *testing.T) {
	StoreTest(t, storetest.TestUploadSessionStore)
}

View File

@@ -41,6 +41,7 @@ type Store interface {
Emoji() EmojiStore
Status() StatusStore
FileInfo() FileInfoStore
UploadSession() UploadSessionStore
Reaction() ReactionStore
Role() RoleStore
Scheme() SchemeStore
@@ -549,6 +550,14 @@ type FileInfoStore interface {
ClearCaches()
}
type UploadSessionStore interface {
Save(session *model.UploadSession) (*model.UploadSession, error)
Update(session *model.UploadSession) error
Get(id string) (*model.UploadSession, error)
GetForUser(userId string) ([]*model.UploadSession, error)
Delete(id string) error
}
type ReactionStore interface {
Save(reaction *model.Reaction) (*model.Reaction, error)
Delete(reaction *model.Reaction) (*model.Reaction, error)

View File

@@ -845,6 +845,22 @@ func (_m *SqlStore) UnlockFromMaster() {
_m.Called()
}
// UploadSession provides a mock function with given fields:
// NOTE(review): generated by mockery; regenerate rather than hand-editing.
func (_m *SqlStore) UploadSession() store.UploadSessionStore {
	ret := _m.Called()
	var r0 store.UploadSessionStore
	if rf, ok := ret.Get(0).(func() store.UploadSessionStore); ok {
		r0 = rf()
	} else {
		if ret.Get(0) != nil {
			r0 = ret.Get(0).(store.UploadSessionStore)
		}
	}
	return r0
}
// User provides a mock function with given fields:
func (_m *SqlStore) User() store.UserStore {
ret := _m.Called()

View File

@@ -596,6 +596,22 @@ func (_m *Store) UnlockFromMaster() {
_m.Called()
}
// UploadSession provides a mock function with given fields:
// NOTE(review): generated by mockery; regenerate rather than hand-editing.
func (_m *Store) UploadSession() store.UploadSessionStore {
	ret := _m.Called()
	var r0 store.UploadSessionStore
	if rf, ok := ret.Get(0).(func() store.UploadSessionStore); ok {
		r0 = rf()
	} else {
		if ret.Get(0) != nil {
			r0 = ret.Get(0).(store.UploadSessionStore)
		}
	}
	return r0
}
// User provides a mock function with given fields:
func (_m *Store) User() store.UserStore {
ret := _m.Called()

View File

@@ -0,0 +1,112 @@
// Code generated by mockery v1.0.0. DO NOT EDIT.
// Regenerate this file using `make store-mocks`.
package mocks
import (
model "github.com/mattermost/mattermost-server/v5/model"
mock "github.com/stretchr/testify/mock"
)
// UploadSessionStore is an autogenerated mock type for the UploadSessionStore type
// NOTE(review): generated by mockery v1.0.0 — regenerate with `make store-mocks`
// rather than hand-editing.
type UploadSessionStore struct {
	mock.Mock
}
// Delete provides a mock function with given fields: id
func (_m *UploadSessionStore) Delete(id string) error {
	ret := _m.Called(id)
	var r0 error
	if rf, ok := ret.Get(0).(func(string) error); ok {
		r0 = rf(id)
	} else {
		r0 = ret.Error(0)
	}
	return r0
}
// Get provides a mock function with given fields: id
func (_m *UploadSessionStore) Get(id string) (*model.UploadSession, error) {
	ret := _m.Called(id)
	var r0 *model.UploadSession
	if rf, ok := ret.Get(0).(func(string) *model.UploadSession); ok {
		r0 = rf(id)
	} else {
		if ret.Get(0) != nil {
			r0 = ret.Get(0).(*model.UploadSession)
		}
	}
	var r1 error
	if rf, ok := ret.Get(1).(func(string) error); ok {
		r1 = rf(id)
	} else {
		r1 = ret.Error(1)
	}
	return r0, r1
}
// GetForUser provides a mock function with given fields: userId
func (_m *UploadSessionStore) GetForUser(userId string) ([]*model.UploadSession, error) {
	ret := _m.Called(userId)
	var r0 []*model.UploadSession
	if rf, ok := ret.Get(0).(func(string) []*model.UploadSession); ok {
		r0 = rf(userId)
	} else {
		if ret.Get(0) != nil {
			r0 = ret.Get(0).([]*model.UploadSession)
		}
	}
	var r1 error
	if rf, ok := ret.Get(1).(func(string) error); ok {
		r1 = rf(userId)
	} else {
		r1 = ret.Error(1)
	}
	return r0, r1
}
// Save provides a mock function with given fields: session
func (_m *UploadSessionStore) Save(session *model.UploadSession) (*model.UploadSession, error) {
	ret := _m.Called(session)
	var r0 *model.UploadSession
	if rf, ok := ret.Get(0).(func(*model.UploadSession) *model.UploadSession); ok {
		r0 = rf(session)
	} else {
		if ret.Get(0) != nil {
			r0 = ret.Get(0).(*model.UploadSession)
		}
	}
	var r1 error
	if rf, ok := ret.Get(1).(func(*model.UploadSession) error); ok {
		r1 = rf(session)
	} else {
		r1 = ret.Error(1)
	}
	return r0, r1
}
// Update provides a mock function with given fields: session
func (_m *UploadSessionStore) Update(session *model.UploadSession) error {
	ret := _m.Called(session)
	var r0 error
	if rf, ok := ret.Get(0).(func(*model.UploadSession) error); ok {
		r0 = rf(session)
	} else {
		r0 = ret.Error(0)
	}
	return r0
}

View File

@@ -35,6 +35,7 @@ type Store struct {
EmojiStore mocks.EmojiStore
StatusStore mocks.StatusStore
FileInfoStore mocks.FileInfoStore
UploadSessionStore mocks.UploadSessionStore
ReactionStore mocks.ReactionStore
JobStore mocks.JobStore
UserAccessTokenStore mocks.UserAccessTokenStore
@@ -71,6 +72,7 @@ func (s *Store) Token() store.TokenStore { return &s.T
func (s *Store) Emoji() store.EmojiStore { return &s.EmojiStore }
func (s *Store) Status() store.StatusStore { return &s.StatusStore }
func (s *Store) FileInfo() store.FileInfoStore { return &s.FileInfoStore }
func (s *Store) UploadSession() store.UploadSessionStore { return &s.UploadSessionStore }
func (s *Store) Reaction() store.ReactionStore { return &s.ReactionStore }
func (s *Store) Job() store.JobStore { return &s.JobStore }
func (s *Store) UserAccessToken() store.UserAccessTokenStore { return &s.UserAccessTokenStore }
@@ -121,6 +123,7 @@ func (s *Store) AssertExpectations(t mock.TestingT) bool {
&s.EmojiStore,
&s.StatusStore,
&s.FileInfoStore,
&s.UploadSessionStore,
&s.ReactionStore,
&s.JobStore,
&s.UserAccessTokenStore,

View File

@@ -0,0 +1,213 @@
// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved.
// See LICENSE.txt for license information.
package storetest
import (
"testing"
"time"
"github.com/mattermost/mattermost-server/v5/model"
"github.com/mattermost/mattermost-server/v5/store"
"github.com/stretchr/testify/require"
)
// TestUploadSessionStore runs the UploadSessionStore subtests against the
// given store implementation.
func TestUploadSessionStore(t *testing.T, ss store.Store) {
	t.Run("UploadSessionStoreSaveGet", func(t *testing.T) { testUploadSessionStoreSaveGet(t, ss) })
	t.Run("UploadSessionStoreUpdate", func(t *testing.T) { testUploadSessionStoreUpdate(t, ss) })
	t.Run("UploadSessionStoreGetForUser", func(t *testing.T) { testUploadSessionStoreGetForUser(t, ss) })
	t.Run("UploadSessionStoreDelete", func(t *testing.T) { testUploadSessionStoreDelete(t, ss) })
}
// testUploadSessionStoreSaveGet verifies that upload sessions can be saved
// and fetched back, covering nil, invalid and valid inputs.
func testUploadSessionStoreSaveGet(t *testing.T, ss store.Store) {
	// Shared across subtests: the valid session saved below is fetched again
	// in the final subtest.
	var sess *model.UploadSession
	t.Run("saving nil session should fail", func(t *testing.T) {
		got, err := ss.UploadSession().Save(nil)
		require.Error(t, err)
		require.Nil(t, got)
	})
	t.Run("saving empty session should fail", func(t *testing.T) {
		sess = &model.UploadSession{}
		got, err := ss.UploadSession().Save(sess)
		require.Error(t, err)
		require.Nil(t, got)
	})
	t.Run("saving valid session should succeed", func(t *testing.T) {
		sess = &model.UploadSession{
			Type:      model.UploadTypeAttachment,
			UserId:    model.NewId(),
			ChannelId: model.NewId(),
			Filename:  "test",
			FileSize:  1024,
			Path:      "/tmp/test",
		}
		got, err := ss.UploadSession().Save(sess)
		require.NoError(t, err)
		require.NotNil(t, got)
		require.NotEmpty(t, got)
	})
	t.Run("getting non-existing session should fail", func(t *testing.T) {
		got, err := ss.UploadSession().Get("fake")
		require.Error(t, err)
		require.Nil(t, got)
	})
	t.Run("getting existing session should succeed", func(t *testing.T) {
		got, err := ss.UploadSession().Get(sess.Id)
		require.NoError(t, err)
		require.NotNil(t, got)
		require.Equal(t, sess, got)
	})
}
// testUploadSessionStoreUpdate exercises UploadSessionStore.Update against
// nil, invalid, non-existing and existing sessions.
func testUploadSessionStoreUpdate(t *testing.T, ss store.Store) {
	session := &model.UploadSession{
		Type:      model.UploadTypeAttachment,
		UserId:    model.NewId(),
		ChannelId: model.NewId(),
		Filename:  "test",
		FileSize:  1024,
		Path:      "/tmp/test",
	}
	t.Run("updating nil session should fail", func(t *testing.T) {
		err := ss.UploadSession().Update(nil)
		require.Error(t, err)
	})
	t.Run("updating invalid session should fail", func(t *testing.T) {
		err := ss.UploadSession().Update(&model.UploadSession{})
		require.Error(t, err)
	})
	// NOTE(review): this subtest currently duplicates the "invalid session"
	// case above — an empty session fails validation before the store could
	// ever detect that it does not exist.
	t.Run("updating non-existing session should fail", func(t *testing.T) {
		err := ss.UploadSession().Update(&model.UploadSession{})
		require.Error(t, err)
	})
	t.Run("updating existing session should succeed", func(t *testing.T) {
		us, err := ss.UploadSession().Save(session)
		require.NoError(t, err)
		require.NotNil(t, us)
		require.NotEmpty(t, us)
		us.FileOffset = 512
		err = ss.UploadSession().Update(us)
		require.NoError(t, err)
		updated, err := ss.UploadSession().Get(us.Id)
		require.NoError(t, err)
		// Assert on the freshly fetched session, not the local copy: the
		// original asserted NotNil on `us`, which is trivially non-nil here.
		require.NotNil(t, updated)
		require.Equal(t, us, updated)
	})
}
// testUploadSessionStoreGetForUser verifies that GetForUser returns only the
// given user's sessions, ordered by creation time (oldest first).
func testUploadSessionStoreGetForUser(t *testing.T, ss store.Store) {
	userId := model.NewId()
	// Four sessions: index 1 belongs to a different user and must not be
	// returned by GetForUser(userId).
	sessions := []*model.UploadSession{
		{
			Type:      model.UploadTypeAttachment,
			UserId:    userId,
			ChannelId: model.NewId(),
			Filename:  "test0",
			FileSize:  1024,
			Path:      "/tmp/test0",
		},
		{
			Type:      model.UploadTypeAttachment,
			UserId:    model.NewId(),
			ChannelId: model.NewId(),
			Filename:  "test1",
			FileSize:  1024,
			Path:      "/tmp/test1",
		},
		{
			Type:      model.UploadTypeAttachment,
			UserId:    userId,
			ChannelId: model.NewId(),
			Filename:  "test2",
			FileSize:  1024,
			Path:      "/tmp/test2",
		},
		{
			Type:      model.UploadTypeAttachment,
			UserId:    userId,
			ChannelId: model.NewId(),
			Filename:  "test3",
			FileSize:  1024,
			Path:      "/tmp/test3",
		},
	}
	t.Run("getting invalid userId should fail", func(t *testing.T) {
		us, err := ss.UploadSession().GetForUser("invalidId")
		require.Error(t, err)
		require.Nil(t, us)
	})
	t.Run("should return no sessions", func(t *testing.T) {
		us, err := ss.UploadSession().GetForUser(userId)
		require.NoError(t, err)
		require.NotNil(t, us)
		require.Empty(t, us)
	})
	for i := 0; i < len(sessions); i++ {
		us, err := ss.UploadSession().Save(sessions[i])
		require.NoError(t, err)
		require.NotNil(t, us)
		require.NotEmpty(t, us)
		// We need this to make sure the ordering is consistent.
		time.Sleep(1 * time.Millisecond)
	}
	t.Run("should return existing sessions", func(t *testing.T) {
		// Only the three sessions owned by userId come back, in CreateAt order.
		us, err := ss.UploadSession().GetForUser(userId)
		require.NoError(t, err)
		require.NotNil(t, us)
		require.NotEmpty(t, us)
		require.Len(t, us, 3)
		require.Equal(t, sessions[0], us[0])
		require.Equal(t, sessions[2], us[1])
		require.Equal(t, sessions[3], us[2])
	})
}
// testUploadSessionStoreDelete verifies that a saved session can be deleted
// and is no longer retrievable afterwards.
func testUploadSessionStoreDelete(t *testing.T, ss store.Store) {
	session := &model.UploadSession{
		Id:        model.NewId(),
		Type:      model.UploadTypeAttachment,
		UserId:    model.NewId(),
		ChannelId: model.NewId(),
		Filename:  "test",
		FileSize:  1024,
		Path:      "/tmp/test",
	}
	t.Run("deleting invalid id should fail", func(t *testing.T) {
		err := ss.UploadSession().Delete("invalidId")
		require.Error(t, err)
	})
	t.Run("deleting existing session should succeed", func(t *testing.T) {
		us, err := ss.UploadSession().Save(session)
		require.NoError(t, err)
		require.NotNil(t, us)
		require.NotEmpty(t, us)
		err = ss.UploadSession().Delete(session.Id)
		require.NoError(t, err)
		// After deletion, Get must report a not-found error.
		us, err = ss.UploadSession().Get(us.Id)
		require.Error(t, err)
		require.Nil(t, us)
		require.IsType(t, &store.ErrNotFound{}, err)
	})
}

View File

@@ -45,6 +45,7 @@ type TimerLayer struct {
TeamStore store.TeamStore
TermsOfServiceStore store.TermsOfServiceStore
TokenStore store.TokenStore
UploadSessionStore store.UploadSessionStore
UserStore store.UserStore
UserAccessTokenStore store.UserAccessTokenStore
UserTermsOfServiceStore store.UserTermsOfServiceStore
@@ -159,6 +160,10 @@ func (s *TimerLayer) Token() store.TokenStore {
return s.TokenStore
}
// UploadSession returns the timer-wrapped UploadSessionStore.
func (s *TimerLayer) UploadSession() store.UploadSessionStore {
	return s.UploadSessionStore
}
func (s *TimerLayer) User() store.UserStore {
return s.UserStore
}
@@ -310,6 +315,11 @@ type TimerLayerTokenStore struct {
Root *TimerLayer
}
// TimerLayerUploadSessionStore wraps an UploadSessionStore and records each
// call's duration via the root layer's metrics.
type TimerLayerUploadSessionStore struct {
	store.UploadSessionStore
	Root *TimerLayer
}
type TimerLayerUserStore struct {
store.UserStore
Root *TimerLayer
@@ -6825,6 +6835,86 @@ func (s *TimerLayerTokenStore) Save(recovery *model.Token) error {
return err
}
// Delete calls the wrapped Delete and reports its duration and success flag
// to the metrics backend when one is configured.
// NOTE(review): this layer is code-generated; regenerate instead of editing.
func (s *TimerLayerUploadSessionStore) Delete(id string) error {
	start := timemodule.Now()
	err := s.UploadSessionStore.Delete(id)
	elapsed := float64(timemodule.Since(start)) / float64(timemodule.Second)
	if s.Root.Metrics != nil {
		success := "false"
		if err == nil {
			success = "true"
		}
		s.Root.Metrics.ObserveStoreMethodDuration("UploadSessionStore.Delete", success, elapsed)
	}
	return err
}
// Get calls the wrapped Get and reports its duration and success flag to the
// metrics backend when one is configured.
func (s *TimerLayerUploadSessionStore) Get(id string) (*model.UploadSession, error) {
	start := timemodule.Now()
	result, err := s.UploadSessionStore.Get(id)
	elapsed := float64(timemodule.Since(start)) / float64(timemodule.Second)
	if s.Root.Metrics != nil {
		success := "false"
		if err == nil {
			success = "true"
		}
		s.Root.Metrics.ObserveStoreMethodDuration("UploadSessionStore.Get", success, elapsed)
	}
	return result, err
}
// GetForUser calls the wrapped GetForUser and reports its duration and
// success flag to the metrics backend when one is configured.
func (s *TimerLayerUploadSessionStore) GetForUser(userId string) ([]*model.UploadSession, error) {
	start := timemodule.Now()
	result, err := s.UploadSessionStore.GetForUser(userId)
	elapsed := float64(timemodule.Since(start)) / float64(timemodule.Second)
	if s.Root.Metrics != nil {
		success := "false"
		if err == nil {
			success = "true"
		}
		s.Root.Metrics.ObserveStoreMethodDuration("UploadSessionStore.GetForUser", success, elapsed)
	}
	return result, err
}
// Save calls the wrapped Save and reports its duration and success flag to
// the metrics backend when one is configured.
func (s *TimerLayerUploadSessionStore) Save(session *model.UploadSession) (*model.UploadSession, error) {
	start := timemodule.Now()
	result, err := s.UploadSessionStore.Save(session)
	elapsed := float64(timemodule.Since(start)) / float64(timemodule.Second)
	if s.Root.Metrics != nil {
		success := "false"
		if err == nil {
			success = "true"
		}
		s.Root.Metrics.ObserveStoreMethodDuration("UploadSessionStore.Save", success, elapsed)
	}
	return result, err
}
// Update calls the wrapped Update and reports its duration and success flag
// to the metrics backend when one is configured.
func (s *TimerLayerUploadSessionStore) Update(session *model.UploadSession) error {
	start := timemodule.Now()
	err := s.UploadSessionStore.Update(session)
	elapsed := float64(timemodule.Since(start)) / float64(timemodule.Second)
	if s.Root.Metrics != nil {
		success := "false"
		if err == nil {
			success = "true"
		}
		s.Root.Metrics.ObserveStoreMethodDuration("UploadSessionStore.Update", success, elapsed)
	}
	return err
}
func (s *TimerLayerUserStore) AnalyticsActiveCount(time int64, options model.UserCountOptions) (int64, *model.AppError) {
start := timemodule.Now()
@@ -8588,6 +8678,7 @@ func New(childStore store.Store, metrics einterfaces.MetricsInterface) *TimerLay
newStore.TeamStore = &TimerLayerTeamStore{TeamStore: childStore.Team(), Root: &newStore}
newStore.TermsOfServiceStore = &TimerLayerTermsOfServiceStore{TermsOfServiceStore: childStore.TermsOfService(), Root: &newStore}
newStore.TokenStore = &TimerLayerTokenStore{TokenStore: childStore.Token(), Root: &newStore}
newStore.UploadSessionStore = &TimerLayerUploadSessionStore{UploadSessionStore: childStore.UploadSession(), Root: &newStore}
newStore.UserStore = &TimerLayerUserStore{UserStore: childStore.User(), Root: &newStore}
newStore.UserAccessTokenStore = &TimerLayerUserAccessTokenStore{UserAccessTokenStore: childStore.UserAccessToken(), Root: &newStore}
newStore.UserTermsOfServiceStore = &TimerLayerUserTermsOfServiceStore{UserTermsOfServiceStore: childStore.UserTermsOfService(), Root: &newStore}

View File

@@ -386,6 +386,18 @@ func (c *Context) RequireFileId() *Context {
return c
}
// RequireUploadId validates the upload_id URL parameter, setting an
// invalid-URL-param error on the context when it is not a valid id. It is a
// no-op if the context already carries an error.
func (c *Context) RequireUploadId() *Context {
	if c.Err == nil && !model.IsValidId(c.Params.UploadId) {
		c.SetInvalidUrlParam("upload_id")
	}
	return c
}
func (c *Context) RequireFilename() *Context {
if c.Err != nil {
return c

View File

@@ -31,6 +31,7 @@ type Params struct {
PostId string
FileId string
Filename string
UploadId string
PluginId string
CommandId string
HookId string
@@ -123,6 +124,10 @@ func ParamsFromRequest(r *http.Request) *Params {
params.Filename = query.Get("filename")
if val, ok := props["upload_id"]; ok {
params.UploadId = val
}
if val, ok := props["plugin_id"]; ok {
params.PluginId = val
}