fixed initialism errors in accordance with golang conventions in 7 files. (#24287)
Commit 2e449413e8 (parent 4ed3dc81bc)
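The rename follows standard Go practice for sentinel errors: exported error values get an Err prefix so they read as errors at the call site, and error strings start lower-case with no trailing punctuation because they are usually embedded in larger messages. A minimal sketch of the convention (the IsTooLarge helper is illustrative, not part of the commit):

package example

import "errors"

// Before: SizeLimitExceeded read like an ordinary exported value and
// capitalized its message. After: the Err prefix marks it as a sentinel
// and the message is lower-case.
var ErrSizeLimitExceeded = errors.New("size limit exceeded")

// IsTooLarge is a hypothetical helper; callers compare with errors.Is so
// the sentinel still matches after wrapping.
func IsTooLarge(err error) bool {
	return errors.Is(err, ErrSizeLimitExceeded)
}

The hunks below apply exactly this rename to two sentinels, utils.SizeLimitExceeded and httpservice.AddressForbidden, and to their call sites.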
@@ -4189,7 +4189,7 @@ func TestImportImportEmoji(t *testing.T) {
 	data = imports.EmojiImportData{Name: ptrStr(model.NewId()), Image: ptrStr(largeImage)}
 	appErr = th.App.importEmoji(th.Context, &data, false)
 	require.NotNil(t, appErr)
-	require.ErrorIs(t, appErr.Unwrap(), utils.SizeLimitExceeded)
+	require.ErrorIs(t, appErr.Unwrap(), utils.ErrSizeLimitExceeded)
 }
 
 func TestImportAttachment(t *testing.T) {
@@ -2473,7 +2473,7 @@ func TestGetLinkMetadata(t *testing.T) {
 		assert.Nil(t, img)
 		assert.Error(t, err)
 		assert.IsType(t, &url.Error{}, err)
-		assert.Equal(t, httpservice.AddressForbidden, err.(*url.Error).Err)
+		assert.Equal(t, httpservice.ErrAddressForbidden, err.(*url.Error).Err)
 
 		requestURL = th.App.GetSiteURL() + "/api/v4/image?url=" + url.QueryEscape(requestURL)
 
@@ -8,7 +8,7 @@ import (
 	"io"
 )
 
-var SizeLimitExceeded = errors.New("Size limit exceeded")
+var ErrSizeLimitExceeded = errors.New("size limit exceeded")
 
 type LimitedReaderWithError struct {
 	limitedReader *io.LimitedReader
@@ -23,7 +23,7 @@ func NewLimitedReaderWithError(reader io.Reader, maxBytes int64) *LimitedReaderW
 func (l *LimitedReaderWithError) Read(p []byte) (int, error) {
 	n, err := l.limitedReader.Read(p)
 	if l.limitedReader.N <= 0 && err == io.EOF {
-		return n, SizeLimitExceeded
+		return n, ErrSizeLimitExceeded
 	}
 	return n, err
 }
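The wrapper above converts the io.EOF that io.LimitedReader reports once its budget is spent into the renamed sentinel, so callers can tell a truncated read from a normal end of stream. A minimal caller-side sketch, assuming the surrounding file already imports the Mattermost utils package (its module path is omitted here) along with errors, fmt, and io; the 4-byte limit is purely illustrative:

func readCapped(src io.Reader) ([]byte, error) {
	// Real callers pass limits such as slackImportMaxFileSize.
	lr := utils.NewLimitedReaderWithError(src, 4)

	data, err := io.ReadAll(lr)
	if errors.Is(err, utils.ErrSizeLimitExceeded) {
		// The source held more bytes than the configured limit allows.
		return nil, fmt.Errorf("input rejected: %w", err)
	}
	return data, err
}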
@@ -51,7 +51,7 @@ func TestLimitedReaderWithError(t *testing.T) {
 		buf := make([]byte, moreThanMaxBytes)
 		_, err = io.ReadFull(lr, buf)
 		require.Error(t, err)
-		require.Equal(t, SizeLimitExceeded, err)
+		require.Equal(t, ErrSizeLimitExceeded, err)
 	})
 
 	t.Run("multiple small reads, total larger than max size", func(t *testing.T) {
@@ -71,6 +71,6 @@ func TestLimitedReaderWithError(t *testing.T) {
 		// lets do it again
 		_, err = io.ReadFull(lr, buf)
 		require.Error(t, err)
-		require.Equal(t, SizeLimitExceeded, err)
+		require.Equal(t, ErrSizeLimitExceeded, err)
 	})
 }
@@ -89,7 +89,7 @@ func init() {
 
 type DialContextFunction func(ctx context.Context, network, addr string) (net.Conn, error)
 
-var AddressForbidden = errors.New("address forbidden, you may need to set AllowedUntrustedInternalConnections to allow an integration access to your internal network")
+var ErrAddressForbidden = errors.New("address forbidden, you may need to set AllowedUntrustedInternalConnections to allow an integration access to your internal network")
 
 func dialContextFilter(dial DialContextFunction, allowHost func(host string) bool, allowIP func(ip net.IP) bool) DialContextFunction {
 	return func(ctx context.Context, network, addr string) (net.Conn, error) {
@@ -128,7 +128,7 @@ func dialContextFilter(dial DialContextFunction, allowHost func(host string) boo
 			}
 		}
 		if firstErr == nil {
-			return nil, AddressForbidden
+			return nil, ErrAddressForbidden
 		}
 		return nil, firstErr
 	}
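dialContextFilter returns the sentinel directly when the filter rejects every resolved address for a host, but application code normally meets it only after net/http has wrapped it in a *url.Error, which is why the tests below reach it through err.(*url.Error).Err. Since *url.Error implements Unwrap, errors.Is is an equivalent and more tolerant check. A minimal sketch, assuming a client whose transport was built by this package (the httpservice import path is omitted) and that errors, fmt, and net/http are imported:

func fetchExternalOnly(client *http.Client, rawURL string) (*http.Response, error) {
	resp, err := client.Get(rawURL)
	if err != nil {
		// errors.Is sees through the *url.Error wrapper to the sentinel
		// returned by dialContextFilter.
		if errors.Is(err, httpservice.ErrAddressForbidden) {
			return nil, fmt.Errorf("refusing to contact an internal address: %w", err)
		}
		return nil, err
	}
	return resp, nil
}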
@@ -89,7 +89,7 @@ func TestHTTPClient(t *testing.T) {
 				require.NoError(t, err)
 			} else {
 				require.IsType(t, &url.Error{}, err)
-				require.Equal(t, AddressForbidden, err.(*url.Error).Err)
+				require.Equal(t, ErrAddressForbidden, err.(*url.Error).Err)
 			}
 		})
 	}
@@ -154,7 +154,7 @@ func TestDialContextFilter(t *testing.T) {
 			require.True(t, didDial)
 		} else {
 			require.Error(t, err)
-			require.Equal(t, err, AddressForbidden)
+			require.Equal(t, err, ErrAddressForbidden)
 			require.False(t, didDial)
 		}
 	}
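These tests call dialContextFilter directly, so the sentinel arrives unwrapped and require.Equal is sufficient; once the same error has travelled through the HTTP client it sits inside a *url.Error, and testify's require.ErrorIs is the assertion that tolerates both shapes. A small illustrative pair, where err is hypothetical:

// Direct return value from dialContextFilter: plain equality works.
require.Equal(t, ErrAddressForbidden, err)

// After wrapping by net/http, assert through the error chain instead.
require.ErrorIs(t, err, ErrAddressForbidden)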
@@ -143,35 +143,35 @@ func (si *SlackImporter) SlackImport(c request.CTX, fileData multipart.File, fil
 		reader := utils.NewLimitedReaderWithError(fileReader, slackImportMaxFileSize)
 		if file.Name == "channels.json" {
 			publicChannels, err = slackParseChannels(reader, model.ChannelTypeOpen)
-			if errors.Is(err, utils.SizeLimitExceeded) {
+			if errors.Is(err, utils.ErrSizeLimitExceeded) {
 				log.WriteString(i18n.T("api.slackimport.slack_import.zip.file_too_large", map[string]any{"Filename": file.Name}))
 				continue
 			}
 			channels = append(channels, publicChannels...)
 		} else if file.Name == "dms.json" {
 			directChannels, err = slackParseChannels(reader, model.ChannelTypeDirect)
-			if errors.Is(err, utils.SizeLimitExceeded) {
+			if errors.Is(err, utils.ErrSizeLimitExceeded) {
 				log.WriteString(i18n.T("api.slackimport.slack_import.zip.file_too_large", map[string]any{"Filename": file.Name}))
 				continue
 			}
 			channels = append(channels, directChannels...)
 		} else if file.Name == "groups.json" {
 			privateChannels, err = slackParseChannels(reader, model.ChannelTypePrivate)
-			if errors.Is(err, utils.SizeLimitExceeded) {
+			if errors.Is(err, utils.ErrSizeLimitExceeded) {
 				log.WriteString(i18n.T("api.slackimport.slack_import.zip.file_too_large", map[string]any{"Filename": file.Name}))
 				continue
 			}
 			channels = append(channels, privateChannels...)
 		} else if file.Name == "mpims.json" {
 			groupChannels, err = slackParseChannels(reader, model.ChannelTypeGroup)
-			if errors.Is(err, utils.SizeLimitExceeded) {
+			if errors.Is(err, utils.ErrSizeLimitExceeded) {
 				log.WriteString(i18n.T("api.slackimport.slack_import.zip.file_too_large", map[string]any{"Filename": file.Name}))
 				continue
 			}
 			channels = append(channels, groupChannels...)
 		} else if file.Name == "users.json" {
 			users, err = slackParseUsers(reader)
-			if errors.Is(err, utils.SizeLimitExceeded) {
+			if errors.Is(err, utils.ErrSizeLimitExceeded) {
 				log.WriteString(i18n.T("api.slackimport.slack_import.zip.file_too_large", map[string]any{"Filename": file.Name}))
 				continue
 			}
@@ -179,7 +179,7 @@ func (si *SlackImporter) SlackImport(c request.CTX, fileData multipart.File, fil
 			spl := strings.Split(file.Name, "/")
 			if len(spl) == 2 && strings.HasSuffix(spl[1], ".json") {
 				newposts, err := slackParsePosts(reader)
-				if errors.Is(err, utils.SizeLimitExceeded) {
+				if errors.Is(err, utils.ErrSizeLimitExceeded) {
 					log.WriteString(i18n.T("api.slackimport.slack_import.zip.file_too_large", map[string]any{"Filename": file.Name}))
 					continue
 				}
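Every branch in the importer hunks repeats the same check-and-log sequence against utils.ErrSizeLimitExceeded. A hypothetical helper, not part of the commit, that captures the shared shape; it assumes log is the *bytes.Buffer the importer already writes to and that the utils and i18n packages are imported as in the file above:

// fileTooLarge reports whether parsing stopped because the capped reader hit
// its limit, recording the standard warning when it did.
func fileTooLarge(err error, log *bytes.Buffer, filename string) bool {
	if !errors.Is(err, utils.ErrSizeLimitExceeded) {
		return false
	}
	log.WriteString(i18n.T("api.slackimport.slack_import.zip.file_too_large", map[string]any{"Filename": filename}))
	return true
}

Each branch would then collapse to a guard such as: if fileTooLarge(err, log, file.Name) { continue }.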