Merge remote-tracking branch 'upstream/master' into mm-62557-improve-property-service-api

This commit is contained in:
Miguel de la Cruz 2025-02-14 10:54:29 +01:00
commit 73502c0ee3
75 changed files with 3452 additions and 804 deletions

View File

@ -8,15 +8,6 @@ else
PLATFORM := $(shell uname)
endif
# Set an environment variable on Linux used to resolve `docker.host.internal` inconsistencies with
# docker. This can be reworked once https://github.com/docker/for-linux/issues/264 is resolved
# satisfactorily.
ifeq ($(PLATFORM),Linux)
export IS_LINUX = -linux
else
export IS_LINUX =
endif
# Detect Apple Silicon and set a flag.
ifeq ($(shell uname)/$(shell uname -m),Darwin/arm64)
ARM_BASED_MAC = true

View File

@ -144,10 +144,12 @@ services:
image: "prom/prometheus:v2.46.0"
user: root
volumes:
- "./docker/prometheus${IS_LINUX}.yml:/etc/prometheus/prometheus.yml"
- "./docker/prometheus.yml:/etc/prometheus/prometheus.yml"
- "/var/run/docker.sock:/var/run/docker.sock"
networks:
- mm-test
extra_hosts:
- "host.docker.internal:host-gateway"
grafana:
image: "grafana/grafana:10.4.2"
volumes:

View File

@ -1,8 +0,0 @@
global:
scrape_interval: 5s
evaluation_interval: 60s
scrape_configs:
- job_name: 'mattermost'
static_configs:
- targets: ['172.17.0.1:8067']

View File

@ -9,6 +9,7 @@ import (
"fmt"
"os"
"testing"
"time"
"github.com/mattermost/mattermost/server/public/model"
"github.com/stretchr/testify/require"
@ -54,6 +55,8 @@ func TestCreateCPAField(t *testing.T) {
}, "an invalid field should be rejected")
th.TestForSystemAdminAndLocal(t, func(t *testing.T, client *model.Client4) {
webSocketClient := th.CreateConnectedWebSocketClient(t)
name := model.NewId()
field := &model.PropertyField{
Name: fmt.Sprintf(" %s\t", name), // name should be sanitized
@ -67,6 +70,27 @@ func TestCreateCPAField(t *testing.T) {
require.NotZero(t, createdField.ID)
require.Equal(t, name, createdField.Name)
require.Equal(t, "default", createdField.Attrs["visibility"])
t.Run("a websocket event should be fired as part of the field creation", func(t *testing.T) {
var wsField model.PropertyField
require.Eventually(t, func() bool {
select {
case event := <-webSocketClient.EventChannel:
if event.EventType() == model.WebsocketEventCPAFieldCreated {
fieldData, err := json.Marshal(event.GetData()["field"])
require.NoError(t, err)
require.NoError(t, json.Unmarshal(fieldData, &wsField))
return true
}
default:
return false
}
return false
}, 5*time.Second, 100*time.Millisecond)
require.NotEmpty(t, wsField.ID)
require.Equal(t, createdField, &wsField)
})
}, "a user with admin permissions should be able to create the field")
}
@ -149,6 +173,8 @@ func TestPatchCPAField(t *testing.T) {
})
th.TestForSystemAdminAndLocal(t, func(t *testing.T, client *model.Client4) {
webSocketClient := th.CreateConnectedWebSocketClient(t)
field := &model.PropertyField{
Name: model.NewId(),
Type: model.PropertyFieldTypeText,
@ -163,6 +189,27 @@ func TestPatchCPAField(t *testing.T) {
CheckOKStatus(t, resp)
require.NoError(t, err)
require.Equal(t, newName, patchedField.Name)
t.Run("a websocket event should be fired as part of the field patch", func(t *testing.T) {
var wsField model.PropertyField
require.Eventually(t, func() bool {
select {
case event := <-webSocketClient.EventChannel:
if event.EventType() == model.WebsocketEventCPAFieldUpdated {
fieldData, err := json.Marshal(event.GetData()["field"])
require.NoError(t, err)
require.NoError(t, json.Unmarshal(fieldData, &wsField))
return true
}
default:
return false
}
return false
}, 5*time.Second, 100*time.Millisecond)
require.NotEmpty(t, wsField.ID)
require.Equal(t, patchedField, &wsField)
})
}, "a user with admin permissions should be able to patch the field")
}
@ -197,6 +244,8 @@ func TestDeleteCPAField(t *testing.T) {
})
th.TestForSystemAdminAndLocal(t, func(t *testing.T, client *model.Client4) {
webSocketClient := th.CreateConnectedWebSocketClient(t)
field := &model.PropertyField{
Name: model.NewId(),
Type: model.PropertyFieldTypeText,
@ -213,6 +262,26 @@ func TestDeleteCPAField(t *testing.T) {
deletedField, appErr := th.App.GetCPAField(createdField.ID)
require.Nil(t, appErr)
require.NotZero(t, deletedField.DeleteAt)
t.Run("a websocket event should be fired as part of the field deletion", func(t *testing.T) {
var fieldID string
require.Eventually(t, func() bool {
select {
case event := <-webSocketClient.EventChannel:
if event.EventType() == model.WebsocketEventCPAFieldDeleted {
var ok bool
fieldID, ok = event.GetData()["field_id"].(string)
require.True(t, ok)
return true
}
default:
return false
}
return false
}, 5*time.Second, 100*time.Millisecond)
require.Equal(t, createdField.ID, fieldID)
})
}, "a user with admin permissions should be able to delete the field")
}
@ -470,6 +539,8 @@ func TestPatchCPAValues(t *testing.T) {
th.App.Srv().SetLicense(model.NewTestLicenseSKU(model.LicenseShortSkuEnterprise))
t.Run("any team member should be able to create their own values", func(t *testing.T) {
webSocketClient := th.CreateConnectedWebSocketClient(t)
values := map[string]json.RawMessage{}
value := "Field Value"
values[createdField.ID] = json.RawMessage(fmt.Sprintf(`" %s "`, value)) // value should be sanitized
@ -490,6 +561,27 @@ func TestPatchCPAValues(t *testing.T) {
actualValue = ""
require.NoError(t, json.Unmarshal(values[createdField.ID], &actualValue))
require.Equal(t, value, actualValue)
t.Run("a websocket event should be fired as part of the value changes", func(t *testing.T) {
var wsValues map[string]json.RawMessage
require.Eventually(t, func() bool {
select {
case event := <-webSocketClient.EventChannel:
if event.EventType() == model.WebsocketEventCPAValuesUpdated {
valuesData, err := json.Marshal(event.GetData()["values"])
require.NoError(t, err)
require.NoError(t, json.Unmarshal(valuesData, &wsValues))
return true
}
default:
return false
}
return false
}, 5*time.Second, 100*time.Millisecond)
require.NotEmpty(t, wsValues)
require.Equal(t, patchedValues, wsValues)
})
})
t.Run("any team member should be able to patch their own values", func(t *testing.T) {

View File

@ -91,7 +91,11 @@ func TestSubmitMetrics(t *testing.T) {
t.Run("metrics enabled and valid", func(t *testing.T) {
metricsMock := setupMetricsMock()
metricsMock.On("IncrementClientLongTasks", mock.AnythingOfType("string"), mock.AnythingOfType("string"), mock.AnythingOfType("float64")).Return()
metricsMock.On("IncrementClientLongTasks",
mock.AnythingOfType("string"),
mock.AnythingOfType("string"),
mock.AnythingOfType("string"),
mock.AnythingOfType("float64")).Return()
platform.RegisterMetricsInterface(func(_ *platform.PlatformService, _, _ string) einterfaces.MetricsInterface {
return metricsMock
@ -159,7 +163,11 @@ func TestSubmitMetrics(t *testing.T) {
t.Run("metrics recorded for API errors", func(t *testing.T) {
metricsMock := setupMetricsMock()
metricsMock.On("IncrementClientLongTasks", mock.AnythingOfType("string"), mock.AnythingOfType("string"), mock.AnythingOfType("float64")).Return()
metricsMock.On("IncrementClientLongTasks",
mock.AnythingOfType("string"),
mock.AnythingOfType("string"),
mock.AnythingOfType("string"),
mock.AnythingOfType("float64")).Return()
platform.RegisterMetricsInterface(func(_ *platform.PlatformService, _, _ string) einterfaces.MetricsInterface {
return metricsMock
@ -190,7 +198,11 @@ func TestSubmitMetrics(t *testing.T) {
t.Run("metrics recorded for URL length limit errors", func(t *testing.T) {
metricsMock := setupMetricsMock()
metricsMock.On("IncrementClientLongTasks", mock.AnythingOfType("string"), mock.AnythingOfType("string"), mock.AnythingOfType("float64")).Return()
metricsMock.On("IncrementClientLongTasks",
mock.AnythingOfType("string"),
mock.AnythingOfType("string"),
mock.AnythingOfType("string"),
mock.AnythingOfType("float64")).Return()
platform.RegisterMetricsInterface(func(_ *platform.PlatformService, _, _ string) einterfaces.MetricsInterface {
return metricsMock

View File

@ -3475,7 +3475,9 @@ func TestWebHubMembership(t *testing.T) {
}
func TestWebHubCloseConnOnDBFail(t *testing.T) {
th := Setup(t).InitBasic()
th := SetupConfig(t, func(cfg *model.Config) {
*cfg.ServiceSettings.EnableWebHubChannelIteration = true
}).InitBasic()
defer func() {
th.TearDown()
_, err := th.Server.Store().GetInternalMasterDB().Exec(`ALTER TABLE dummy RENAME to ChannelMembers`)

View File

@ -55,7 +55,7 @@ func (a *App) UpdateChannelBookmark(c request.CTX, updateBookmark *model.Channel
isAnotherFile := updateBookmark.FileInfo != nil && updateBookmark.FileId != "" && updateBookmark.FileId != updateBookmark.FileInfo.Id
if isAnotherFile {
if fileAlreadyAttachedErr := a.Srv().Store().ChannelBookmark().ErrorIfBookmarkFileInfoAlreadyAttached(updateBookmark.FileId); fileAlreadyAttachedErr != nil {
if fileAlreadyAttachedErr := a.Srv().Store().ChannelBookmark().ErrorIfBookmarkFileInfoAlreadyAttached(updateBookmark.FileId, updateBookmark.ChannelId); fileAlreadyAttachedErr != nil {
return nil, model.NewAppError("UpdateChannelBookmark", "app.channel.bookmark.update.app_error", nil, "", http.StatusInternalServerError).Wrap(fileAlreadyAttachedErr)
}
}

View File

@ -89,6 +89,7 @@ func TestUpdateBookmark(t *testing.T) {
var testUpdateAnotherFile = func(th *TestHelper, t *testing.T) {
file := &model.FileInfo{
Id: model.NewId(),
ChannelId: th.BasicChannel.Id,
CreatorId: model.BookmarkFileOwner,
Path: "somepath",
ThumbnailPath: "thumbpath",
@ -116,6 +117,7 @@ func TestUpdateBookmark(t *testing.T) {
file2 := &model.FileInfo{
Id: model.NewId(),
ChannelId: th.BasicChannel.Id,
CreatorId: model.BookmarkFileOwner,
Path: "somepath",
ThumbnailPath: "thumbpath",
@ -144,6 +146,106 @@ func TestUpdateBookmark(t *testing.T) {
require.Nil(t, bookmarkResp)
}
// testUpdateInvalidFiles verifies that App.UpdateChannelBookmark rejects a
// bookmark update when the replacement file is invalid: first a file that has
// already been deleted (DeleteAt set in the past), then a file that belongs to
// a different channel than the bookmark's.
var testUpdateInvalidFiles = func(th *TestHelper, t *testing.T, creatingUserId string, updatingUserId string) {
// Seed a valid file in the bookmark's channel, owned by the reserved
// bookmark-file owner, to back the initial bookmark.
file := &model.FileInfo{
Id: model.NewId(),
ChannelId: th.BasicChannel.Id,
CreatorId: model.BookmarkFileOwner,
Path: "somepath",
ThumbnailPath: "thumbpath",
PreviewPath: "prevPath",
Name: "test file",
Extension: "png",
MimeType: "images/png",
Size: 873182,
Width: 3076,
Height: 2200,
HasPreviewImage: true,
}
_, err := th.App.Srv().Store().FileInfo().Save(th.Context, file)
assert.NoError(t, err)
defer func() {
// Clean up the fixture file regardless of test outcome.
err = th.App.Srv().Store().FileInfo().PermanentDelete(th.Context, file.Id)
assert.NoError(t, err)
}()
// Create the bookmark as creatingUserId; the invalid updates below run as
// updatingUserId (the two may differ depending on the caller).
th.Context.Session().UserId = creatingUserId
bookmark := createBookmark("File to be updated", model.ChannelBookmarkFile, th.BasicChannel.Id, file.Id)
bookmarkToEdit, appErr := th.App.CreateChannelBookmark(th.Context, bookmark, "")
require.Nil(t, appErr)
require.NotNil(t, bookmarkToEdit)
otherChannel := th.CreateChannel(th.Context, th.BasicTeam)
// Case 1: a file marked deleted (DeleteAt one second after its CreateAt,
// both in the past) must not be attachable to the bookmark.
createAt := time.Now().Add(-1 * time.Minute)
deleteAt := createAt.Add(1 * time.Second)
deletedFile := &model.FileInfo{
Id: model.NewId(),
ChannelId: th.BasicChannel.Id,
CreatorId: model.BookmarkFileOwner,
Path: "somepath",
ThumbnailPath: "thumbpath",
PreviewPath: "prevPath",
Name: "test file",
Extension: "png",
MimeType: "images/png",
Size: 873182,
Width: 3076,
Height: 2200,
HasPreviewImage: true,
CreateAt: createAt.UnixMilli(),
UpdateAt: createAt.UnixMilli(),
DeleteAt: deleteAt.UnixMilli(),
}
_, err = th.App.Srv().Store().FileInfo().Save(th.Context, deletedFile)
assert.NoError(t, err)
defer func() {
err = th.App.Srv().Store().FileInfo().PermanentDelete(th.Context, deletedFile.Id)
assert.NoError(t, err)
}()
th.Context.Session().UserId = updatingUserId
updateBookmarkPending := bookmarkToEdit.Clone()
updateBookmarkPending.FileId = deletedFile.Id
bookmarkEdited, appErr := th.App.UpdateChannelBookmark(th.Context, updateBookmarkPending, "")
assert.NotNil(t, appErr)
require.Nil(t, bookmarkEdited)
// Case 2: a file attached to a different channel (otherChannel) must be
// rejected when used to update a bookmark in BasicChannel.
anotherChannelFile := &model.FileInfo{
Id: model.NewId(),
ChannelId: otherChannel.Id,
CreatorId: model.BookmarkFileOwner,
Path: "somepath",
ThumbnailPath: "thumbpath",
PreviewPath: "prevPath",
Name: "test file",
Extension: "png",
MimeType: "images/png",
Size: 873182,
Width: 3076,
Height: 2200,
HasPreviewImage: true,
}
_, err = th.App.Srv().Store().FileInfo().Save(th.Context, anotherChannelFile)
assert.NoError(t, err)
defer func() {
err = th.App.Srv().Store().FileInfo().PermanentDelete(th.Context, anotherChannelFile.Id)
require.NoError(t, err)
}()
updateBookmarkPending = bookmarkToEdit.Clone()
updateBookmarkPending.FileId = anotherChannelFile.Id
bookmarkEdited, appErr = th.App.UpdateChannelBookmark(th.Context, updateBookmarkPending, "")
assert.NotNil(t, appErr)
require.Nil(t, bookmarkEdited)
}
t.Run("same user update a channel bookmark", func(t *testing.T) {
bookmark1 := &model.ChannelBookmark{
ChannelId: th.BasicChannel.Id,
@ -166,6 +268,8 @@ func TestUpdateBookmark(t *testing.T) {
assert.Greater(t, response.Updated.UpdateAt, response.Updated.CreateAt)
testUpdateAnotherFile(th, t)
testUpdateInvalidFiles(th, t, th.BasicUser.Id, th.BasicUser.Id)
})
t.Run("another user update a channel bookmark", func(t *testing.T) {
@ -181,6 +285,8 @@ func TestUpdateBookmark(t *testing.T) {
assert.Equal(t, "New name", response.Deleted.DisplayName)
testUpdateAnotherFile(th, t)
testUpdateInvalidFiles(th, t, th.BasicUser.Id, th.BasicUser.Id)
})
t.Run("update an already deleted channel bookmark", func(t *testing.T) {
@ -265,6 +371,7 @@ func TestGetChannelBookmarks(t *testing.T) {
file := &model.FileInfo{
Id: model.NewId(),
ChannelId: th.BasicChannel.Id,
CreatorId: model.BookmarkFileOwner,
Path: "somepath",
ThumbnailPath: "thumbpath",
@ -346,6 +453,7 @@ func TestUpdateChannelBookmarkSortOrder(t *testing.T) {
file := &model.FileInfo{
Id: model.NewId(),
ChannelId: th.BasicChannel.Id,
CreatorId: model.BookmarkFileOwner,
Path: "somepath",
ThumbnailPath: "thumbpath",

View File

@ -7,13 +7,16 @@ import (
"database/sql"
"encoding/json"
"net/http"
"sort"
"github.com/mattermost/mattermost/server/public/model"
"github.com/mattermost/mattermost/server/v8/channels/store"
"github.com/pkg/errors"
)
const CustomProfileAttributesFieldLimit = 20
const (
CustomProfileAttributesFieldLimit = 20
)
var cpaGroupID string
@ -59,7 +62,7 @@ func (a *App) ListCPAFields() ([]*model.PropertyField, *model.AppError) {
}
opts := model.PropertyFieldSearchOpts{
Page: 0,
GroupID: groupID,
PerPage: CustomProfileAttributesFieldLimit,
}
@ -68,6 +71,10 @@ func (a *App) ListCPAFields() ([]*model.PropertyField, *model.AppError) {
return nil, model.NewAppError("GetCPAFields", "app.custom_profile_attributes.search_property_fields.app_error", nil, "", http.StatusInternalServerError).Wrap(err)
}
sort.Slice(fields, func(i, j int) bool {
return model.CustomProfileAttributesPropertySortOrder(fields[i]) < model.CustomProfileAttributesPropertySortOrder(fields[j])
})
return fields, nil
}
@ -77,12 +84,12 @@ func (a *App) CreateCPAField(field *model.PropertyField) (*model.PropertyField,
return nil, model.NewAppError("CreateCPAField", "app.custom_profile_attributes.cpa_group_id.app_error", nil, "", http.StatusInternalServerError).Wrap(err)
}
existingFields, appErr := a.ListCPAFields()
if appErr != nil {
return nil, appErr
fieldCount, err := a.Srv().propertyService.CountActivePropertyFieldsForGroup(groupID)
if err != nil {
return nil, model.NewAppError("CreateCPAField", "app.custom_profile_attributes.count_property_fields.app_error", nil, "", http.StatusInternalServerError).Wrap(err)
}
if len(existingFields) >= CustomProfileAttributesFieldLimit {
if fieldCount >= CustomProfileAttributesFieldLimit {
return nil, model.NewAppError("CreateCPAField", "app.custom_profile_attributes.limit_reached.app_error", nil, "", http.StatusUnprocessableEntity).Wrap(err)
}
@ -98,6 +105,10 @@ func (a *App) CreateCPAField(field *model.PropertyField) (*model.PropertyField,
}
}
message := model.NewWebSocketEvent(model.WebsocketEventCPAFieldCreated, "", "", "", nil, "")
message.Add("field", newField)
a.Publish(message)
return newField, nil
}
@ -123,6 +134,10 @@ func (a *App) PatchCPAField(fieldID string, patch *model.PropertyFieldPatch) (*m
}
}
message := model.NewWebSocketEvent(model.WebsocketEventCPAFieldUpdated, "", "", "", nil, "")
message.Add("field", patchedField)
a.Publish(message)
return patchedField, nil
}
@ -151,6 +166,10 @@ func (a *App) DeleteCPAField(id string) *model.AppError {
}
}
message := model.NewWebSocketEvent(model.WebsocketEventCPAFieldDeleted, "", "", "", nil, "")
message.Add("field_id", id)
a.Publish(message)
return nil
}
@ -160,17 +179,14 @@ func (a *App) ListCPAValues(userID string) ([]*model.PropertyValue, *model.AppEr
return nil, model.NewAppError("GetCPAFields", "app.custom_profile_attributes.cpa_group_id.app_error", nil, "", http.StatusInternalServerError).Wrap(err)
}
opts := model.PropertyValueSearchOpts{
Page: 0,
PerPage: 999999,
IncludeDeleted: false,
}
fields, err := a.Srv().propertyService.SearchPropertyValues(groupID, userID, opts)
values, err := a.Srv().propertyService.SearchPropertyValues(groupID, userID, model.PropertyValueSearchOpts{
PerPage: CustomProfileAttributesFieldLimit,
})
if err != nil {
return nil, model.NewAppError("ListCPAValues", "app.custom_profile_attributes.list_property_values.app_error", nil, "", http.StatusInternalServerError).Wrap(err)
}
return fields, nil
return values, nil
}
func (a *App) GetCPAValue(valueID string) (*model.PropertyValue, *model.AppError) {
@ -188,49 +204,54 @@ func (a *App) GetCPAValue(valueID string) (*model.PropertyValue, *model.AppError
}
// PatchCPAValue creates or updates the Custom Profile Attribute value of a
// single field for the given user. It is a convenience wrapper around
// PatchCPAValues for the one-field case.
//
// Returns the persisted value, or a *model.AppError if the field does not
// exist, is deleted, or the upsert fails.
func (a *App) PatchCPAValue(userID string, fieldID string, value json.RawMessage) (*model.PropertyValue, *model.AppError) {
	values, appErr := a.PatchCPAValues(userID, map[string]json.RawMessage{fieldID: value})
	if appErr != nil {
		return nil, appErr
	}

	// Defensive guard: PatchCPAValues should return exactly one value for a
	// one-entry map, but never index blindly into the result — an explicit
	// error beats an index-out-of-range panic if the upsert returns nothing.
	if len(values) == 0 {
		return nil, model.NewAppError("PatchCPAValue", "app.custom_profile_attributes.property_value_upsert.app_error", nil, "", http.StatusInternalServerError)
	}

	return values[0], nil
}
func (a *App) PatchCPAValues(userID string, fieldValueMap map[string]json.RawMessage) ([]*model.PropertyValue, *model.AppError) {
groupID, err := a.cpaGroupID()
if err != nil {
return nil, model.NewAppError("PatchCPAValues", "app.custom_profile_attributes.cpa_group_id.app_error", nil, "", http.StatusInternalServerError).Wrap(err)
}
// make sure field exists in this group
existingField, appErr := a.GetCPAField(fieldID)
if appErr != nil {
return nil, model.NewAppError("PatchCPAValue", "app.custom_profile_attributes.property_field_not_found.app_error", nil, "", http.StatusNotFound).Wrap(appErr)
} else if existingField.DeleteAt > 0 {
return nil, model.NewAppError("PatchCPAValue", "app.custom_profile_attributes.property_field_not_found.app_error", nil, "", http.StatusNotFound)
}
existingValues, appErr := a.ListCPAValues(userID)
if appErr != nil {
return nil, model.NewAppError("PatchCPAValue", "app.custom_profile_attributes.property_value_list.app_error", nil, "", http.StatusNotFound).Wrap(err)
}
var existingValue *model.PropertyValue
for key, value := range existingValues {
if value.FieldID == fieldID {
existingValue = existingValues[key]
break
valuesToUpdate := []*model.PropertyValue{}
for fieldID, value := range fieldValueMap {
// make sure field exists in this group
existingField, appErr := a.GetCPAField(fieldID)
if appErr != nil {
return nil, model.NewAppError("PatchCPAValue", "app.custom_profile_attributes.property_field_not_found.app_error", nil, "", http.StatusNotFound).Wrap(appErr)
} else if existingField.DeleteAt > 0 {
return nil, model.NewAppError("PatchCPAValue", "app.custom_profile_attributes.property_field_not_found.app_error", nil, "", http.StatusNotFound)
}
}
if existingValue != nil {
existingValue.Value = value
_, err = a.ch.srv.propertyService.UpdatePropertyValue(existingValue)
if err != nil {
return nil, model.NewAppError("PatchCPAValue", "app.custom_profile_attributes.property_value_update.app_error", nil, "", http.StatusInternalServerError).Wrap(err)
}
} else {
propertyValue := &model.PropertyValue{
value := &model.PropertyValue{
GroupID: groupID,
TargetType: "user",
TargetID: userID,
FieldID: fieldID,
Value: value,
}
existingValue, err = a.ch.srv.propertyService.CreatePropertyValue(propertyValue)
if err != nil {
return nil, model.NewAppError("PatchCPAValue", "app.custom_profile_attributes.property_value_creation.app_error", nil, "", http.StatusInternalServerError).Wrap(err)
}
valuesToUpdate = append(valuesToUpdate, value)
}
return existingValue, nil
updatedValues, err := a.Srv().propertyService.UpsertPropertyValues(valuesToUpdate)
if err != nil {
return nil, model.NewAppError("PatchCPAValues", "app.custom_profile_attributes.property_value_upsert.app_error", nil, "", http.StatusInternalServerError).Wrap(err)
}
updatedFieldValueMap := map[string]json.RawMessage{}
for _, value := range updatedValues {
updatedFieldValueMap[value.FieldID] = value.Value
}
message := model.NewWebSocketEvent(model.WebsocketEventCPAValuesUpdated, "", "", "", nil, "")
message.Add("user_id", userID)
message.Add("values", updatedFieldValueMap)
a.Publish(message)
return updatedValues, nil
}

View File

@ -51,7 +51,7 @@ func TestGetCPAField(t *testing.T) {
GroupID: cpaGroupID,
Name: "Test Field",
Type: model.PropertyFieldTypeText,
Attrs: map[string]any{"visibility": "hidden"},
Attrs: model.StringInterface{"visibility": "hidden"},
}
createdField, err := th.App.CreateCPAField(field)
@ -76,13 +76,14 @@ func TestListCPAFields(t *testing.T) {
require.NoError(t, cErr)
t.Run("should list the CPA property fields", func(t *testing.T) {
field1 := &model.PropertyField{
field1 := model.PropertyField{
GroupID: cpaGroupID,
Name: "Field 1",
Type: model.PropertyFieldTypeText,
Attrs: model.StringInterface{model.CustomProfileAttributesPropertyAttrsSortOrder: 1},
}
_, err := th.App.Srv().propertyService.CreatePropertyField(field1)
_, err := th.App.Srv().propertyService.CreatePropertyField(&field1)
require.NoError(t, err)
field2 := &model.PropertyField{
@ -93,23 +94,20 @@ func TestListCPAFields(t *testing.T) {
_, err = th.App.Srv().propertyService.CreatePropertyField(field2)
require.NoError(t, err)
field3 := &model.PropertyField{
field3 := model.PropertyField{
GroupID: cpaGroupID,
Name: "Field 3",
Type: model.PropertyFieldTypeText,
Attrs: model.StringInterface{model.CustomProfileAttributesPropertyAttrsSortOrder: 0},
}
_, err = th.App.Srv().propertyService.CreatePropertyField(field3)
_, err = th.App.Srv().propertyService.CreatePropertyField(&field3)
require.NoError(t, err)
fields, appErr := th.App.ListCPAFields()
require.Nil(t, appErr)
require.Len(t, fields, 2)
fieldNames := []string{}
for _, field := range fields {
fieldNames = append(fieldNames, field.Name)
}
require.ElementsMatch(t, []string{"Field 1", "Field 3"}, fieldNames)
require.Equal(t, "Field 3", fields[0].Name)
require.Equal(t, "Field 1", fields[1].Name)
})
}
@ -146,7 +144,7 @@ func TestCreateCPAField(t *testing.T) {
GroupID: cpaGroupID,
Name: model.NewId(),
Type: model.PropertyFieldTypeText,
Attrs: map[string]any{"visibility": "hidden"},
Attrs: model.StringInterface{"visibility": "hidden"},
}
createdField, err := th.App.CreateCPAField(field)
@ -226,14 +224,14 @@ func TestPatchCPAField(t *testing.T) {
GroupID: cpaGroupID,
Name: model.NewId(),
Type: model.PropertyFieldTypeText,
Attrs: map[string]any{"visibility": "hidden"},
Attrs: model.StringInterface{"visibility": "hidden"},
}
createdField, err := th.App.CreateCPAField(newField)
require.Nil(t, err)
patch := &model.PropertyFieldPatch{
Name: model.NewPointer("Patched name"),
Attrs: model.NewPointer(map[string]any{"visibility": "default"}),
Attrs: model.NewPointer(model.StringInterface{"visibility": "default"}),
TargetID: model.NewPointer(model.NewId()),
TargetType: model.NewPointer(model.NewId()),
}
@ -517,3 +515,60 @@ func TestPatchCPAValue(t *testing.T) {
require.Equal(t, userID, updatedValue.TargetID)
})
}
// TestListCPAValues covers App.ListCPAValues: it must return an empty list for
// a user with no values, and return every value stored for a user when the
// field count is at CustomProfileAttributesFieldLimit.
func TestListCPAValues(t *testing.T) {
// The CPA feature is gated behind a feature flag; enable it via the env var
// for the duration of this test.
os.Setenv("MM_FEATUREFLAGS_CUSTOMPROFILEATTRIBUTES", "true")
defer os.Unsetenv("MM_FEATUREFLAGS_CUSTOMPROFILEATTRIBUTES")
th := Setup(t).InitBasic()
defer th.TearDown()
cpaGroupID, cErr := th.App.cpaGroupID()
require.NoError(t, cErr)
userID := model.NewId()
t.Run("should return empty list when user has no values", func(t *testing.T) {
values, appErr := th.App.ListCPAValues(userID)
require.Nil(t, appErr)
require.Empty(t, values)
})
t.Run("should list all values for a user", func(t *testing.T) {
// Create the maximum number of fields, each with one value for userID,
// so the listing is exercised at the field limit.
var expectedValues []json.RawMessage
for i := 1; i <= CustomProfileAttributesFieldLimit; i++ {
field := &model.PropertyField{
GroupID: cpaGroupID,
Name: fmt.Sprintf("Field %d", i),
Type: model.PropertyFieldTypeText,
}
_, err := th.App.Srv().propertyService.CreatePropertyField(field)
require.NoError(t, err)
value := &model.PropertyValue{
TargetID: userID,
TargetType: "user",
GroupID: cpaGroupID,
FieldID: field.ID,
Value: json.RawMessage(fmt.Sprintf(`"Value %d"`, i)),
}
_, err = th.App.Srv().propertyService.CreatePropertyValue(value)
require.NoError(t, err)
expectedValues = append(expectedValues, value.Value)
}
// List values for original user
values, appErr := th.App.ListCPAValues(userID)
require.Nil(t, appErr)
require.Len(t, values, CustomProfileAttributesFieldLimit)
// Every returned value must target this user within the CPA group;
// compare payloads order-independently since ordering is not asserted.
actualValues := make([]json.RawMessage, len(values))
for i, value := range values {
require.Equal(t, userID, value.TargetID)
require.Equal(t, "user", value.TargetType)
require.Equal(t, cpaGroupID, value.GroupID)
actualValues[i] = value.Value
}
require.ElementsMatch(t, expectedValues, actualValues)
})
}

View File

@ -19,7 +19,7 @@ func (a *App) RegisterPerformanceReport(rctx request.CTX, report *model.Performa
for _, c := range report.Counters {
switch c.Metric {
case model.ClientLongTasks:
a.Metrics().IncrementClientLongTasks(commonLabels["platform"], commonLabels["agent"], c.Value)
a.Metrics().IncrementClientLongTasks(commonLabels["platform"], commonLabels["agent"], userID, c.Value)
default:
// we intentionally skip unknown metrics
}
@ -50,22 +50,26 @@ func (a *App) RegisterPerformanceReport(rctx request.CTX, report *model.Performa
case model.ClientFirstContentfulPaint:
a.Metrics().ObserveClientFirstContentfulPaint(commonLabels["platform"],
commonLabels["agent"],
userID,
h.Value/1000)
case model.ClientLargestContentfulPaint:
a.Metrics().ObserveClientLargestContentfulPaint(
commonLabels["platform"],
commonLabels["agent"],
h.GetLabelValue("region", model.AcceptedLCPRegions, "other"),
userID,
h.Value/1000)
case model.ClientInteractionToNextPaint:
a.Metrics().ObserveClientInteractionToNextPaint(
commonLabels["platform"],
commonLabels["agent"],
h.GetLabelValue("interaction", model.AcceptedInteractions, "other"),
userID,
h.Value/1000)
case model.ClientCumulativeLayoutShift:
a.Metrics().ObserveClientCumulativeLayoutShift(commonLabels["platform"],
commonLabels["agent"],
userID,
h.Value)
case model.ClientPageLoadDuration:
a.Metrics().ObserveClientPageLoadDuration(commonLabels["platform"],
@ -76,20 +80,24 @@ func (a *App) RegisterPerformanceReport(rctx request.CTX, report *model.Performa
commonLabels["platform"],
commonLabels["agent"],
h.GetLabelValue("fresh", model.AcceptedTrueFalseLabels, ""),
userID,
h.Value/1000)
case model.ClientTeamSwitchDuration:
a.Metrics().ObserveClientTeamSwitchDuration(
commonLabels["platform"],
commonLabels["agent"],
h.GetLabelValue("fresh", model.AcceptedTrueFalseLabels, ""),
userID,
h.Value/1000)
case model.ClientRHSLoadDuration:
a.Metrics().ObserveClientRHSLoadDuration(commonLabels["platform"],
commonLabels["agent"],
userID,
h.Value/1000)
case model.ClientGlobalThreadsLoadDuration:
a.Metrics().ObserveGlobalThreadsLoadDuration(commonLabels["platform"],
commonLabels["agent"],
userID,
h.Value/1000)
case model.MobileClientLoadDuration:
a.Metrics().ObserveMobileClientLoadDuration(commonLabels["platform"],

View File

@ -27,6 +27,7 @@ import (
"github.com/mattermost/mattermost/server/public/shared/i18n"
"github.com/mattermost/mattermost/server/public/shared/mlog"
"github.com/mattermost/mattermost/server/public/shared/request"
"github.com/mattermost/mattermost/server/v8/channels/store/sqlstore"
)
const (
@ -95,8 +96,10 @@ type WebConn struct {
UserId string
PostedAck bool
lastUserActivityAt int64
send chan model.WebSocketMessage
allChannelMembers map[string]string
lastAllChannelMembersTime int64
lastUserActivityAt int64
send chan model.WebSocketMessage
// deadQueue behaves like a queue of a finite size
// which is used to store all messages that are sent via the websocket.
// It basically acts as the user-space socket buffer, and is used
@ -758,6 +761,8 @@ func (wc *WebConn) drainDeadQueue(index int) error {
// InvalidateCache resets all internal data of the WebConn.
func (wc *WebConn) InvalidateCache() {
// Drop the cached channel-membership map and its timestamp so the next
// ShouldSendEvent check refetches memberships from the store.
wc.allChannelMembers = nil
wc.lastAllChannelMembersTime = 0
// Clear the session and its expiry so they are re-established on next use.
wc.SetSession(nil)
wc.SetSessionExpiresAt(0)
}
@ -938,9 +943,36 @@ func (wc *WebConn) ShouldSendEvent(msg *model.WebSocketEvent) bool {
return false
}
// We don't need to do any further checks because this is already scoped
// to channel members from web_hub.
return true
if *wc.Platform.Config().ServiceSettings.EnableWebHubChannelIteration {
// We don't need to do any further checks because this is already scoped
// to channel members from web_hub.
return true
}
if model.GetMillis()-wc.lastAllChannelMembersTime > webConnMemberCacheTime {
wc.allChannelMembers = nil
wc.lastAllChannelMembersTime = 0
}
if wc.allChannelMembers == nil {
result, err := wc.Platform.Store.Channel().GetAllChannelMembersForUser(
sqlstore.RequestContextWithMaster(request.EmptyContext(wc.Platform.logger)),
wc.UserId,
false,
false,
)
if err != nil {
mlog.Error("webhub.shouldSendEvent.", mlog.Err(err))
return false
}
wc.allChannelMembers = result
wc.lastAllChannelMembersTime = model.GetMillis()
}
if _, ok := wc.allChannelMembers[chID]; ok {
return true
}
return false
}
// Only report events to users who are in the team for the event

View File

@ -486,6 +486,7 @@ func (h *Hub) Start() {
connIndex := newHubConnectionIndex(inactiveConnReaperInterval,
h.platform.Store,
h.platform.logger,
*h.platform.Config().ServiceSettings.EnableWebHubChannelIteration,
)
for {
@ -599,6 +600,11 @@ func (h *Hub) Start() {
for _, webConn := range connIndex.ForUser(userID) {
webConn.InvalidateCache()
}
if !*h.platform.Config().ServiceSettings.EnableWebHubChannelIteration {
continue
}
err := connIndex.InvalidateCMCacheForUser(userID)
if err != nil {
h.platform.Log().Error("Error while invalidating channel member cache", mlog.String("user_id", userID), mlog.Err(err))
@ -666,7 +672,7 @@ func (h *Hub) Start() {
}
} else if userID := msg.GetBroadcast().UserId; userID != "" {
targetConns = connIndex.ForUser(userID)
} else if channelID := msg.GetBroadcast().ChannelId; channelID != "" {
} else if channelID := msg.GetBroadcast().ChannelId; channelID != "" && *h.platform.Config().ServiceSettings.EnableWebHubChannelIteration {
targetConns = connIndex.ForChannel(channelID)
}
if targetConns != nil {
@ -733,6 +739,10 @@ func closeAndRemoveConn(connIndex *hubConnectionIndex, conn *WebConn) {
connIndex.Remove(conn)
}
type connMetadata struct {
channelIDs []string
}
// hubConnectionIndex provides fast addition, removal, and iteration of web connections.
// It requires 4 functionalities which need to be very fast:
// - check if a connection exists or not.
@ -740,90 +750,95 @@ func closeAndRemoveConn(connIndex *hubConnectionIndex, conn *WebConn) {
// - get all connections for a given channelID.
// - get all connections.
type hubConnectionIndex struct {
// byUserId stores the list of connections for a given userID
byUserId map[string][]*WebConn
// byChannelID stores the list of connections for a given channelID.
byChannelID map[string][]*WebConn
// byConnection serves the dual purpose of storing the index of the webconn
// in the value of byUserId map, and also to get all connections.
byConnection map[*WebConn]int
// byUserId stores the set of connections for a given userID
byUserId map[string]map[*WebConn]struct{}
// byChannelID stores the set of connections for a given channelID
byChannelID map[string]map[*WebConn]struct{}
// byConnection serves the dual purpose of storing the channelIDs
// and also to get all connections
byConnection map[*WebConn]connMetadata
byConnectionId map[string]*WebConn
// staleThreshold is the limit beyond which inactive connections
// will be deleted.
staleThreshold time.Duration
store store.Store
logger mlog.LoggerIFace
fastIteration bool
store store.Store
logger mlog.LoggerIFace
}
func newHubConnectionIndex(interval time.Duration,
store store.Store,
logger mlog.LoggerIFace,
fastIteration bool,
) *hubConnectionIndex {
return &hubConnectionIndex{
byUserId: make(map[string][]*WebConn),
byChannelID: make(map[string][]*WebConn),
byConnection: make(map[*WebConn]int),
byUserId: make(map[string]map[*WebConn]struct{}),
byChannelID: make(map[string]map[*WebConn]struct{}),
byConnection: make(map[*WebConn]connMetadata),
byConnectionId: make(map[string]*WebConn),
staleThreshold: interval,
store: store,
logger: logger,
fastIteration: fastIteration,
}
}
func (i *hubConnectionIndex) Add(wc *WebConn) error {
cm, err := i.store.Channel().GetAllChannelMembersForUser(request.EmptyContext(i.logger), wc.UserId, false, false)
if err != nil {
return fmt.Errorf("error getChannelMembersForUser: %v", err)
}
for chID := range cm {
i.byChannelID[chID] = append(i.byChannelID[chID], wc)
var channelIDs []string
if i.fastIteration {
cm, err := i.store.Channel().GetAllChannelMembersForUser(request.EmptyContext(i.logger), wc.UserId, false, false)
if err != nil {
return fmt.Errorf("error getChannelMembersForUser: %v", err)
}
// Store channel IDs and add to byChannelID
channelIDs = make([]string, 0, len(cm))
for chID := range cm {
channelIDs = append(channelIDs, chID)
// Initialize the channel's map if it doesn't exist
if _, ok := i.byChannelID[chID]; !ok {
i.byChannelID[chID] = make(map[*WebConn]struct{})
}
i.byChannelID[chID][wc] = struct{}{}
}
}
i.byUserId[wc.UserId] = append(i.byUserId[wc.UserId], wc)
i.byConnection[wc] = len(i.byUserId[wc.UserId]) - 1
// Initialize the user's map if it doesn't exist
if _, ok := i.byUserId[wc.UserId]; !ok {
i.byUserId[wc.UserId] = make(map[*WebConn]struct{})
}
i.byUserId[wc.UserId][wc] = struct{}{}
i.byConnection[wc] = connMetadata{
channelIDs: channelIDs,
}
i.byConnectionId[wc.GetConnectionID()] = wc
return nil
}
func (i *hubConnectionIndex) Remove(wc *WebConn) {
userConnIndex, ok := i.byConnection[wc]
connMeta, ok := i.byConnection[wc]
if !ok {
return
}
// Remove the wc from i.byUserId
// get the conn slice.
userConnections := i.byUserId[wc.UserId]
// get the last connection.
last := userConnections[len(userConnections)-1]
// https://go.dev/wiki/SliceTricks#delete-without-preserving-order
userConnections[userConnIndex] = last
userConnections[len(userConnections)-1] = nil
i.byUserId[wc.UserId] = userConnections[:len(userConnections)-1]
// set the index of the connection that was moved to the new index.
i.byConnection[last] = userConnIndex
// Remove from byUserId
if userConns, ok := i.byUserId[wc.UserId]; ok {
delete(userConns, wc)
}
connectionID := wc.GetConnectionID()
// Remove webconns from i.byChannelID
// This has O(n) complexity. We are trading off speed while removing
// a connection, to improve broadcasting a message.
for chID, webConns := range i.byChannelID {
// https://go.dev/wiki/SliceTricks#filtering-without-allocating
filtered := webConns[:0]
for _, conn := range webConns {
if conn.GetConnectionID() != connectionID {
filtered = append(filtered, conn)
if i.fastIteration {
// Remove from byChannelID for each channel
for _, chID := range connMeta.channelIDs {
if channelConns, ok := i.byChannelID[chID]; ok {
delete(channelConns, wc)
}
}
for i := len(filtered); i < len(webConns); i++ {
webConns[i] = nil
}
i.byChannelID[chID] = filtered
}
delete(i.byConnection, wc)
delete(i.byConnectionId, connectionID)
delete(i.byConnectionId, wc.GetConnectionID())
}
func (i *hubConnectionIndex) InvalidateCMCacheForUser(userID string) error {
@ -833,25 +848,40 @@ func (i *hubConnectionIndex) InvalidateCMCacheForUser(userID string) error {
return err
}
// Clear out all user entries which belong to channels.
for chID, webConns := range i.byChannelID {
// https://go.dev/wiki/SliceTricks#filtering-without-allocating
filtered := webConns[:0]
for _, conn := range webConns {
if conn.UserId != userID {
filtered = append(filtered, conn)
// Get all connections for this user
conns := i.ForUser(userID)
// Remove all user connections from existing channels
for _, conn := range conns {
if meta, ok := i.byConnection[conn]; ok {
// Remove from old channels
for _, chID := range meta.channelIDs {
if channelConns, ok := i.byChannelID[chID]; ok {
delete(channelConns, conn)
}
}
}
for i := len(filtered); i < len(webConns); i++ {
webConns[i] = nil
}
i.byChannelID[chID] = filtered
}
// re-populate the cache
for chID := range cm {
i.byChannelID[chID] = append(i.byChannelID[chID], i.ForUser(userID)...)
// Add connections to new channels
for _, conn := range conns {
newChannelIDs := make([]string, 0, len(cm))
for chID := range cm {
newChannelIDs = append(newChannelIDs, chID)
// Initialize channel map if needed
if _, ok := i.byChannelID[chID]; !ok {
i.byChannelID[chID] = make(map[*WebConn]struct{})
}
i.byChannelID[chID][conn] = struct{}{}
}
// Update connection metadata
if meta, ok := i.byConnection[conn]; ok {
meta.channelIDs = newChannelIDs
i.byConnection[conn] = meta
}
}
return nil
}
@ -862,26 +892,31 @@ func (i *hubConnectionIndex) Has(wc *WebConn) bool {
// ForUser returns all connections for a user ID.
func (i *hubConnectionIndex) ForUser(id string) []*WebConn {
// Fast path if there is only one or fewer connection.
if len(i.byUserId[id]) <= 1 {
return i.byUserId[id]
userConns, ok := i.byUserId[id]
if !ok {
return nil
}
// Move to using maps.Keys to use the iterator pattern with 1.23.
// This saves the additional slice copy.
conns := make([]*WebConn, 0, len(userConns))
for conn := range userConns {
conns = append(conns, conn)
}
// If there are multiple connections per user,
// then we have to return a clone of the slice
// to allow connIndex.Remove to be safely called while
// iterating the slice.
conns := make([]*WebConn, len(i.byUserId[id]))
copy(conns, i.byUserId[id])
return conns
}
// ForChannel returns all connections for a channelID.
func (i *hubConnectionIndex) ForChannel(channelID string) []*WebConn {
// Note: this is expensive because usually there will be
// more than 1 member for a channel, and broadcasting
// is a hot path, but worth it.
conns := make([]*WebConn, len(i.byChannelID[channelID]))
copy(conns, i.byChannelID[channelID])
channelConns, ok := i.byChannelID[channelID]
if !ok {
return nil
}
conns := make([]*WebConn, 0, len(channelConns))
for conn := range channelConns {
conns = append(conns, conn)
}
return conns
}
@ -902,7 +937,7 @@ func (i *hubConnectionIndex) ForConnection(id string) *WebConn {
}
// All returns the full webConn index.
func (i *hubConnectionIndex) All() map[*WebConn]int {
func (i *hubConnectionIndex) All() map[*WebConn]connMetadata {
return i.byConnection
}

View File

@ -6,6 +6,7 @@ package platform
import (
"bytes"
"encoding/json"
"fmt"
"net"
"net/http"
"net/http/httptest"
@ -197,154 +198,158 @@ func TestHubConnIndex(t *testing.T) {
})
require.NoError(t, err)
t.Run("Basic", func(t *testing.T) {
connIndex := newHubConnectionIndex(1*time.Second, th.Service.Store, th.Service.logger)
for _, fastIterate := range []bool{true, false} {
t.Run(fmt.Sprintf("fastIterate=%t", fastIterate), func(t *testing.T) {
t.Run("Basic", func(t *testing.T) {
connIndex := newHubConnectionIndex(1*time.Second, th.Service.Store, th.Service.logger, fastIterate)
// User1
wc1 := &WebConn{
Platform: th.Service,
Suite: th.Suite,
UserId: model.NewId(),
}
wc1.SetConnectionID(model.NewId())
wc1.SetSession(&model.Session{})
// User1
wc1 := &WebConn{
Platform: th.Service,
Suite: th.Suite,
UserId: model.NewId(),
}
wc1.SetConnectionID(model.NewId())
wc1.SetSession(&model.Session{})
// User2
wc2 := &WebConn{
Platform: th.Service,
Suite: th.Suite,
UserId: model.NewId(),
}
wc2.SetConnectionID(model.NewId())
wc2.SetSession(&model.Session{})
// User2
wc2 := &WebConn{
Platform: th.Service,
Suite: th.Suite,
UserId: model.NewId(),
}
wc2.SetConnectionID(model.NewId())
wc2.SetSession(&model.Session{})
wc3 := &WebConn{
Platform: th.Service,
Suite: th.Suite,
UserId: wc2.UserId,
}
wc3.SetConnectionID(model.NewId())
wc3.SetSession(&model.Session{})
wc3 := &WebConn{
Platform: th.Service,
Suite: th.Suite,
UserId: wc2.UserId,
}
wc3.SetConnectionID(model.NewId())
wc3.SetSession(&model.Session{})
wc4 := &WebConn{
Platform: th.Service,
Suite: th.Suite,
UserId: wc2.UserId,
}
wc4.SetConnectionID(model.NewId())
wc4.SetSession(&model.Session{})
wc4 := &WebConn{
Platform: th.Service,
Suite: th.Suite,
UserId: wc2.UserId,
}
wc4.SetConnectionID(model.NewId())
wc4.SetSession(&model.Session{})
connIndex.Add(wc1)
connIndex.Add(wc2)
connIndex.Add(wc3)
connIndex.Add(wc4)
connIndex.Add(wc1)
connIndex.Add(wc2)
connIndex.Add(wc3)
connIndex.Add(wc4)
t.Run("Basic", func(t *testing.T) {
assert.True(t, connIndex.Has(wc1))
assert.True(t, connIndex.Has(wc2))
t.Run("Basic", func(t *testing.T) {
assert.True(t, connIndex.Has(wc1))
assert.True(t, connIndex.Has(wc2))
assert.ElementsMatch(t, connIndex.ForUser(wc2.UserId), []*WebConn{wc2, wc3, wc4})
assert.ElementsMatch(t, connIndex.ForUser(wc1.UserId), []*WebConn{wc1})
assert.True(t, connIndex.Has(wc2))
assert.True(t, connIndex.Has(wc1))
assert.Len(t, connIndex.All(), 4)
assert.ElementsMatch(t, connIndex.ForUser(wc2.UserId), []*WebConn{wc2, wc3, wc4})
assert.ElementsMatch(t, connIndex.ForUser(wc1.UserId), []*WebConn{wc1})
assert.True(t, connIndex.Has(wc2))
assert.True(t, connIndex.Has(wc1))
assert.Len(t, connIndex.All(), 4)
})
t.Run("RemoveMiddleUser2", func(t *testing.T) {
connIndex.Remove(wc3) // Remove from middle from user2
assert.ElementsMatch(t, connIndex.ForUser(wc2.UserId), []*WebConn{wc2, wc4})
assert.ElementsMatch(t, connIndex.ForUser(wc1.UserId), []*WebConn{wc1})
assert.True(t, connIndex.Has(wc2))
assert.False(t, connIndex.Has(wc3))
assert.True(t, connIndex.Has(wc4))
assert.Len(t, connIndex.All(), 3)
})
t.Run("RemoveUser1", func(t *testing.T) {
connIndex.Remove(wc1) // Remove sole connection from user1
assert.ElementsMatch(t, connIndex.ForUser(wc2.UserId), []*WebConn{wc2, wc4})
assert.ElementsMatch(t, connIndex.ForUser(wc1.UserId), []*WebConn{})
assert.Len(t, connIndex.ForUser(wc1.UserId), 0)
assert.Len(t, connIndex.All(), 2)
assert.False(t, connIndex.Has(wc1))
assert.True(t, connIndex.Has(wc2))
})
t.Run("RemoveEndUser2", func(t *testing.T) {
connIndex.Remove(wc4) // Remove from end from user2
assert.ElementsMatch(t, connIndex.ForUser(wc2.UserId), []*WebConn{wc2})
assert.ElementsMatch(t, connIndex.ForUser(wc1.UserId), []*WebConn{})
assert.True(t, connIndex.Has(wc2))
assert.False(t, connIndex.Has(wc3))
assert.False(t, connIndex.Has(wc4))
assert.Len(t, connIndex.All(), 1)
})
})
t.Run("ByConnectionId", func(t *testing.T) {
connIndex := newHubConnectionIndex(1*time.Second, th.Service.Store, th.Service.logger, fastIterate)
// User1
wc1ID := model.NewId()
wc1 := &WebConn{
Platform: th.Service,
Suite: th.Suite,
UserId: th.BasicUser.Id,
}
wc1.SetConnectionID(wc1ID)
wc1.SetSession(&model.Session{})
// User2
wc2ID := model.NewId()
wc2 := &WebConn{
Platform: th.Service,
Suite: th.Suite,
UserId: th.BasicUser2.Id,
}
wc2.SetConnectionID(wc2ID)
wc2.SetSession(&model.Session{})
wc3ID := model.NewId()
wc3 := &WebConn{
Platform: th.Service,
Suite: th.Suite,
UserId: wc2.UserId,
}
wc3.SetConnectionID(wc3ID)
wc3.SetSession(&model.Session{})
t.Run("no connections", func(t *testing.T) {
assert.False(t, connIndex.Has(wc1))
assert.False(t, connIndex.Has(wc2))
assert.False(t, connIndex.Has(wc3))
assert.Empty(t, connIndex.byConnectionId)
})
t.Run("adding", func(t *testing.T) {
connIndex.Add(wc1)
connIndex.Add(wc3)
assert.Len(t, connIndex.byConnectionId, 2)
assert.Equal(t, wc1, connIndex.ForConnection(wc1ID))
assert.Equal(t, wc3, connIndex.ForConnection(wc3ID))
assert.Equal(t, (*WebConn)(nil), connIndex.ForConnection(wc2ID))
})
t.Run("removing", func(t *testing.T) {
connIndex.Remove(wc3)
assert.Len(t, connIndex.byConnectionId, 1)
assert.Equal(t, wc1, connIndex.ForConnection(wc1ID))
assert.Equal(t, (*WebConn)(nil), connIndex.ForConnection(wc3ID))
assert.Equal(t, (*WebConn)(nil), connIndex.ForConnection(wc2ID))
})
})
})
t.Run("RemoveMiddleUser2", func(t *testing.T) {
connIndex.Remove(wc3) // Remove from middle from user2
assert.ElementsMatch(t, connIndex.ForUser(wc2.UserId), []*WebConn{wc2, wc4})
assert.ElementsMatch(t, connIndex.ForUser(wc1.UserId), []*WebConn{wc1})
assert.True(t, connIndex.Has(wc2))
assert.False(t, connIndex.Has(wc3))
assert.True(t, connIndex.Has(wc4))
assert.Len(t, connIndex.All(), 3)
})
t.Run("RemoveUser1", func(t *testing.T) {
connIndex.Remove(wc1) // Remove sole connection from user1
assert.ElementsMatch(t, connIndex.ForUser(wc2.UserId), []*WebConn{wc2, wc4})
assert.ElementsMatch(t, connIndex.ForUser(wc1.UserId), []*WebConn{})
assert.Len(t, connIndex.ForUser(wc1.UserId), 0)
assert.Len(t, connIndex.All(), 2)
assert.False(t, connIndex.Has(wc1))
assert.True(t, connIndex.Has(wc2))
})
t.Run("RemoveEndUser2", func(t *testing.T) {
connIndex.Remove(wc4) // Remove from end from user2
assert.ElementsMatch(t, connIndex.ForUser(wc2.UserId), []*WebConn{wc2})
assert.ElementsMatch(t, connIndex.ForUser(wc1.UserId), []*WebConn{})
assert.True(t, connIndex.Has(wc2))
assert.False(t, connIndex.Has(wc3))
assert.False(t, connIndex.Has(wc4))
assert.Len(t, connIndex.All(), 1)
})
})
t.Run("ByConnectionId", func(t *testing.T) {
connIndex := newHubConnectionIndex(1*time.Second, th.Service.Store, th.Service.logger)
// User1
wc1ID := model.NewId()
wc1 := &WebConn{
Platform: th.Service,
Suite: th.Suite,
UserId: th.BasicUser.Id,
}
wc1.SetConnectionID(wc1ID)
wc1.SetSession(&model.Session{})
// User2
wc2ID := model.NewId()
wc2 := &WebConn{
Platform: th.Service,
Suite: th.Suite,
UserId: th.BasicUser2.Id,
}
wc2.SetConnectionID(wc2ID)
wc2.SetSession(&model.Session{})
wc3ID := model.NewId()
wc3 := &WebConn{
Platform: th.Service,
Suite: th.Suite,
UserId: wc2.UserId,
}
wc3.SetConnectionID(wc3ID)
wc3.SetSession(&model.Session{})
t.Run("no connections", func(t *testing.T) {
assert.False(t, connIndex.Has(wc1))
assert.False(t, connIndex.Has(wc2))
assert.False(t, connIndex.Has(wc3))
assert.Empty(t, connIndex.byConnectionId)
})
t.Run("adding", func(t *testing.T) {
connIndex.Add(wc1)
connIndex.Add(wc3)
assert.Len(t, connIndex.byConnectionId, 2)
assert.Equal(t, wc1, connIndex.ForConnection(wc1ID))
assert.Equal(t, wc3, connIndex.ForConnection(wc3ID))
assert.Equal(t, (*WebConn)(nil), connIndex.ForConnection(wc2ID))
})
t.Run("removing", func(t *testing.T) {
connIndex.Remove(wc3)
assert.Len(t, connIndex.byConnectionId, 1)
assert.Equal(t, wc1, connIndex.ForConnection(wc1ID))
assert.Equal(t, (*WebConn)(nil), connIndex.ForConnection(wc3ID))
assert.Equal(t, (*WebConn)(nil), connIndex.ForConnection(wc2ID))
})
})
}
t.Run("ByChannelId", func(t *testing.T) {
connIndex := newHubConnectionIndex(1*time.Second, th.Service.Store, th.Service.logger)
connIndex := newHubConnectionIndex(1*time.Second, th.Service.Store, th.Service.logger, true)
// User1
wc1ID := model.NewId()
@ -414,7 +419,7 @@ func TestHubConnIndexIncorrectRemoval(t *testing.T) {
th := Setup(t)
defer th.TearDown()
connIndex := newHubConnectionIndex(1*time.Second, th.Service.Store, th.Service.logger)
connIndex := newHubConnectionIndex(1*time.Second, th.Service.Store, th.Service.logger, false)
// User2
wc2 := &WebConn{
@ -461,7 +466,7 @@ func TestHubConnIndexInactive(t *testing.T) {
th := Setup(t)
defer th.TearDown()
connIndex := newHubConnectionIndex(2*time.Second, th.Service.Store, th.Service.logger)
connIndex := newHubConnectionIndex(2*time.Second, th.Service.Store, th.Service.logger, false)
// User1
wc1 := &WebConn{
@ -621,7 +626,7 @@ func TestHubWebConnCount(t *testing.T) {
func BenchmarkHubConnIndex(b *testing.B) {
th := Setup(b).InitBasic()
defer th.TearDown()
connIndex := newHubConnectionIndex(1*time.Second, th.Service.Store, th.Service.logger)
connIndex := newHubConnectionIndex(1*time.Second, th.Service.Store, th.Service.logger, false)
// User1
wc1 := &WebConn{
@ -666,7 +671,7 @@ func TestHubConnIndexRemoveMemLeak(t *testing.T) {
th := Setup(t)
defer th.TearDown()
connIndex := newHubConnectionIndex(1*time.Second, th.Service.Store, th.Service.logger)
connIndex := newHubConnectionIndex(1*time.Second, th.Service.Store, th.Service.logger, false)
wc := &WebConn{
Platform: th.Service,

View File

@ -19,11 +19,16 @@ func (ps *PropertyService) GetPropertyFields(groupID string, ids []string) ([]*m
return ps.fieldStore.GetMany(groupID, ids)
}
func (ps *PropertyService) CountActivePropertyFieldsForGroup(groupID string) (int64, error) {
return ps.fieldStore.CountForGroup(groupID, false)
}
func (ps *PropertyService) SearchPropertyFields(groupID, targetID string, opts model.PropertyFieldSearchOpts) ([]*model.PropertyField, error) {
// groupID and targetID are part of the search method signature to
// incentivize the use of the database indexes in searches
opts.GroupID = groupID
opts.TargetID = targetID
return ps.fieldStore.SearchPropertyFields(opts)
}

View File

@ -40,6 +40,19 @@ func (ps *PropertyService) UpdatePropertyValues(values []*model.PropertyValue) (
return ps.valueStore.Update(values)
}
func (ps *PropertyService) UpsertPropertyValue(value *model.PropertyValue) (*model.PropertyValue, error) {
values, err := ps.UpsertPropertyValues([]*model.PropertyValue{value})
if err != nil {
return nil, err
}
return values[0], nil
}
func (ps *PropertyService) UpsertPropertyValues(values []*model.PropertyValue) ([]*model.PropertyValue, error) {
return ps.valueStore.Upsert(values)
}
func (ps *PropertyService) DeletePropertyValue(id string) error {
return ps.valueStore.Delete(id)
}

View File

@ -257,6 +257,10 @@ channels/db/migrations/mysql/000129_add_property_system_architecture.down.sql
channels/db/migrations/mysql/000129_add_property_system_architecture.up.sql
channels/db/migrations/mysql/000130_system_console_stats.down.sql
channels/db/migrations/mysql/000130_system_console_stats.up.sql
channels/db/migrations/mysql/000131_create_index_pagination_on_property_values.down.sql
channels/db/migrations/mysql/000131_create_index_pagination_on_property_values.up.sql
channels/db/migrations/mysql/000132_create_index_pagination_on_property_fields.down.sql
channels/db/migrations/mysql/000132_create_index_pagination_on_property_fields.up.sql
channels/db/migrations/postgres/000001_create_teams.down.sql
channels/db/migrations/postgres/000001_create_teams.up.sql
channels/db/migrations/postgres/000002_create_team_members.down.sql
@ -515,3 +519,7 @@ channels/db/migrations/postgres/000129_add_property_system_architecture.down.sql
channels/db/migrations/postgres/000129_add_property_system_architecture.up.sql
channels/db/migrations/postgres/000130_system_console_stats.down.sql
channels/db/migrations/postgres/000130_system_console_stats.up.sql
channels/db/migrations/postgres/000131_create_index_pagination_on_property_values.down.sql
channels/db/migrations/postgres/000131_create_index_pagination_on_property_values.up.sql
channels/db/migrations/postgres/000132_create_index_pagination_on_property_fields.down.sql
channels/db/migrations/postgres/000132_create_index_pagination_on_property_fields.up.sql

View File

@ -0,0 +1,14 @@
SET @preparedStatement = (SELECT IF(
(
SELECT COUNT(*) FROM INFORMATION_SCHEMA.STATISTICS
WHERE table_name = 'PropertyValues'
AND table_schema = DATABASE()
AND index_name = 'idx_propertyvalues_create_at_id'
) > 0,
'DROP INDEX idx_propertyvalues_create_at_id ON PropertyValues;',
'SELECT 1'
));
PREPARE removeIndexIfExists FROM @preparedStatement;
EXECUTE removeIndexIfExists;
DEALLOCATE PREPARE removeIndexIfExists;

View File

@ -0,0 +1,14 @@
SET @preparedStatement = (SELECT IF(
(
SELECT COUNT(*) FROM INFORMATION_SCHEMA.STATISTICS
WHERE table_name = 'PropertyValues'
AND table_schema = DATABASE()
AND index_name = 'idx_propertyvalues_create_at_id'
) > 0,
'SELECT 1',
'CREATE INDEX idx_propertyvalues_create_at_id ON PropertyValues(CreateAt, ID);'
));
PREPARE createIndexIfNotExists FROM @preparedStatement;
EXECUTE createIndexIfNotExists;
DEALLOCATE PREPARE createIndexIfNotExists;

View File

@ -0,0 +1,14 @@
SET @preparedStatement = (SELECT IF(
(
SELECT COUNT(*) FROM INFORMATION_SCHEMA.STATISTICS
WHERE table_name = 'PropertyFields'
AND table_schema = DATABASE()
AND index_name = 'idx_propertyfields_create_at_id'
) > 0,
'DROP INDEX idx_propertyfields_create_at_id ON PropertyFields;',
'SELECT 1'
));
PREPARE removeIndexIfExists FROM @preparedStatement;
EXECUTE removeIndexIfExists;
DEALLOCATE PREPARE removeIndexIfExists;

View File

@ -0,0 +1,14 @@
SET @preparedStatement = (SELECT IF(
(
SELECT COUNT(*) FROM INFORMATION_SCHEMA.STATISTICS
WHERE table_name = 'PropertyFields'
AND table_schema = DATABASE()
AND index_name = 'idx_propertyfields_create_at_id'
) > 0,
'SELECT 1',
'CREATE INDEX idx_propertyfields_create_at_id ON PropertyFields(CreateAt, ID);'
));
PREPARE createIndexIfNotExists FROM @preparedStatement;
EXECUTE createIndexIfNotExists;
DEALLOCATE PREPARE createIndexIfNotExists;

View File

@ -0,0 +1,2 @@
-- morph:nontransactional
DROP INDEX CONCURRENTLY IF EXISTS idx_propertyvalues_create_at_id;

View File

@ -0,0 +1,2 @@
-- morph:nontransactional
CREATE INDEX CONCURRENTLY IF NOT EXISTS idx_propertyvalues_create_at_id ON PropertyValues(CreateAt, ID)

View File

@ -0,0 +1,2 @@
-- morph:nontransactional
DROP INDEX CONCURRENTLY IF EXISTS idx_propertyfields_create_at_id;

View File

@ -0,0 +1,2 @@
-- morph:nontransactional
CREATE INDEX CONCURRENTLY IF NOT EXISTS idx_propertyfields_create_at_id ON PropertyFields(CreateAt, ID)

View File

@ -3153,11 +3153,11 @@ func (s *RetryLayerChannelBookmarkStore) Delete(bookmarkID string, deleteFile bo
}
func (s *RetryLayerChannelBookmarkStore) ErrorIfBookmarkFileInfoAlreadyAttached(fileID string) error {
func (s *RetryLayerChannelBookmarkStore) ErrorIfBookmarkFileInfoAlreadyAttached(fileID string, channelID string) error {
tries := 0
for {
err := s.ChannelBookmarkStore.ErrorIfBookmarkFileInfoAlreadyAttached(fileID)
err := s.ChannelBookmarkStore.ErrorIfBookmarkFileInfoAlreadyAttached(fileID, channelID)
if err == nil {
return nil
}
@ -8949,6 +8949,27 @@ func (s *RetryLayerProductNoticesStore) View(userID string, notices []string) er
}
func (s *RetryLayerPropertyFieldStore) CountForGroup(groupID string, includeDeleted bool) (int64, error) {
tries := 0
for {
result, err := s.PropertyFieldStore.CountForGroup(groupID, includeDeleted)
if err == nil {
return result, nil
}
if !isRepeatableError(err) {
return result, err
}
tries++
if tries >= 3 {
err = errors.Wrap(err, "giving up after 3 consecutive repeatable transaction failures")
return result, err
}
timepkg.Sleep(100 * timepkg.Millisecond)
}
}
func (s *RetryLayerPropertyFieldStore) Create(field *model.PropertyField) (*model.PropertyField, error) {
tries := 0
@ -9054,11 +9075,11 @@ func (s *RetryLayerPropertyFieldStore) SearchPropertyFields(opts model.PropertyF
}
func (s *RetryLayerPropertyFieldStore) Update(field []*model.PropertyField) ([]*model.PropertyField, error) {
func (s *RetryLayerPropertyFieldStore) Update(fields []*model.PropertyField) ([]*model.PropertyField, error) {
tries := 0
for {
result, err := s.PropertyFieldStore.Update(field)
result, err := s.PropertyFieldStore.Update(fields)
if err == nil {
return result, nil
}
@ -9243,11 +9264,32 @@ func (s *RetryLayerPropertyValueStore) SearchPropertyValues(opts model.PropertyV
}
func (s *RetryLayerPropertyValueStore) Update(field []*model.PropertyValue) ([]*model.PropertyValue, error) {
func (s *RetryLayerPropertyValueStore) Update(values []*model.PropertyValue) ([]*model.PropertyValue, error) {
tries := 0
for {
result, err := s.PropertyValueStore.Update(field)
result, err := s.PropertyValueStore.Update(values)
if err == nil {
return result, nil
}
if !isRepeatableError(err) {
return result, err
}
tries++
if tries >= 3 {
err = errors.Wrap(err, "giving up after 3 consecutive repeatable transaction failures")
return result, err
}
timepkg.Sleep(100 * timepkg.Millisecond)
}
}
func (s *RetryLayerPropertyValueStore) Upsert(values []*model.PropertyValue) ([]*model.PropertyValue, error) {
tries := 0
for {
result, err := s.PropertyValueStore.Upsert(values)
if err == nil {
return result, nil
}

View File

@ -51,7 +51,7 @@ func bookmarkWithFileInfoSliceColumns() []string {
}
}
func (s *SqlChannelBookmarkStore) ErrorIfBookmarkFileInfoAlreadyAttached(fileId string) error {
func (s *SqlChannelBookmarkStore) ErrorIfBookmarkFileInfoAlreadyAttached(fileId string, channelId string) error {
existingQuery := s.getSubQueryBuilder().
Select("FileInfoId").
From("ChannelBookmarks").
@ -66,11 +66,13 @@ func (s *SqlChannelBookmarkStore) ErrorIfBookmarkFileInfoAlreadyAttached(fileId
Where(sq.Or{
sq.Expr("Id IN (?)", existingQuery),
sq.And{
sq.Eq{"Id": fileId},
sq.Or{
sq.NotEq{"PostId": ""},
sq.NotEq{"CreatorId": model.BookmarkFileOwner},
sq.NotEq{"ChannelId": channelId},
sq.NotEq{"DeleteAt": 0},
},
sq.Eq{"Id": fileId},
},
})
@ -139,7 +141,7 @@ func (s *SqlChannelBookmarkStore) Save(bookmark *model.ChannelBookmark, increase
}
if bookmark.FileId != "" {
err = s.ErrorIfBookmarkFileInfoAlreadyAttached(bookmark.FileId)
err = s.ErrorIfBookmarkFileInfoAlreadyAttached(bookmark.FileId, bookmark.ChannelId)
if err != nil {
return nil, errors.Wrap(err, "unable_to_save_channel_bookmark")
}

View File

@ -86,9 +86,26 @@ func (s *SqlPropertyFieldStore) GetMany(groupID string, ids []string) ([]*model.
return fields, nil
}
func (s *SqlPropertyFieldStore) CountForGroup(groupID string, includeDeleted bool) (int64, error) {
var count int64
builder := s.getQueryBuilder().
Select("COUNT(id)").
From("PropertyFields").
Where(sq.Eq{"GroupID": groupID})
if !includeDeleted {
builder = builder.Where(sq.Eq{"DeleteAt": 0})
}
if err := s.GetReplica().GetBuilder(&count, builder); err != nil {
return int64(0), errors.Wrap(err, "failed to count Sessions")
}
return count, nil
}
func (s *SqlPropertyFieldStore) SearchPropertyFields(opts model.PropertyFieldSearchOpts) ([]*model.PropertyField, error) {
if opts.Page < 0 {
return nil, errors.New("page must be positive integer")
if err := opts.Cursor.IsValid(); err != nil {
return nil, fmt.Errorf("cursor is invalid: %w", err)
}
if opts.PerPage < 1 {
@ -96,10 +113,19 @@ func (s *SqlPropertyFieldStore) SearchPropertyFields(opts model.PropertyFieldSea
}
builder := s.tableSelectQuery.
OrderBy("CreateAt ASC").
Offset(uint64(opts.Page * opts.PerPage)).
OrderBy("CreateAt ASC, Id ASC").
Limit(uint64(opts.PerPage))
if !opts.Cursor.IsEmpty() {
builder = builder.Where(sq.Or{
sq.Gt{"CreateAt": opts.Cursor.CreateAt},
sq.And{
sq.Eq{"CreateAt": opts.Cursor.CreateAt},
sq.Gt{"Id": opts.Cursor.PropertyFieldID},
},
})
}
if !opts.IncludeDeleted {
builder = builder.Where(sq.Eq{"DeleteAt": 0})
}
@ -136,44 +162,66 @@ func (s *SqlPropertyFieldStore) Update(fields []*model.PropertyField) (_ []*mode
defer finalizeTransactionX(transaction, &err)
updateTime := model.GetMillis()
for _, field := range fields {
field.UpdateAt = updateTime
isPostgres := s.DriverName() == model.DatabaseDriverPostgres
nameCase := sq.Case("id")
typeCase := sq.Case("id")
attrsCase := sq.Case("id")
targetIDCase := sq.Case("id")
targetTypeCase := sq.Case("id")
deleteAtCase := sq.Case("id")
ids := make([]string, len(fields))
for i, field := range fields {
field.UpdateAt = updateTime
if vErr := field.IsValid(); vErr != nil {
return nil, errors.Wrap(vErr, "property_field_update_isvalid")
}
queryString, args, err := s.getQueryBuilder().
Update("PropertyFields").
Set("Name", field.Name).
Set("Type", field.Type).
Set("Attrs", field.Attrs).
Set("TargetID", field.TargetID).
Set("TargetType", field.TargetType).
Set("UpdateAt", field.UpdateAt).
Set("DeleteAt", field.DeleteAt).
Where(sq.Eq{"id": field.ID}).
ToSql()
if err != nil {
return nil, errors.Wrap(err, "property_field_update_tosql")
}
result, err := transaction.Exec(queryString, args...)
if err != nil {
return nil, errors.Wrapf(err, "failed to update property field with id: %s", field.ID)
}
count, err := result.RowsAffected()
if err != nil {
return nil, errors.Wrap(err, "property_field_update_rowsaffected")
}
if count == 0 {
return nil, store.NewErrNotFound("PropertyField", field.ID)
ids[i] = field.ID
whenID := sq.Expr("?", field.ID)
if isPostgres {
nameCase = nameCase.When(whenID, sq.Expr("?::text", field.Name))
typeCase = typeCase.When(whenID, sq.Expr("?::property_field_type", field.Type))
attrsCase = attrsCase.When(whenID, sq.Expr("?::jsonb", field.Attrs))
targetIDCase = targetIDCase.When(whenID, sq.Expr("?::text", field.TargetID))
targetTypeCase = targetTypeCase.When(whenID, sq.Expr("?::text", field.TargetType))
deleteAtCase = deleteAtCase.When(whenID, sq.Expr("?::bigint", field.DeleteAt))
} else {
nameCase = nameCase.When(whenID, sq.Expr("?", field.Name))
typeCase = typeCase.When(whenID, sq.Expr("?", field.Type))
attrsCase = attrsCase.When(whenID, sq.Expr("?", field.Attrs))
targetIDCase = targetIDCase.When(whenID, sq.Expr("?", field.TargetID))
targetTypeCase = targetTypeCase.When(whenID, sq.Expr("?", field.TargetType))
deleteAtCase = deleteAtCase.When(whenID, sq.Expr("?", field.DeleteAt))
}
}
builder := s.getQueryBuilder().
Update("PropertyFields").
Set("Name", nameCase).
Set("Type", typeCase).
Set("Attrs", attrsCase).
Set("TargetID", targetIDCase).
Set("TargetType", targetTypeCase).
Set("UpdateAt", updateTime).
Set("DeleteAt", deleteAtCase).
Where(sq.Eq{"id": ids})
result, err := transaction.ExecBuilder(builder)
if err != nil {
return nil, errors.Wrap(err, "property_field_update_exec")
}
count, err := result.RowsAffected()
if err != nil {
return nil, errors.Wrap(err, "property_field_update_rowsaffected")
}
if count != int64(len(fields)) {
return nil, errors.Errorf("failed to update, some property fields were not found, got %d of %d", count, len(fields))
}
if err := transaction.Commit(); err != nil {
return nil, errors.Wrap(err, "property_field_update_commit")
return nil, errors.Wrap(err, "property_field_update_commit_transaction")
}
return fields, nil

View File

@ -91,8 +91,8 @@ func (s *SqlPropertyValueStore) GetMany(groupID string, ids []string) ([]*model.
}
func (s *SqlPropertyValueStore) SearchPropertyValues(opts model.PropertyValueSearchOpts) ([]*model.PropertyValue, error) {
if opts.Page < 0 {
return nil, errors.New("page must be positive integer")
if err := opts.Cursor.IsValid(); err != nil {
return nil, fmt.Errorf("cursor is invalid: %w", err)
}
if opts.PerPage < 1 {
@ -100,10 +100,19 @@ func (s *SqlPropertyValueStore) SearchPropertyValues(opts model.PropertyValueSea
}
builder := s.tableSelectQuery.
OrderBy("CreateAt ASC").
Offset(uint64(opts.Page * opts.PerPage)).
OrderBy("CreateAt ASC, Id ASC").
Limit(uint64(opts.PerPage))
if !opts.Cursor.IsEmpty() {
builder = builder.Where(sq.Or{
sq.Gt{"CreateAt": opts.Cursor.CreateAt},
sq.And{
sq.Eq{"CreateAt": opts.Cursor.CreateAt},
sq.Gt{"Id": opts.Cursor.PropertyValueID},
},
})
}
if !opts.IncludeDeleted {
builder = builder.Where(sq.Eq{"DeleteAt": 0})
}
@ -144,11 +153,78 @@ func (s *SqlPropertyValueStore) Update(values []*model.PropertyValue) (_ []*mode
defer finalizeTransactionX(transaction, &err)
updateTime := model.GetMillis()
for _, value := range values {
isPostgres := s.DriverName() == model.DatabaseDriverPostgres
valueCase := sq.Case("id")
deleteAtCase := sq.Case("id")
ids := make([]string, len(values))
for i, value := range values {
value.UpdateAt = updateTime
if vErr := value.IsValid(); vErr != nil {
return nil, errors.Wrap(vErr, "property_value_update_isvalid")
}
ids[i] = value.ID
valueJSON := value.Value
if s.IsBinaryParamEnabled() {
valueJSON = AppendBinaryFlag(valueJSON)
}
if isPostgres {
valueCase = valueCase.When(sq.Expr("?", value.ID), sq.Expr("?::jsonb", valueJSON))
deleteAtCase = deleteAtCase.When(sq.Expr("?", value.ID), sq.Expr("?::bigint", value.DeleteAt))
} else {
valueCase = valueCase.When(sq.Expr("?", value.ID), sq.Expr("?", valueJSON))
deleteAtCase = deleteAtCase.When(sq.Expr("?", value.ID), sq.Expr("?", value.DeleteAt))
}
}
builder := s.getQueryBuilder().
Update("PropertyValues").
Set("Value", valueCase).
Set("DeleteAt", deleteAtCase).
Set("UpdateAt", updateTime).
Where(sq.Eq{"id": ids})
result, err := transaction.ExecBuilder(builder)
if err != nil {
return nil, errors.Wrap(err, "property_value_update_exec")
}
count, err := result.RowsAffected()
if err != nil {
return nil, errors.Wrap(err, "property_value_update_rowsaffected")
}
if count != int64(len(values)) {
return nil, errors.Errorf("failed to update, some property values were not found, got %d of %d", count, len(values))
}
if err := transaction.Commit(); err != nil {
return nil, errors.Wrap(err, "property_value_update_commit_transaction")
}
return values, nil
}
func (s *SqlPropertyValueStore) Upsert(values []*model.PropertyValue) (_ []*model.PropertyValue, err error) {
if len(values) == 0 {
return nil, nil
}
transaction, err := s.GetMaster().Beginx()
if err != nil {
return nil, errors.Wrap(err, "property_value_upsert_begin_transaction")
}
defer finalizeTransactionX(transaction, &err)
updatedValues := make([]*model.PropertyValue, len(values))
updateTime := model.GetMillis()
for i, value := range values {
value.PreSave()
value.UpdateAt = updateTime
if err := value.IsValid(); err != nil {
return nil, errors.Wrap(err, "property_value_update_isvalid")
return nil, errors.Wrap(err, "property_value_upsert_isvalid")
}
valueJSON := value.Value
@ -156,36 +232,69 @@ func (s *SqlPropertyValueStore) Update(values []*model.PropertyValue) (_ []*mode
valueJSON = AppendBinaryFlag(valueJSON)
}
queryString, args, err := s.getQueryBuilder().
Update("PropertyValues").
Set("Value", valueJSON).
Set("UpdateAt", value.UpdateAt).
Set("DeleteAt", value.DeleteAt).
Where(sq.Eq{"id": value.ID}).
ToSql()
if err != nil {
return nil, errors.Wrap(err, "property_value_update_tosql")
}
builder := s.getQueryBuilder().
Insert("PropertyValues").
Columns("ID", "TargetID", "TargetType", "GroupID", "FieldID", "Value", "CreateAt", "UpdateAt", "DeleteAt").
Values(value.ID, value.TargetID, value.TargetType, value.GroupID, value.FieldID, valueJSON, value.CreateAt, value.UpdateAt, value.DeleteAt)
result, err := transaction.Exec(queryString, args...)
if err != nil {
return nil, errors.Wrapf(err, "failed to update property value with id: %s", value.ID)
}
if s.DriverName() == model.DatabaseDriverMysql {
builder = builder.SuffixExpr(sq.Expr(
"ON DUPLICATE KEY UPDATE Value = ?, UpdateAt = ?, DeleteAt = ?",
valueJSON,
value.UpdateAt,
0,
))
count, err := result.RowsAffected()
if err != nil {
return nil, errors.Wrap(err, "property_value_update_rowsaffected")
}
if count == 0 {
return nil, store.NewErrNotFound("PropertyValue", value.ID)
if _, err := transaction.ExecBuilder(builder); err != nil {
return nil, errors.Wrap(err, "property_value_upsert_exec")
}
// MySQL doesn't support RETURNING, so we need to fetch
// the new field to get its ID in case we hit a DUPLICATED
// KEY and the value.ID we have is not the right one
gBuilder := s.tableSelectQuery.Where(sq.Eq{
"GroupID": value.GroupID,
"TargetID": value.TargetID,
"FieldID": value.FieldID,
"DeleteAt": 0,
})
var values []*model.PropertyValue
if gErr := transaction.SelectBuilder(&values, gBuilder); gErr != nil {
return nil, errors.Wrap(gErr, "property_value_upsert_select")
}
if len(values) != 1 {
return nil, errors.New("property_value_upsert_select_length")
}
updatedValues[i] = values[0]
} else {
builder = builder.SuffixExpr(sq.Expr(
"ON CONFLICT (GroupID, TargetID, FieldID) WHERE DeleteAt = 0 DO UPDATE SET Value = ?, UpdateAt = ?, DeleteAt = ? RETURNING *",
valueJSON,
value.UpdateAt,
0,
))
var values []*model.PropertyValue
if err := transaction.SelectBuilder(&values, builder); err != nil {
return nil, errors.Wrapf(err, "failed to upsert property value with id: %s", value.ID)
}
if len(values) != 1 {
return nil, errors.New("property_value_upsert_select_length")
}
updatedValues[i] = values[0]
}
}
if err := transaction.Commit(); err != nil {
return nil, errors.Wrap(err, "property_value_update_commit")
return nil, errors.Wrap(err, "property_value_upsert_commit")
}
return values, nil
return updatedValues, nil
}
func (s *SqlPropertyValueStore) Delete(id string) error {

View File

@ -1059,7 +1059,7 @@ type PostPersistentNotificationStore interface {
DeleteByTeam(teamIds []string) error
}
type ChannelBookmarkStore interface {
ErrorIfBookmarkFileInfoAlreadyAttached(fileID string) error
ErrorIfBookmarkFileInfoAlreadyAttached(fileID string, channelID string) error
Get(Id string, includeDeleted bool) (b *model.ChannelBookmarkWithFileInfo, err error)
Save(bookmark *model.ChannelBookmark, increaseSortOrder bool) (b *model.ChannelBookmarkWithFileInfo, err error)
Update(bookmark *model.ChannelBookmark) error
@ -1089,8 +1089,9 @@ type PropertyFieldStore interface {
Create(field *model.PropertyField) (*model.PropertyField, error)
Get(groupID, id string) (*model.PropertyField, error)
GetMany(groupID string, ids []string) ([]*model.PropertyField, error)
CountForGroup(groupID string, includeDeleted bool) (int64, error)
SearchPropertyFields(opts model.PropertyFieldSearchOpts) ([]*model.PropertyField, error)
Update(field []*model.PropertyField) ([]*model.PropertyField, error)
Update(fields []*model.PropertyField) ([]*model.PropertyField, error)
Delete(id string) error
}
@ -1099,7 +1100,8 @@ type PropertyValueStore interface {
Get(groupID, id string) (*model.PropertyValue, error)
GetMany(groupID string, ids []string) ([]*model.PropertyValue, error)
SearchPropertyValues(opts model.PropertyValueSearchOpts) ([]*model.PropertyValue, error)
Update(field []*model.PropertyValue) ([]*model.PropertyValue, error)
Update(values []*model.PropertyValue) ([]*model.PropertyValue, error)
Upsert(values []*model.PropertyValue) ([]*model.PropertyValue, error)
Delete(id string) error
DeleteForField(id string) error
}

View File

@ -34,8 +34,12 @@ func TestChannelBookmarkStore(t *testing.T, rctx request.CTX, ss store.Store, s
func testSaveChannelBookmark(t *testing.T, rctx request.CTX, ss store.Store) {
channelID := model.NewId()
otherChannelID := model.NewId()
userID := model.NewId()
createAt := time.Now().Add(-1 * time.Minute)
deleteAt := createAt.Add(1 * time.Second)
bookmark1 := &model.ChannelBookmark{
ChannelId: channelID,
OwnerId: userID,
@ -47,6 +51,7 @@ func testSaveChannelBookmark(t *testing.T, rctx request.CTX, ss store.Store) {
file := &model.FileInfo{
Id: model.NewId(),
ChannelId: channelID,
CreatorId: model.BookmarkFileOwner,
Path: "somepath",
ThumbnailPath: "thumbpath",
@ -80,6 +85,7 @@ func testSaveChannelBookmark(t *testing.T, rctx request.CTX, ss store.Store) {
file2 := &model.FileInfo{
Id: model.NewId(),
ChannelId: channelID,
CreatorId: userID,
Path: "somepath",
ThumbnailPath: "thumbpath",
@ -102,6 +108,60 @@ func testSaveChannelBookmark(t *testing.T, rctx request.CTX, ss store.Store) {
Emoji: ":smile:",
}
deletedFile := &model.FileInfo{
Id: model.NewId(),
ChannelId: channelID,
CreatorId: model.BookmarkFileOwner,
Path: "somepath",
ThumbnailPath: "thumbpath",
PreviewPath: "prevPath",
Name: "test file",
Extension: "png",
MimeType: "images/png",
Size: 873182,
Width: 3076,
Height: 2200,
HasPreviewImage: true,
CreateAt: createAt.UnixMilli(),
UpdateAt: createAt.UnixMilli(),
DeleteAt: deleteAt.UnixMilli(),
}
bookmarkFileDeleted := &model.ChannelBookmark{
ChannelId: channelID,
OwnerId: userID,
DisplayName: "file deleted",
FileId: deletedFile.Id,
Type: model.ChannelBookmarkFile,
Emoji: ":smile:",
}
// another channel
anotherChannelFile := &model.FileInfo{
Id: model.NewId(),
ChannelId: otherChannelID,
CreatorId: model.BookmarkFileOwner,
Path: "somepath",
ThumbnailPath: "thumbpath",
PreviewPath: "prevPath",
Name: "test file",
Extension: "png",
MimeType: "images/png",
Size: 873182,
Width: 3076,
Height: 2200,
HasPreviewImage: true,
}
bookmarkFileAnotherChannel := &model.ChannelBookmark{
ChannelId: channelID,
OwnerId: userID,
DisplayName: "file another channel",
FileId: anotherChannelFile.Id,
Type: model.ChannelBookmarkFile,
Emoji: ":smile:",
}
_, err := ss.FileInfo().Save(rctx, file)
require.NoError(t, err)
defer ss.FileInfo().PermanentDelete(rctx, file.Id)
@ -113,6 +173,14 @@ func testSaveChannelBookmark(t *testing.T, rctx request.CTX, ss store.Store) {
err = ss.FileInfo().AttachToPost(rctx, file2.Id, model.NewId(), channelID, userID)
require.NoError(t, err)
_, err = ss.FileInfo().Save(rctx, deletedFile)
require.NoError(t, err)
defer ss.FileInfo().PermanentDelete(rctx, deletedFile.Id)
_, err = ss.FileInfo().Save(rctx, anotherChannelFile)
require.NoError(t, err)
defer ss.FileInfo().PermanentDelete(rctx, anotherChannelFile.Id)
t.Run("save bookmarks", func(t *testing.T) {
bookmarkResp, err := ss.ChannelBookmark().Save(bookmark1.Clone(), true)
assert.NoError(t, err)
@ -137,6 +205,12 @@ func testSaveChannelBookmark(t *testing.T, rctx request.CTX, ss store.Store) {
_, err = ss.ChannelBookmark().Save(bookmark4.Clone(), true)
assert.Error(t, err) // Error as the file is attached to a post
_, err = ss.ChannelBookmark().Save(bookmarkFileDeleted.Clone(), true)
assert.Error(t, err) // Error as the file is deleted
_, err = ss.ChannelBookmark().Save(bookmarkFileAnotherChannel.Clone(), true)
assert.Error(t, err) // Error as the file is from another channel
})
}
@ -204,6 +278,7 @@ func testUpdateSortOrderChannelBookmark(t *testing.T, rctx request.CTX, ss store
file := &model.FileInfo{
Id: model.NewId(),
ChannelId: channelID,
CreatorId: model.BookmarkFileOwner,
Path: "somepath",
ThumbnailPath: "thumbpath",
@ -391,6 +466,7 @@ func testDeleteChannelBookmark(t *testing.T, rctx request.CTX, ss store.Store) {
file := &model.FileInfo{
Id: model.NewId(),
ChannelId: channelID,
CreatorId: model.BookmarkFileOwner,
Path: "somepath",
ThumbnailPath: "thumbpath",

View File

@ -32,17 +32,17 @@ func (_m *ChannelBookmarkStore) Delete(bookmarkID string, deleteFile bool) error
return r0
}
// ErrorIfBookmarkFileInfoAlreadyAttached provides a mock function with given fields: fileID
func (_m *ChannelBookmarkStore) ErrorIfBookmarkFileInfoAlreadyAttached(fileID string) error {
ret := _m.Called(fileID)
// ErrorIfBookmarkFileInfoAlreadyAttached provides a mock function with given fields: fileID, channelID
func (_m *ChannelBookmarkStore) ErrorIfBookmarkFileInfoAlreadyAttached(fileID string, channelID string) error {
ret := _m.Called(fileID, channelID)
if len(ret) == 0 {
panic("no return value specified for ErrorIfBookmarkFileInfoAlreadyAttached")
}
var r0 error
if rf, ok := ret.Get(0).(func(string) error); ok {
r0 = rf(fileID)
if rf, ok := ret.Get(0).(func(string, string) error); ok {
r0 = rf(fileID, channelID)
} else {
r0 = ret.Error(0)
}

View File

@ -14,6 +14,34 @@ type PropertyFieldStore struct {
mock.Mock
}
// CountForGroup provides a mock function with given fields: groupID, includeDeleted
func (_m *PropertyFieldStore) CountForGroup(groupID string, includeDeleted bool) (int64, error) {
	ret := _m.Called(groupID, includeDeleted)

	if len(ret) == 0 {
		panic("no return value specified for CountForGroup")
	}

	// A registered function that produces both results takes precedence.
	if fn, ok := ret.Get(0).(func(string, bool) (int64, error)); ok {
		return fn(groupID, includeDeleted)
	}

	// Otherwise resolve each return value independently: either a per-value
	// function or a plain canned value.
	var count int64
	if fn, ok := ret.Get(0).(func(string, bool) int64); ok {
		count = fn(groupID, includeDeleted)
	} else {
		count = ret.Get(0).(int64)
	}

	var callErr error
	if fn, ok := ret.Get(1).(func(string, bool) error); ok {
		callErr = fn(groupID, includeDeleted)
	} else {
		callErr = ret.Error(1)
	}

	return count, callErr
}
// Create provides a mock function with given fields: field
func (_m *PropertyFieldStore) Create(field *model.PropertyField) (*model.PropertyField, error) {
ret := _m.Called(field)
@ -152,9 +180,9 @@ func (_m *PropertyFieldStore) SearchPropertyFields(opts model.PropertyFieldSearc
return r0, r1
}
// Update provides a mock function with given fields: field
func (_m *PropertyFieldStore) Update(field []*model.PropertyField) ([]*model.PropertyField, error) {
ret := _m.Called(field)
// Update provides a mock function with given fields: fields
func (_m *PropertyFieldStore) Update(fields []*model.PropertyField) ([]*model.PropertyField, error) {
ret := _m.Called(fields)
if len(ret) == 0 {
panic("no return value specified for Update")
@ -163,10 +191,10 @@ func (_m *PropertyFieldStore) Update(field []*model.PropertyField) ([]*model.Pro
var r0 []*model.PropertyField
var r1 error
if rf, ok := ret.Get(0).(func([]*model.PropertyField) ([]*model.PropertyField, error)); ok {
return rf(field)
return rf(fields)
}
if rf, ok := ret.Get(0).(func([]*model.PropertyField) []*model.PropertyField); ok {
r0 = rf(field)
r0 = rf(fields)
} else {
if ret.Get(0) != nil {
r0 = ret.Get(0).([]*model.PropertyField)
@ -174,7 +202,7 @@ func (_m *PropertyFieldStore) Update(field []*model.PropertyField) ([]*model.Pro
}
if rf, ok := ret.Get(1).(func([]*model.PropertyField) error); ok {
r1 = rf(field)
r1 = rf(fields)
} else {
r1 = ret.Error(1)
}

View File

@ -170,9 +170,9 @@ func (_m *PropertyValueStore) SearchPropertyValues(opts model.PropertyValueSearc
return r0, r1
}
// Update provides a mock function with given fields: field
func (_m *PropertyValueStore) Update(field []*model.PropertyValue) ([]*model.PropertyValue, error) {
ret := _m.Called(field)
// Update provides a mock function with given fields: values
func (_m *PropertyValueStore) Update(values []*model.PropertyValue) ([]*model.PropertyValue, error) {
ret := _m.Called(values)
if len(ret) == 0 {
panic("no return value specified for Update")
@ -181,10 +181,10 @@ func (_m *PropertyValueStore) Update(field []*model.PropertyValue) ([]*model.Pro
var r0 []*model.PropertyValue
var r1 error
if rf, ok := ret.Get(0).(func([]*model.PropertyValue) ([]*model.PropertyValue, error)); ok {
return rf(field)
return rf(values)
}
if rf, ok := ret.Get(0).(func([]*model.PropertyValue) []*model.PropertyValue); ok {
r0 = rf(field)
r0 = rf(values)
} else {
if ret.Get(0) != nil {
r0 = ret.Get(0).([]*model.PropertyValue)
@ -192,7 +192,37 @@ func (_m *PropertyValueStore) Update(field []*model.PropertyValue) ([]*model.Pro
}
if rf, ok := ret.Get(1).(func([]*model.PropertyValue) error); ok {
r1 = rf(field)
r1 = rf(values)
} else {
r1 = ret.Error(1)
}
return r0, r1
}
// Upsert provides a mock function with given fields: values
func (_m *PropertyValueStore) Upsert(values []*model.PropertyValue) ([]*model.PropertyValue, error) {
ret := _m.Called(values)
if len(ret) == 0 {
panic("no return value specified for Upsert")
}
var r0 []*model.PropertyValue
var r1 error
if rf, ok := ret.Get(0).(func([]*model.PropertyValue) ([]*model.PropertyValue, error)); ok {
return rf(values)
}
if rf, ok := ret.Get(0).(func([]*model.PropertyValue) []*model.PropertyValue); ok {
r0 = rf(values)
} else {
if ret.Get(0) != nil {
r0 = ret.Get(0).([]*model.PropertyValue)
}
}
if rf, ok := ret.Get(1).(func([]*model.PropertyValue) error); ok {
r1 = rf(values)
} else {
r1 = ret.Error(1)
}

View File

@ -5,6 +5,7 @@ package storetest
import (
"database/sql"
"fmt"
"testing"
"time"
@ -21,6 +22,7 @@ func TestPropertyFieldStore(t *testing.T, rctx request.CTX, ss store.Store, s Sq
t.Run("UpdatePropertyField", func(t *testing.T) { testUpdatePropertyField(t, rctx, ss) })
t.Run("DeletePropertyField", func(t *testing.T) { testDeletePropertyField(t, rctx, ss) })
t.Run("SearchPropertyFields", func(t *testing.T) { testSearchPropertyFields(t, rctx, ss) })
t.Run("CountForGroup", func(t *testing.T) { testCountForGroup(t, rctx, ss) })
}
func testCreatePropertyField(t *testing.T, _ request.CTX, ss store.Store) {
@ -182,8 +184,7 @@ func testUpdatePropertyField(t *testing.T, _ request.CTX, ss store.Store) {
}
updatedField, err := ss.PropertyField().Update([]*model.PropertyField{field})
require.Zero(t, updatedField)
var enf *store.ErrNotFound
require.ErrorAs(t, err, &enf)
require.ErrorContains(t, err, "failed to update, some property fields were not found, got 0 of 1")
})
t.Run("should fail if the property field is not valid", func(t *testing.T) {
@ -316,6 +317,46 @@ func testUpdatePropertyField(t *testing.T, _ request.CTX, ss store.Store) {
require.Equal(t, groupID, updated2.GroupID)
require.Equal(t, originalUpdateAt2, updated2.UpdateAt)
})
t.Run("should not update any fields if one update points to a nonexisting one", func(t *testing.T) {
// Create a valid field
field1 := &model.PropertyField{
GroupID: model.NewId(),
Name: "First field",
Type: model.PropertyFieldTypeText,
}
_, err := ss.PropertyField().Create(field1)
require.NoError(t, err)
originalUpdateAt := field1.UpdateAt
// Try to update both the valid field and a nonexistent one
field2 := &model.PropertyField{
ID: model.NewId(),
GroupID: model.NewId(),
Name: "Second field",
Type: model.PropertyFieldTypeText,
TargetID: model.NewId(),
TargetType: "test_type",
CreateAt: 1,
Attrs: map[string]any{
"key": "value",
},
}
field1.Name = "Updated First"
_, err = ss.PropertyField().Update([]*model.PropertyField{field1, field2})
require.Error(t, err)
require.ErrorContains(t, err, "failed to update, some property fields were not found")
// Check that the valid field was not updated
updated1, err := ss.PropertyField().Get("", field1.ID)
require.NoError(t, err)
require.Equal(t, "First field", updated1.Name)
require.Equal(t, originalUpdateAt, updated1.UpdateAt)
})
}
func testDeletePropertyField(t *testing.T, _ request.CTX, ss store.Store) {
@ -353,6 +394,97 @@ func testDeletePropertyField(t *testing.T, _ request.CTX, ss store.Store) {
})
}
// testCountForGroup covers PropertyFieldStore.CountForGroup: an empty group,
// a populated group, and both settings of the includeDeleted flag against
// soft-deleted fields. The repeated fixture code from the original version is
// factored into two local helpers so each subtest reads as intent only.
func testCountForGroup(t *testing.T, _ request.CTX, ss store.Store) {
	// createFields inserts n text property fields into groupID.
	createFields := func(t *testing.T, groupID string, n int) {
		t.Helper()

		for i := 0; i < n; i++ {
			field := &model.PropertyField{
				GroupID: groupID,
				Name:    fmt.Sprintf("Field %d", i),
				Type:    model.PropertyFieldTypeText,
			}

			_, err := ss.PropertyField().Create(field)
			require.NoError(t, err)
		}
	}

	// createDeletedField inserts one field into groupID and soft-deletes it.
	createDeletedField := func(t *testing.T, groupID string) {
		t.Helper()

		field := &model.PropertyField{
			GroupID: groupID,
			Name:    "To be deleted",
			Type:    model.PropertyFieldTypeText,
		}

		_, err := ss.PropertyField().Create(field)
		require.NoError(t, err)

		require.NoError(t, ss.PropertyField().Delete(field.ID))
	}

	t.Run("should return 0 for group with no properties", func(t *testing.T) {
		count, err := ss.PropertyField().CountForGroup(model.NewId(), false)
		require.NoError(t, err)
		require.Equal(t, int64(0), count)
	})

	t.Run("should return correct count for group with properties", func(t *testing.T) {
		groupID := model.NewId()
		createFields(t, groupID, 5)

		count, err := ss.PropertyField().CountForGroup(groupID, false)
		require.NoError(t, err)
		require.Equal(t, int64(5), count)
	})

	t.Run("should not count deleted properties when includeDeleted is false", func(t *testing.T) {
		groupID := model.NewId()
		createFields(t, groupID, 5)
		createDeletedField(t, groupID)

		// Count should be 5 since the deleted field shouldn't be counted
		count, err := ss.PropertyField().CountForGroup(groupID, false)
		require.NoError(t, err)
		require.Equal(t, int64(5), count)
	})

	t.Run("should count deleted properties when includeDeleted is true", func(t *testing.T) {
		groupID := model.NewId()
		createFields(t, groupID, 5)
		createDeletedField(t, groupID)

		// Count should be 6 since we're including deleted fields
		count, err := ss.PropertyField().CountForGroup(groupID, true)
		require.NoError(t, err)
		require.Equal(t, int64(6), count)
	})
}
func testSearchPropertyFields(t *testing.T, _ request.CTX, ss store.Store) {
groupID := model.NewId()
targetID := model.NewId()
@ -403,18 +535,9 @@ func testSearchPropertyFields(t *testing.T, _ request.CTX, ss store.Store) {
expectedError bool
expectedIDs []string
}{
{
name: "negative page",
opts: model.PropertyFieldSearchOpts{
Page: -1,
PerPage: 10,
},
expectedError: true,
},
{
name: "negative per_page",
opts: model.PropertyFieldSearchOpts{
Page: 0,
PerPage: -1,
},
expectedError: true,
@ -423,7 +546,6 @@ func testSearchPropertyFields(t *testing.T, _ request.CTX, ss store.Store) {
name: "filter by group_id",
opts: model.PropertyFieldSearchOpts{
GroupID: groupID,
Page: 0,
PerPage: 10,
},
expectedIDs: []string{field1.ID, field2.ID},
@ -432,7 +554,6 @@ func testSearchPropertyFields(t *testing.T, _ request.CTX, ss store.Store) {
name: "filter by group_id including deleted",
opts: model.PropertyFieldSearchOpts{
GroupID: groupID,
Page: 0,
PerPage: 10,
IncludeDeleted: true,
},
@ -442,7 +563,6 @@ func testSearchPropertyFields(t *testing.T, _ request.CTX, ss store.Store) {
name: "filter by target_type",
opts: model.PropertyFieldSearchOpts{
TargetType: "test_type",
Page: 0,
PerPage: 10,
},
expectedIDs: []string{field1.ID, field3.ID},
@ -451,7 +571,6 @@ func testSearchPropertyFields(t *testing.T, _ request.CTX, ss store.Store) {
name: "filter by target_id",
opts: model.PropertyFieldSearchOpts{
TargetID: targetID,
Page: 0,
PerPage: 10,
},
expectedIDs: []string{field1.ID, field2.ID},
@ -460,7 +579,6 @@ func testSearchPropertyFields(t *testing.T, _ request.CTX, ss store.Store) {
name: "pagination page 0",
opts: model.PropertyFieldSearchOpts{
GroupID: groupID,
Page: 0,
PerPage: 2,
IncludeDeleted: true,
},
@ -469,8 +587,11 @@ func testSearchPropertyFields(t *testing.T, _ request.CTX, ss store.Store) {
{
name: "pagination page 1",
opts: model.PropertyFieldSearchOpts{
GroupID: groupID,
Page: 1,
GroupID: groupID,
Cursor: model.PropertyFieldSearchCursor{
CreateAt: field2.CreateAt,
PropertyFieldID: field2.ID,
},
PerPage: 2,
IncludeDeleted: true,
},
@ -487,7 +608,7 @@ func testSearchPropertyFields(t *testing.T, _ request.CTX, ss store.Store) {
}
require.NoError(t, err)
var ids = make([]string, len(results))
ids := make([]string, len(results))
for i, field := range results {
ids[i] = field.ID
}

View File

@ -22,6 +22,7 @@ func TestPropertyValueStore(t *testing.T, rctx request.CTX, ss store.Store, s Sq
t.Run("GetPropertyValue", func(t *testing.T) { testGetPropertyValue(t, rctx, ss) })
t.Run("GetManyPropertyValues", func(t *testing.T) { testGetManyPropertyValues(t, rctx, ss) })
t.Run("UpdatePropertyValue", func(t *testing.T) { testUpdatePropertyValue(t, rctx, ss) })
t.Run("UpsertPropertyValue", func(t *testing.T) { testUpsertPropertyValue(t, rctx, ss) })
t.Run("DeletePropertyValue", func(t *testing.T) { testDeletePropertyValue(t, rctx, ss) })
t.Run("SearchPropertyValues", func(t *testing.T) { testSearchPropertyValues(t, rctx, ss) })
t.Run("DeleteForField", func(t *testing.T) { testDeleteForField(t, rctx, ss) })
@ -186,8 +187,7 @@ func testUpdatePropertyValue(t *testing.T, _ request.CTX, ss store.Store) {
}
updatedValue, err := ss.PropertyValue().Update([]*model.PropertyValue{value})
require.Zero(t, updatedValue)
var enf *store.ErrNotFound
require.ErrorAs(t, err, &enf)
require.ErrorContains(t, err, "failed to update, some property values were not found, got 0 of 1")
})
t.Run("should fail if the property value is not valid", func(t *testing.T) {
@ -257,7 +257,7 @@ func testUpdatePropertyValue(t *testing.T, _ request.CTX, ss store.Store) {
require.Greater(t, updated2.UpdateAt, updated2.CreateAt)
})
t.Run("should not update any fields if one update is invalid", func(t *testing.T) {
t.Run("should not update any values if one update is invalid", func(t *testing.T) {
// Create two valid values
groupID := model.NewId()
value1 := &model.PropertyValue{
@ -303,6 +303,208 @@ func testUpdatePropertyValue(t *testing.T, _ request.CTX, ss store.Store) {
require.Equal(t, groupID, updated2.GroupID)
require.Equal(t, originalUpdateAt2, updated2.UpdateAt)
})
t.Run("should not update any values if one update points to a nonexisting one", func(t *testing.T) {
// Create a valid value
value1 := &model.PropertyValue{
TargetID: model.NewId(),
TargetType: "test_type",
GroupID: model.NewId(),
FieldID: model.NewId(),
Value: json.RawMessage(`"Value 1"`),
}
_, err := ss.PropertyValue().Create(value1)
require.NoError(t, err)
originalUpdateAt := value1.UpdateAt
// Try to update both the valid value and a nonexistent one
value2 := &model.PropertyValue{
ID: model.NewId(),
TargetID: model.NewId(),
CreateAt: 1,
TargetType: "test_type",
GroupID: model.NewId(),
FieldID: model.NewId(),
Value: json.RawMessage(`"Value 2"`),
}
value1.Value = json.RawMessage(`"Updated Value 1"`)
_, err = ss.PropertyValue().Update([]*model.PropertyValue{value1, value2})
require.Error(t, err)
require.ErrorContains(t, err, "failed to update, some property values were not found")
// Check that the valid value was not updated
updated1, err := ss.PropertyValue().Get("", value1.ID)
require.NoError(t, err)
require.Equal(t, json.RawMessage(`"Value 1"`), updated1.Value)
require.Equal(t, originalUpdateAt, updated1.UpdateAt)
})
}
// testUpsertPropertyValue covers PropertyValueStore.Upsert: validation
// failures, pure inserts, updates of existing rows, mixed insert/update
// batches, and atomicity when one element of the batch is invalid.
func testUpsertPropertyValue(t *testing.T, _ request.CTX, ss store.Store) {
	t.Run("should fail if the property value is not valid", func(t *testing.T) {
		// Invalid: empty TargetID.
		value := &model.PropertyValue{
			TargetID:   "",
			TargetType: "test_type",
			GroupID:    model.NewId(),
			FieldID:    model.NewId(),
			Value:      json.RawMessage(`"test value"`),
		}

		updatedValue, err := ss.PropertyValue().Upsert([]*model.PropertyValue{value})
		require.Zero(t, updatedValue)
		require.ErrorContains(t, err, "model.property_value.is_valid.app_error")

		// Invalid: empty GroupID.
		value.TargetID = model.NewId()
		value.GroupID = ""
		updatedValue, err = ss.PropertyValue().Upsert([]*model.PropertyValue{value})
		require.Zero(t, updatedValue)
		require.ErrorContains(t, err, "model.property_value.is_valid.app_error")
	})

	t.Run("should be able to insert new property values", func(t *testing.T) {
		value1 := &model.PropertyValue{
			TargetID:   model.NewId(),
			TargetType: "test_type",
			GroupID:    model.NewId(),
			FieldID:    model.NewId(),
			Value:      json.RawMessage(`"value 1"`),
		}

		value2 := &model.PropertyValue{
			TargetID:   model.NewId(),
			TargetType: "test_type",
			GroupID:    model.NewId(),
			FieldID:    model.NewId(),
			Value:      json.RawMessage(`"value 2"`),
		}

		// Upsert with no pre-existing rows behaves as an insert and fills in
		// ID/CreateAt on the returned values.
		values, err := ss.PropertyValue().Upsert([]*model.PropertyValue{value1, value2})
		require.NoError(t, err)
		require.Len(t, values, 2)
		require.NotEmpty(t, values[0].ID)
		require.NotEmpty(t, values[1].ID)
		require.NotZero(t, values[0].CreateAt)
		require.NotZero(t, values[1].CreateAt)

		// Both rows must be retrievable from the store afterwards.
		valuesFromStore, err := ss.PropertyValue().GetMany("", []string{values[0].ID, values[1].ID})
		require.NoError(t, err)
		require.Len(t, valuesFromStore, 2)
	})

	t.Run("should be able to update existing property values", func(t *testing.T) {
		// Create initial value
		value := &model.PropertyValue{
			TargetID:   model.NewId(),
			TargetType: "test_type",
			GroupID:    model.NewId(),
			FieldID:    model.NewId(),
			Value:      json.RawMessage(`"initial value"`),
		}
		_, err := ss.PropertyValue().Create(value)
		require.NoError(t, err)
		valueID := value.ID

		// Timestamps have millisecond resolution; sleep so UpdateAt is
		// guaranteed to exceed CreateAt after the upsert.
		time.Sleep(10 * time.Millisecond)

		// Update via upsert
		// Clearing the ID forces the match to happen on the
		// (GroupID, TargetID, FieldID) conflict target instead.
		value.ID = ""
		value.Value = json.RawMessage(`"updated value"`)
		values, err := ss.PropertyValue().Upsert([]*model.PropertyValue{value})
		require.NoError(t, err)
		require.Len(t, values, 1)
		require.Equal(t, valueID, values[0].ID)
		require.Equal(t, json.RawMessage(`"updated value"`), values[0].Value)
		require.Greater(t, values[0].UpdateAt, values[0].CreateAt)

		// Verify in database
		updated, err := ss.PropertyValue().Get("", valueID)
		require.NoError(t, err)
		require.Equal(t, json.RawMessage(`"updated value"`), updated.Value)
		require.Greater(t, updated.UpdateAt, updated.CreateAt)
	})

	t.Run("should handle mixed insert and update operations", func(t *testing.T) {
		// Create first value
		existingValue := &model.PropertyValue{
			TargetID:   model.NewId(),
			TargetType: "test_type",
			GroupID:    model.NewId(),
			FieldID:    model.NewId(),
			Value:      json.RawMessage(`"existing value"`),
		}
		_, err := ss.PropertyValue().Create(existingValue)
		require.NoError(t, err)

		// Prepare new value
		newValue := &model.PropertyValue{
			TargetID:   model.NewId(),
			TargetType: "test_type",
			GroupID:    model.NewId(),
			FieldID:    model.NewId(),
			Value:      json.RawMessage(`"new value"`),
		}

		// Update existing and insert new via upsert
		existingValue.Value = json.RawMessage(`"updated existing"`)
		values, err := ss.PropertyValue().Upsert([]*model.PropertyValue{existingValue, newValue})
		require.NoError(t, err)
		require.Len(t, values, 2)

		// Verify both values
		newValueUpserted, err := ss.PropertyValue().Get("", newValue.ID)
		require.NoError(t, err)
		require.Equal(t, json.RawMessage(`"new value"`), newValueUpserted.Value)

		existingValueUpserted, err := ss.PropertyValue().Get("", existingValue.ID)
		require.NoError(t, err)
		require.Equal(t, json.RawMessage(`"updated existing"`), existingValueUpserted.Value)
	})

	t.Run("should not perform any operation if one of the fields is invalid", func(t *testing.T) {
		// Create initial valid value
		existingValue := &model.PropertyValue{
			TargetID:   model.NewId(),
			TargetType: "test_type",
			GroupID:    model.NewId(),
			FieldID:    model.NewId(),
			Value:      json.RawMessage(`"existing value"`),
		}
		_, err := ss.PropertyValue().Create(existingValue)
		require.NoError(t, err)
		// Keep a copy to assert nothing changed after the failed batch.
		originalValue := *existingValue

		// Prepare an invalid value
		invalidValue := &model.PropertyValue{
			TargetID:   model.NewId(),
			TargetType: "test_type",
			GroupID:    "", // Invalid: empty group ID
			FieldID:    model.NewId(),
			Value:      json.RawMessage(`"new value"`),
		}

		// Try to update existing and insert invalid via upsert
		existingValue.Value = json.RawMessage(`"should not update"`)
		_, err = ss.PropertyValue().Upsert([]*model.PropertyValue{existingValue, invalidValue})
		require.Error(t, err)
		require.Contains(t, err.Error(), "model.property_value.is_valid.app_error")

		// Verify the existing value was not changed
		retrieved, err := ss.PropertyValue().Get("", existingValue.ID)
		require.NoError(t, err)
		require.Equal(t, originalValue.Value, retrieved.Value)
		require.Equal(t, originalValue.UpdateAt, retrieved.UpdateAt)

		// Verify the invalid value was not inserted
		results, err := ss.PropertyValue().SearchPropertyValues(model.PropertyValueSearchOpts{
			TargetID: invalidValue.TargetID,
			PerPage:  10,
		})
		require.NoError(t, err)
		require.Empty(t, results)
	})
}
func testDeletePropertyValue(t *testing.T, _ request.CTX, ss store.Store) {
@ -401,18 +603,9 @@ func testSearchPropertyValues(t *testing.T, _ request.CTX, ss store.Store) {
expectedError bool
expectedIDs []string
}{
{
name: "negative page",
opts: model.PropertyValueSearchOpts{
Page: -1,
PerPage: 10,
},
expectedError: true,
},
{
name: "negative per_page",
opts: model.PropertyValueSearchOpts{
Page: 0,
PerPage: -1,
},
expectedError: true,
@ -421,7 +614,6 @@ func testSearchPropertyValues(t *testing.T, _ request.CTX, ss store.Store) {
name: "filter by group_id",
opts: model.PropertyValueSearchOpts{
GroupID: groupID,
Page: 0,
PerPage: 10,
},
expectedIDs: []string{value1.ID, value2.ID},
@ -431,7 +623,6 @@ func testSearchPropertyValues(t *testing.T, _ request.CTX, ss store.Store) {
opts: model.PropertyValueSearchOpts{
GroupID: groupID,
TargetType: "test_type",
Page: 0,
PerPage: 10,
},
expectedIDs: []string{value1.ID},
@ -442,7 +633,6 @@ func testSearchPropertyValues(t *testing.T, _ request.CTX, ss store.Store) {
GroupID: groupID,
TargetType: "test_type",
IncludeDeleted: true,
Page: 0,
PerPage: 10,
},
expectedIDs: []string{value1.ID, value4.ID},
@ -451,7 +641,6 @@ func testSearchPropertyValues(t *testing.T, _ request.CTX, ss store.Store) {
name: "filter by target_id",
opts: model.PropertyValueSearchOpts{
TargetID: targetID,
Page: 0,
PerPage: 10,
},
expectedIDs: []string{value1.ID, value2.ID},
@ -461,7 +650,6 @@ func testSearchPropertyValues(t *testing.T, _ request.CTX, ss store.Store) {
opts: model.PropertyValueSearchOpts{
GroupID: groupID,
TargetID: targetID,
Page: 0,
PerPage: 10,
},
expectedIDs: []string{value1.ID, value2.ID},
@ -470,7 +658,6 @@ func testSearchPropertyValues(t *testing.T, _ request.CTX, ss store.Store) {
name: "filter by field_id",
opts: model.PropertyValueSearchOpts{
FieldID: fieldID,
Page: 0,
PerPage: 10,
},
expectedIDs: []string{value1.ID},
@ -480,7 +667,6 @@ func testSearchPropertyValues(t *testing.T, _ request.CTX, ss store.Store) {
opts: model.PropertyValueSearchOpts{
FieldID: fieldID,
IncludeDeleted: true,
Page: 0,
PerPage: 10,
},
expectedIDs: []string{value1.ID, value4.ID},
@ -489,7 +675,6 @@ func testSearchPropertyValues(t *testing.T, _ request.CTX, ss store.Store) {
name: "pagination page 0",
opts: model.PropertyValueSearchOpts{
GroupID: groupID,
Page: 0,
PerPage: 1,
},
expectedIDs: []string{value1.ID},
@ -498,7 +683,10 @@ func testSearchPropertyValues(t *testing.T, _ request.CTX, ss store.Store) {
name: "pagination page 1",
opts: model.PropertyValueSearchOpts{
GroupID: groupID,
Page: 1,
Cursor: model.PropertyValueSearchCursor{
CreateAt: value1.CreateAt,
PropertyValueID: value1.ID,
},
PerPage: 1,
},
expectedIDs: []string{value2.ID},
@ -514,7 +702,7 @@ func testSearchPropertyValues(t *testing.T, _ request.CTX, ss store.Store) {
}
require.NoError(t, err)
var ids = make([]string, len(results))
ids := make([]string, len(results))
for i, value := range results {
ids[i] = value.ID
}

View File

@ -2614,10 +2614,10 @@ func (s *TimerLayerChannelBookmarkStore) Delete(bookmarkID string, deleteFile bo
return err
}
func (s *TimerLayerChannelBookmarkStore) ErrorIfBookmarkFileInfoAlreadyAttached(fileID string) error {
func (s *TimerLayerChannelBookmarkStore) ErrorIfBookmarkFileInfoAlreadyAttached(fileID string, channelID string) error {
start := time.Now()
err := s.ChannelBookmarkStore.ErrorIfBookmarkFileInfoAlreadyAttached(fileID)
err := s.ChannelBookmarkStore.ErrorIfBookmarkFileInfoAlreadyAttached(fileID, channelID)
elapsed := float64(time.Since(start)) / float64(time.Second)
if s.Root.Metrics != nil {
@ -7105,6 +7105,22 @@ func (s *TimerLayerProductNoticesStore) View(userID string, notices []string) er
return err
}
func (s *TimerLayerPropertyFieldStore) CountForGroup(groupID string, includeDeleted bool) (int64, error) {
start := time.Now()
result, err := s.PropertyFieldStore.CountForGroup(groupID, includeDeleted)
elapsed := float64(time.Since(start)) / float64(time.Second)
if s.Root.Metrics != nil {
success := "false"
if err == nil {
success = "true"
}
s.Root.Metrics.ObserveStoreMethodDuration("PropertyFieldStore.CountForGroup", success, elapsed)
}
return result, err
}
func (s *TimerLayerPropertyFieldStore) Create(field *model.PropertyField) (*model.PropertyField, error) {
start := time.Now()
@ -7185,10 +7201,10 @@ func (s *TimerLayerPropertyFieldStore) SearchPropertyFields(opts model.PropertyF
return result, err
}
func (s *TimerLayerPropertyFieldStore) Update(field []*model.PropertyField) ([]*model.PropertyField, error) {
func (s *TimerLayerPropertyFieldStore) Update(fields []*model.PropertyField) ([]*model.PropertyField, error) {
start := time.Now()
result, err := s.PropertyFieldStore.Update(field)
result, err := s.PropertyFieldStore.Update(fields)
elapsed := float64(time.Since(start)) / float64(time.Second)
if s.Root.Metrics != nil {
@ -7329,10 +7345,10 @@ func (s *TimerLayerPropertyValueStore) SearchPropertyValues(opts model.PropertyV
return result, err
}
func (s *TimerLayerPropertyValueStore) Update(field []*model.PropertyValue) ([]*model.PropertyValue, error) {
func (s *TimerLayerPropertyValueStore) Update(values []*model.PropertyValue) ([]*model.PropertyValue, error) {
start := time.Now()
result, err := s.PropertyValueStore.Update(field)
result, err := s.PropertyValueStore.Update(values)
elapsed := float64(time.Since(start)) / float64(time.Second)
if s.Root.Metrics != nil {
@ -7345,6 +7361,22 @@ func (s *TimerLayerPropertyValueStore) Update(field []*model.PropertyValue) ([]*
return result, err
}
func (s *TimerLayerPropertyValueStore) Upsert(values []*model.PropertyValue) ([]*model.PropertyValue, error) {
start := time.Now()
result, err := s.PropertyValueStore.Upsert(values)
elapsed := float64(time.Since(start)) / float64(time.Second)
if s.Root.Metrics != nil {
success := "false"
if err == nil {
success = "true"
}
s.Root.Metrics.ObserveStoreMethodDuration("PropertyValueStore.Upsert", success, elapsed)
}
return result, err
}
func (s *TimerLayerReactionStore) BulkGetForPosts(postIds []string) ([]*model.Reaction, error) {
start := time.Now()

View File

@ -108,16 +108,16 @@ type MetricsInterface interface {
ObserveClientTimeToLastByte(platform, agent, userID string, elapsed float64)
ObserveClientTimeToDomInteractive(platform, agent, userID string, elapsed float64)
ObserveClientSplashScreenEnd(platform, agent, pageType, userID string, elapsed float64)
ObserveClientFirstContentfulPaint(platform, agent string, elapsed float64)
ObserveClientLargestContentfulPaint(platform, agent, region string, elapsed float64)
ObserveClientInteractionToNextPaint(platform, agent, interaction string, elapsed float64)
ObserveClientCumulativeLayoutShift(platform, agent string, elapsed float64)
IncrementClientLongTasks(platform, agent string, inc float64)
ObserveClientFirstContentfulPaint(platform, agent, userID string, elapsed float64)
ObserveClientLargestContentfulPaint(platform, agent, region, userID string, elapsed float64)
ObserveClientInteractionToNextPaint(platform, agent, interaction, userID string, elapsed float64)
ObserveClientCumulativeLayoutShift(platform, agent, userID string, elapsed float64)
IncrementClientLongTasks(platform, agent, userID string, inc float64)
ObserveClientPageLoadDuration(platform, agent, userID string, elapsed float64)
ObserveClientChannelSwitchDuration(platform, agent, fresh string, elapsed float64)
ObserveClientTeamSwitchDuration(platform, agent, fresh string, elapsed float64)
ObserveClientRHSLoadDuration(platform, agent string, elapsed float64)
ObserveGlobalThreadsLoadDuration(platform, agent string, elapsed float64)
ObserveClientChannelSwitchDuration(platform, agent, fresh, userID string, elapsed float64)
ObserveClientTeamSwitchDuration(platform, agent, fresh, userID string, elapsed float64)
ObserveClientRHSLoadDuration(platform, agent, userID string, elapsed float64)
ObserveGlobalThreadsLoadDuration(platform, agent, userID string, elapsed float64)
ObserveMobileClientLoadDuration(platform string, elapsed float64)
ObserveMobileClientChannelSwitchDuration(platform string, elapsed float64)
ObserveMobileClientTeamSwitchDuration(platform string, elapsed float64)

View File

@ -78,9 +78,9 @@ func (_m *MetricsInterface) IncrementChannelIndexCounter() {
_m.Called()
}
// IncrementClientLongTasks provides a mock function with given fields: platform, agent, inc
func (_m *MetricsInterface) IncrementClientLongTasks(platform string, agent string, inc float64) {
_m.Called(platform, agent, inc)
// IncrementClientLongTasks provides a mock function with given fields: platform, agent, userID, inc
func (_m *MetricsInterface) IncrementClientLongTasks(platform string, agent string, userID string, inc float64) {
_m.Called(platform, agent, userID, inc)
}
// IncrementClusterEventType provides a mock function with given fields: eventType
@ -303,29 +303,29 @@ func (_m *MetricsInterface) ObserveAPIEndpointDuration(endpoint string, method s
_m.Called(endpoint, method, statusCode, originClient, pageLoadContext, elapsed)
}
// ObserveClientChannelSwitchDuration provides a mock function with given fields: platform, agent, fresh, elapsed
func (_m *MetricsInterface) ObserveClientChannelSwitchDuration(platform string, agent string, fresh string, elapsed float64) {
_m.Called(platform, agent, fresh, elapsed)
// ObserveClientChannelSwitchDuration provides a mock function with given fields: platform, agent, fresh, userID, elapsed
func (_m *MetricsInterface) ObserveClientChannelSwitchDuration(platform string, agent string, fresh string, userID string, elapsed float64) {
_m.Called(platform, agent, fresh, userID, elapsed)
}
// ObserveClientCumulativeLayoutShift provides a mock function with given fields: platform, agent, elapsed
func (_m *MetricsInterface) ObserveClientCumulativeLayoutShift(platform string, agent string, elapsed float64) {
_m.Called(platform, agent, elapsed)
// ObserveClientCumulativeLayoutShift provides a mock function with given fields: platform, agent, userID, elapsed
func (_m *MetricsInterface) ObserveClientCumulativeLayoutShift(platform string, agent string, userID string, elapsed float64) {
_m.Called(platform, agent, userID, elapsed)
}
// ObserveClientFirstContentfulPaint provides a mock function with given fields: platform, agent, elapsed
func (_m *MetricsInterface) ObserveClientFirstContentfulPaint(platform string, agent string, elapsed float64) {
_m.Called(platform, agent, elapsed)
// ObserveClientFirstContentfulPaint provides a mock function with given fields: platform, agent, userID, elapsed
func (_m *MetricsInterface) ObserveClientFirstContentfulPaint(platform string, agent string, userID string, elapsed float64) {
_m.Called(platform, agent, userID, elapsed)
}
// ObserveClientInteractionToNextPaint provides a mock function with given fields: platform, agent, interaction, elapsed
func (_m *MetricsInterface) ObserveClientInteractionToNextPaint(platform string, agent string, interaction string, elapsed float64) {
_m.Called(platform, agent, interaction, elapsed)
// ObserveClientInteractionToNextPaint provides a mock function with given fields: platform, agent, interaction, userID, elapsed
func (_m *MetricsInterface) ObserveClientInteractionToNextPaint(platform string, agent string, interaction string, userID string, elapsed float64) {
_m.Called(platform, agent, interaction, userID, elapsed)
}
// ObserveClientLargestContentfulPaint provides a mock function with given fields: platform, agent, region, elapsed
func (_m *MetricsInterface) ObserveClientLargestContentfulPaint(platform string, agent string, region string, elapsed float64) {
_m.Called(platform, agent, region, elapsed)
// ObserveClientLargestContentfulPaint provides a mock function with given fields: platform, agent, region, userID, elapsed
func (_m *MetricsInterface) ObserveClientLargestContentfulPaint(platform string, agent string, region string, userID string, elapsed float64) {
_m.Called(platform, agent, region, userID, elapsed)
}
// ObserveClientPageLoadDuration provides a mock function with given fields: platform, agent, userID, elapsed
@ -333,9 +333,9 @@ func (_m *MetricsInterface) ObserveClientPageLoadDuration(platform string, agent
_m.Called(platform, agent, userID, elapsed)
}
// ObserveClientRHSLoadDuration provides a mock function with given fields: platform, agent, elapsed
func (_m *MetricsInterface) ObserveClientRHSLoadDuration(platform string, agent string, elapsed float64) {
_m.Called(platform, agent, elapsed)
// ObserveClientRHSLoadDuration provides a mock function with given fields: platform, agent, userID, elapsed
func (_m *MetricsInterface) ObserveClientRHSLoadDuration(platform string, agent string, userID string, elapsed float64) {
_m.Called(platform, agent, userID, elapsed)
}
// ObserveClientSplashScreenEnd provides a mock function with given fields: platform, agent, pageType, userID, elapsed
@ -343,9 +343,9 @@ func (_m *MetricsInterface) ObserveClientSplashScreenEnd(platform string, agent
_m.Called(platform, agent, pageType, userID, elapsed)
}
// ObserveClientTeamSwitchDuration provides a mock function with given fields: platform, agent, fresh, elapsed
func (_m *MetricsInterface) ObserveClientTeamSwitchDuration(platform string, agent string, fresh string, elapsed float64) {
_m.Called(platform, agent, fresh, elapsed)
// ObserveClientTeamSwitchDuration provides a mock function with given fields: platform, agent, fresh, userID, elapsed
func (_m *MetricsInterface) ObserveClientTeamSwitchDuration(platform string, agent string, fresh string, userID string, elapsed float64) {
_m.Called(platform, agent, fresh, userID, elapsed)
}
// ObserveClientTimeToDomInteractive provides a mock function with given fields: platform, agent, userID, elapsed
@ -388,9 +388,9 @@ func (_m *MetricsInterface) ObserveFilesSearchDuration(elapsed float64) {
_m.Called(elapsed)
}
// ObserveGlobalThreadsLoadDuration provides a mock function with given fields: platform, agent, elapsed
func (_m *MetricsInterface) ObserveGlobalThreadsLoadDuration(platform string, agent string, elapsed float64) {
_m.Called(platform, agent, elapsed)
// ObserveGlobalThreadsLoadDuration provides a mock function with given fields: platform, agent, userID, elapsed
func (_m *MetricsInterface) ObserveGlobalThreadsLoadDuration(platform string, agent string, userID string, elapsed float64) {
_m.Called(platform, agent, userID, elapsed)
}
// ObserveMobileClientChannelSwitchDuration provides a mock function with given fields: platform, elapsed

View File

@ -55,6 +55,8 @@ type MetricsInterfaceImpl struct {
Registry *prometheus.Registry
ClientSideUserIds map[string]bool
DbMasterConnectionsGauge prometheus.GaugeFunc
DbReadConnectionsGauge prometheus.GaugeFunc
DbSearchConnectionsGauge prometheus.GaugeFunc
@ -240,7 +242,7 @@ func init() {
})
}
// New creates a new MetricsInterface. The driver and datasoruce parameters are added during
// New creates a new MetricsInterface. The driver and datasource parameters are added during
// migrating configuration store to the new platform service. Once the store and license are migrated,
// we will be able to remove server dependency and lean on platform service during initialization.
func New(ps *platform.PlatformService, driver, dataSource string) *MetricsInterfaceImpl {
@ -248,6 +250,12 @@ func New(ps *platform.PlatformService, driver, dataSource string) *MetricsInterf
Platform: ps,
}
// Initialize ClientSideUserIds map
m.ClientSideUserIds = make(map[string]bool)
for _, userId := range ps.Config().MetricsSettings.ClientSideUserIds {
m.ClientSideUserIds[userId] = true
}
m.Registry = prometheus.NewRegistry()
options := collectors.ProcessCollectorOpts{
Namespace: MetricsNamespace,
@ -1196,7 +1204,7 @@ func New(ps *platform.PlatformService, driver, dataSource string) *MetricsInterf
Help: "Duration from when a browser starts to request a page from a server until when it starts to receive data in response (seconds)",
ConstLabels: additionalLabels,
},
[]string{"platform", "agent"},
[]string{"platform", "agent", "user_id"},
m.Platform.Log(),
)
m.Registry.MustRegister(m.ClientTimeToFirstByte)
@ -1209,7 +1217,7 @@ func New(ps *platform.PlatformService, driver, dataSource string) *MetricsInterf
Help: "Duration from when a browser starts to request a page from a server until when it receives the last byte of the resource or immediately before the transport connection is closed, whichever comes first. (seconds)",
ConstLabels: additionalLabels,
},
[]string{"platform", "agent"},
[]string{"platform", "agent", "user_id"},
m.Platform.Log(),
)
m.Registry.MustRegister(m.ClientTimeToLastByte)
@ -1223,7 +1231,7 @@ func New(ps *platform.PlatformService, driver, dataSource string) *MetricsInterf
Buckets: []float64{.1, .25, .5, 1, 2.5, 5, 7.5, 10, 12.5, 15},
ConstLabels: additionalLabels,
},
[]string{"platform", "agent"},
[]string{"platform", "agent", "user_id"},
m.Platform.Log(),
)
m.Registry.MustRegister(m.ClientTimeToDOMInteractive)
@ -1237,7 +1245,7 @@ func New(ps *platform.PlatformService, driver, dataSource string) *MetricsInterf
Buckets: []float64{.1, .25, .5, 1, 2.5, 5, 7.5, 10, 12.5, 15},
ConstLabels: additionalLabels,
},
[]string{"platform", "agent", "page_type"},
[]string{"platform", "agent", "page_type", "user_id"},
m.Platform.Log(),
)
m.Registry.MustRegister(m.ClientSplashScreenEnd)
@ -1253,7 +1261,7 @@ func New(ps *platform.PlatformService, driver, dataSource string) *MetricsInterf
Buckets: []float64{.005, .01, .025, .05, .1, .25, .5, 1, 2.5, 5, 10, 15, 20},
ConstLabels: additionalLabels,
},
[]string{"platform", "agent"},
[]string{"platform", "agent", "user_id"},
)
m.Registry.MustRegister(m.ClientFirstContentfulPaint)
@ -1268,7 +1276,7 @@ func New(ps *platform.PlatformService, driver, dataSource string) *MetricsInterf
Buckets: []float64{.005, .01, .025, .05, .1, .25, .5, 1, 2.5, 5, 10, 15, 20},
ConstLabels: additionalLabels,
},
[]string{"platform", "agent", "region"},
[]string{"platform", "agent", "region", "user_id"},
)
m.Registry.MustRegister(m.ClientLargestContentfulPaint)
@ -1280,7 +1288,7 @@ func New(ps *platform.PlatformService, driver, dataSource string) *MetricsInterf
Help: "Measure of how long it takes for a user to see the effects of clicking with a mouse, tapping with a touchscreen, or pressing a key on the keyboard (seconds)",
ConstLabels: additionalLabels,
},
[]string{"platform", "agent", "interaction"},
[]string{"platform", "agent", "interaction", "user_id"},
)
m.Registry.MustRegister(m.ClientInteractionToNextPaint)
@ -1292,7 +1300,7 @@ func New(ps *platform.PlatformService, driver, dataSource string) *MetricsInterf
Help: "Measure of how much a page's content shifts unexpectedly",
ConstLabels: additionalLabels,
},
[]string{"platform", "agent"},
[]string{"platform", "agent", "user_id"},
)
m.Registry.MustRegister(m.ClientCumulativeLayoutShift)
@ -1304,7 +1312,7 @@ func New(ps *platform.PlatformService, driver, dataSource string) *MetricsInterf
Help: "Counter of the number of times that the browser's main UI thread is blocked for more than 50ms by a single task",
ConstLabels: additionalLabels,
},
[]string{"platform", "agent"},
[]string{"platform", "agent", "user_id"},
)
m.Registry.MustRegister(m.ClientLongTasks)
@ -1317,7 +1325,7 @@ func New(ps *platform.PlatformService, driver, dataSource string) *MetricsInterf
Buckets: []float64{.005, .01, .025, .05, .1, .25, .5, 1, 2.5, 5, 10, 20, 40},
ConstLabels: additionalLabels,
},
[]string{"platform", "agent"},
[]string{"platform", "agent", "user_id"},
m.Platform.Log(),
)
m.Registry.MustRegister(m.ClientPageLoadDuration)
@ -1330,7 +1338,7 @@ func New(ps *platform.PlatformService, driver, dataSource string) *MetricsInterf
Help: "Duration of the time taken from when a user clicks on a channel in the LHS to when posts in that channel become visible (seconds)",
ConstLabels: additionalLabels,
},
[]string{"platform", "agent", "fresh"},
[]string{"platform", "agent", "fresh", "user_id"},
)
m.Registry.MustRegister(m.ClientChannelSwitchDuration)
@ -1342,7 +1350,7 @@ func New(ps *platform.PlatformService, driver, dataSource string) *MetricsInterf
Help: "Duration of the time taken from when a user clicks on a team in the LHS to when posts in that team become visible (seconds)",
ConstLabels: additionalLabels,
},
[]string{"platform", "agent", "fresh"},
[]string{"platform", "agent", "fresh", "user_id"},
)
m.Registry.MustRegister(m.ClientTeamSwitchDuration)
@ -1354,7 +1362,7 @@ func New(ps *platform.PlatformService, driver, dataSource string) *MetricsInterf
Help: "Duration of the time taken from when a user clicks to open a thread in the RHS until when posts in that thread become visible (seconds)",
ConstLabels: additionalLabels,
},
[]string{"platform", "agent"},
[]string{"platform", "agent", "user_id"},
)
m.Registry.MustRegister(m.ClientRHSLoadDuration)
@ -1366,7 +1374,7 @@ func New(ps *platform.PlatformService, driver, dataSource string) *MetricsInterf
Help: "Duration of the time taken from when a user clicks to open Threads in the LHS until when the global threads view becomes visible (milliseconds)",
ConstLabels: additionalLabels,
},
[]string{"platform", "agent"},
[]string{"platform", "agent", "user_id"},
)
m.Registry.MustRegister(m.ClientGlobalThreadsLoadDuration)
@ -2024,63 +2032,81 @@ func (mi *MetricsInterfaceImpl) DecrementHTTPWebSockets(originClient string) {
mi.HTTPWebsocketsGauge.With(prometheus.Labels{"origin_client": originClient}).Dec()
}
func (mi *MetricsInterfaceImpl) getEffectiveUserID(userID string) string {
if mi.ClientSideUserIds[userID] {
return userID
}
return "<placeholder>"
}
func (mi *MetricsInterfaceImpl) ObserveClientTimeToFirstByte(platform, agent, userID string, elapsed float64) {
mi.ClientTimeToFirstByte.With(prometheus.Labels{"platform": platform, "agent": agent}, userID).Observe(elapsed)
effectiveUserID := mi.getEffectiveUserID(userID)
mi.ClientTimeToFirstByte.With(prometheus.Labels{"platform": platform, "agent": agent, "user_id": effectiveUserID}, userID).Observe(elapsed)
}
func (mi *MetricsInterfaceImpl) ObserveClientTimeToLastByte(platform, agent, userID string, elapsed float64) {
mi.ClientTimeToLastByte.With(prometheus.Labels{"platform": platform, "agent": agent}, userID).Observe(elapsed)
effectiveUserID := mi.getEffectiveUserID(userID)
mi.ClientTimeToLastByte.With(prometheus.Labels{"platform": platform, "agent": agent, "user_id": effectiveUserID}, userID).Observe(elapsed)
}
func (mi *MetricsInterfaceImpl) ObserveClientTimeToDomInteractive(platform, agent, userID string, elapsed float64) {
mi.ClientTimeToDOMInteractive.With(prometheus.Labels{"platform": platform, "agent": agent}, userID).Observe(elapsed)
effectiveUserID := mi.getEffectiveUserID(userID)
mi.ClientTimeToDOMInteractive.With(prometheus.Labels{"platform": platform, "agent": agent, "user_id": effectiveUserID}, userID).Observe(elapsed)
}
func (mi *MetricsInterfaceImpl) ObserveClientSplashScreenEnd(platform, agent, pageType, userID string, elapsed float64) {
mi.ClientSplashScreenEnd.With(prometheus.Labels{"platform": platform, "agent": agent, "page_type": pageType}, userID).Observe(elapsed)
effectiveUserID := mi.getEffectiveUserID(userID)
mi.ClientSplashScreenEnd.With(prometheus.Labels{"platform": platform, "agent": agent, "page_type": pageType, "user_id": effectiveUserID}, userID).Observe(elapsed)
}
func (mi *MetricsInterfaceImpl) ObserveClientFirstContentfulPaint(platform, agent string, elapsed float64) {
mi.ClientFirstContentfulPaint.With(prometheus.Labels{"platform": platform, "agent": agent}).Observe(elapsed)
func (mi *MetricsInterfaceImpl) ObserveClientFirstContentfulPaint(platform, agent, userID string, elapsed float64) {
effectiveUserID := mi.getEffectiveUserID(userID)
mi.ClientFirstContentfulPaint.With(prometheus.Labels{"platform": platform, "agent": agent, "user_id": effectiveUserID}).Observe(elapsed)
}
func (mi *MetricsInterfaceImpl) ObserveClientLargestContentfulPaint(platform, agent, region string, elapsed float64) {
mi.ClientLargestContentfulPaint.With(prometheus.Labels{"platform": platform, "agent": agent, "region": region}).Observe(elapsed)
func (mi *MetricsInterfaceImpl) ObserveClientLargestContentfulPaint(platform, agent, region, userID string, elapsed float64) {
effectiveUserID := mi.getEffectiveUserID(userID)
mi.ClientLargestContentfulPaint.With(prometheus.Labels{"platform": platform, "agent": agent, "region": region, "user_id": effectiveUserID}).Observe(elapsed)
}
func (mi *MetricsInterfaceImpl) ObserveClientInteractionToNextPaint(platform, agent, interaction string, elapsed float64) {
mi.ClientInteractionToNextPaint.With(prometheus.Labels{"platform": platform, "agent": agent, "interaction": interaction}).Observe(elapsed)
func (mi *MetricsInterfaceImpl) ObserveClientInteractionToNextPaint(platform, agent, interaction, userID string, elapsed float64) {
effectiveUserID := mi.getEffectiveUserID(userID)
mi.ClientInteractionToNextPaint.With(prometheus.Labels{"platform": platform, "agent": agent, "interaction": interaction, "user_id": effectiveUserID}).Observe(elapsed)
}
func (mi *MetricsInterfaceImpl) ObserveClientCumulativeLayoutShift(platform, agent string, elapsed float64) {
mi.ClientCumulativeLayoutShift.With(prometheus.Labels{"platform": platform, "agent": agent}).Observe(elapsed)
func (mi *MetricsInterfaceImpl) ObserveClientCumulativeLayoutShift(platform, agent, userID string, elapsed float64) {
effectiveUserID := mi.getEffectiveUserID(userID)
mi.ClientCumulativeLayoutShift.With(prometheus.Labels{"platform": platform, "agent": agent, "user_id": effectiveUserID}).Observe(elapsed)
}
func (mi *MetricsInterfaceImpl) IncrementClientLongTasks(platform, agent string, inc float64) {
mi.ClientLongTasks.With(prometheus.Labels{"platform": platform, "agent": agent}).Add(inc)
func (mi *MetricsInterfaceImpl) IncrementClientLongTasks(platform, agent, userID string, inc float64) {
effectiveUserID := mi.getEffectiveUserID(userID)
mi.ClientLongTasks.With(prometheus.Labels{"platform": platform, "agent": agent, "user_id": effectiveUserID}).Add(inc)
}
func (mi *MetricsInterfaceImpl) ObserveClientPageLoadDuration(platform, agent, userID string, elapsed float64) {
mi.ClientPageLoadDuration.With(prometheus.Labels{
"platform": platform,
"agent": agent,
}, userID).Observe(elapsed)
effectiveUserID := mi.getEffectiveUserID(userID)
mi.ClientPageLoadDuration.With(prometheus.Labels{"platform": platform, "agent": agent, "user_id": effectiveUserID}, userID).Observe(elapsed)
}
func (mi *MetricsInterfaceImpl) ObserveClientChannelSwitchDuration(platform, agent, fresh string, elapsed float64) {
mi.ClientChannelSwitchDuration.With(prometheus.Labels{"platform": platform, "agent": agent, "fresh": fresh}).Observe(elapsed)
func (mi *MetricsInterfaceImpl) ObserveClientChannelSwitchDuration(platform, agent, fresh, userID string, elapsed float64) {
effectiveUserID := mi.getEffectiveUserID(userID)
mi.ClientChannelSwitchDuration.With(prometheus.Labels{"platform": platform, "agent": agent, "fresh": fresh, "user_id": effectiveUserID}).Observe(elapsed)
}
func (mi *MetricsInterfaceImpl) ObserveClientTeamSwitchDuration(platform, agent, fresh string, elapsed float64) {
mi.ClientTeamSwitchDuration.With(prometheus.Labels{"platform": platform, "agent": agent, "fresh": fresh}).Observe(elapsed)
func (mi *MetricsInterfaceImpl) ObserveClientTeamSwitchDuration(platform, agent, fresh, userID string, elapsed float64) {
effectiveUserID := mi.getEffectiveUserID(userID)
mi.ClientTeamSwitchDuration.With(prometheus.Labels{"platform": platform, "agent": agent, "fresh": fresh, "user_id": effectiveUserID}).Observe(elapsed)
}
func (mi *MetricsInterfaceImpl) ObserveClientRHSLoadDuration(platform, agent string, elapsed float64) {
mi.ClientRHSLoadDuration.With(prometheus.Labels{"platform": platform, "agent": agent}).Observe(elapsed)
func (mi *MetricsInterfaceImpl) ObserveClientRHSLoadDuration(platform, agent, userID string, elapsed float64) {
effectiveUserID := mi.getEffectiveUserID(userID)
mi.ClientRHSLoadDuration.With(prometheus.Labels{"platform": platform, "agent": agent, "user_id": effectiveUserID}).Observe(elapsed)
}
func (mi *MetricsInterfaceImpl) ObserveGlobalThreadsLoadDuration(platform, agent string, elapsed float64) {
mi.ClientGlobalThreadsLoadDuration.With(prometheus.Labels{"platform": platform, "agent": agent}).Observe(elapsed)
func (mi *MetricsInterfaceImpl) ObserveGlobalThreadsLoadDuration(platform, agent, userID string, elapsed float64) {
effectiveUserID := mi.getEffectiveUserID(userID)
mi.ClientGlobalThreadsLoadDuration.With(prometheus.Labels{"platform": platform, "agent": agent, "user_id": effectiveUserID}).Observe(elapsed)
}
func (mi *MetricsInterfaceImpl) ObserveDesktopCpuUsage(platform, version, process string, usage float64) {

View File

@ -5006,6 +5006,10 @@
"id": "app.custom_group.unique_name",
"translation": "group name is not unique"
},
{
"id": "app.custom_profile_attributes.count_property_fields.app_error",
"translation": "Unable to count the number of fields for the custom profile attribute group"
},
{
"id": "app.custom_profile_attributes.cpa_group_id.app_error",
"translation": "Cannot register Custom Profile Attributes property group"
@ -5039,16 +5043,8 @@
"translation": "Unable to update Custom Profile Attribute field"
},
{
"id": "app.custom_profile_attributes.property_value_creation.app_error",
"translation": "Cannot create property value"
},
{
"id": "app.custom_profile_attributes.property_value_list.app_error",
"translation": "Unable to retrieve property values"
},
{
"id": "app.custom_profile_attributes.property_value_update.app_error",
"translation": "Cannot update property value"
"id": "app.custom_profile_attributes.property_value_upsert.app_error",
"translation": "Unable to upsert Custom Profile Attribute fields"
},
{
"id": "app.custom_profile_attributes.search_property_fields.app_error",
@ -9080,6 +9076,14 @@
"id": "model.config.is_valid.message_export.global_relay.smtp_username.app_error",
"translation": "Message export job GlobalRelaySettings.SmtpUsername must be set."
},
{
"id": "model.config.is_valid.metrics_client_side_user_id.app_error",
"translation": "Invalid client side user id: {{.Id}}"
},
{
"id": "model.config.is_valid.metrics_client_side_user_ids.app_error",
"translation": "Number of elements in ClientSideUserIds {{.CurrentLength}} is higher than maximum limit of {{.MaxLength}}."
},
{
"id": "model.config.is_valid.move_thread.domain_invalid.app_error",
"translation": "Invalid domain for move thread settings"
@ -9430,7 +9434,7 @@
},
{
"id": "model.incoming_hook.id.app_error",
"translation": "Invalid Id."
"translation": "Invalid Id: {{.Id}}."
},
{
"id": "model.incoming_hook.parse_data.app_error",

View File

@ -439,6 +439,7 @@ type ServiceSettings struct {
MaximumPayloadSizeBytes *int64 `access:"environment_file_storage,write_restrictable,cloud_restrictable"`
MaximumURLLength *int `access:"environment_file_storage,write_restrictable,cloud_restrictable"`
ScheduledPosts *bool `access:"site_posts"`
EnableWebHubChannelIteration *bool `access:"write_restrictable,cloud_restrictable"` // telemetry: none
}
var MattermostGiphySdkKey string
@ -962,6 +963,10 @@ func (s *ServiceSettings) SetDefaults(isUpdate bool) {
if s.ScheduledPosts == nil {
s.ScheduledPosts = NewPointer(true)
}
if s.EnableWebHubChannelIteration == nil {
s.EnableWebHubChannelIteration = NewPointer(false)
}
}
type CacheSettings struct {
@ -1071,11 +1076,12 @@ func (s *ClusterSettings) SetDefaults() {
}
type MetricsSettings struct {
Enable *bool `access:"environment_performance_monitoring,write_restrictable,cloud_restrictable"`
BlockProfileRate *int `access:"environment_performance_monitoring,write_restrictable,cloud_restrictable"`
ListenAddress *string `access:"environment_performance_monitoring,write_restrictable,cloud_restrictable"` // telemetry: none
EnableClientMetrics *bool `access:"environment_performance_monitoring,write_restrictable,cloud_restrictable"`
EnableNotificationMetrics *bool `access:"environment_performance_monitoring,write_restrictable,cloud_restrictable"`
Enable *bool `access:"environment_performance_monitoring,write_restrictable,cloud_restrictable"`
BlockProfileRate *int `access:"environment_performance_monitoring,write_restrictable,cloud_restrictable"`
ListenAddress *string `access:"environment_performance_monitoring,write_restrictable,cloud_restrictable"` // telemetry: none
EnableClientMetrics *bool `access:"environment_performance_monitoring,write_restrictable,cloud_restrictable"`
EnableNotificationMetrics *bool `access:"environment_performance_monitoring,write_restrictable,cloud_restrictable"`
ClientSideUserIds []string `access:"environment_performance_monitoring,write_restrictable,cloud_restrictable"` // telemetry: none
}
func (s *MetricsSettings) SetDefaults() {
@ -1098,6 +1104,23 @@ func (s *MetricsSettings) SetDefaults() {
if s.EnableNotificationMetrics == nil {
s.EnableNotificationMetrics = NewPointer(true)
}
if s.ClientSideUserIds == nil {
s.ClientSideUserIds = []string{}
}
}
func (s *MetricsSettings) isValid() *AppError {
const maxLength = 5
if len(s.ClientSideUserIds) > maxLength {
return NewAppError("MetricsSettings.IsValid", "model.config.is_valid.metrics_client_side_user_ids.app_error", map[string]any{"MaxLength": maxLength, "CurrentLength": len(s.ClientSideUserIds)}, "", http.StatusBadRequest)
}
for _, id := range s.ClientSideUserIds {
if !IsValidId(id) {
return NewAppError("MetricsSettings.IsValid", "model.config.is_valid.metrics_client_side_user_id.app_error", map[string]any{"Id": id}, "", http.StatusBadRequest)
}
}
return nil
}
type ExperimentalSettings struct {
@ -3806,6 +3829,10 @@ func (o *Config) IsValid() *AppError {
return NewAppError("Config.IsValid", "model.config.is_valid.cluster_email_batching.app_error", nil, "", http.StatusBadRequest)
}
if appErr := o.MetricsSettings.isValid(); appErr != nil {
return appErr
}
if appErr := o.CacheSettings.isValid(); appErr != nil {
return appErr
}

View File

@ -4,3 +4,19 @@
package model
const CustomProfileAttributesPropertyGroupName = "custom_profile_attributes"
const CustomProfileAttributesPropertyAttrsSortOrder = "sort_order"
func CustomProfileAttributesPropertySortOrder(p *PropertyField) int {
value, ok := p.Attrs[CustomProfileAttributesPropertyAttrsSortOrder]
if !ok {
return 0
}
order, ok := value.(float64)
if !ok {
return 0
}
return int(order)
}

View File

@ -66,7 +66,7 @@ type IncomingWebhooksWithCount struct {
func (o *IncomingWebhook) IsValid() *AppError {
if !IsValidId(o.Id) {
return NewAppError("IncomingWebhook.IsValid", "model.incoming_hook.id.app_error", nil, "", http.StatusBadRequest)
return NewAppError("IncomingWebhook.IsValid", "model.incoming_hook.id.app_error", map[string]any{"Id": o.Id}, "", http.StatusBadRequest)
}
if o.CreateAt == 0 {

View File

@ -4,6 +4,7 @@
package model
import (
"errors"
"net/http"
"strings"
)
@ -98,7 +99,7 @@ func (pf *PropertyField) SanitizeInput() {
type PropertyFieldPatch struct {
Name *string `json:"name"`
Type *PropertyFieldType `json:"type"`
Attrs *map[string]any `json:"attrs"`
Attrs *StringInterface `json:"attrs"`
TargetID *string `json:"target_id"`
TargetType *string `json:"target_type"`
}
@ -141,11 +142,39 @@ func (pf *PropertyField) Patch(patch *PropertyFieldPatch) {
}
}
// PropertyFieldSearchCursor identifies a position within a paginated property
// field search. An empty (zero-value) cursor means "start from the beginning".
type PropertyFieldSearchCursor struct {
	PropertyFieldID string
	CreateAt        int64
}

// IsEmpty reports whether the cursor is unset.
func (p PropertyFieldSearchCursor) IsEmpty() bool {
	return p.PropertyFieldID == "" && p.CreateAt == 0
}
// IsValid checks the cursor for consistency. An empty cursor is always valid;
// a non-empty one needs a positive CreateAt and a well-formed PropertyFieldID.
func (p PropertyFieldSearchCursor) IsValid() error {
	switch {
	case p.IsEmpty():
		return nil
	case p.CreateAt <= 0:
		return errors.New("create at cannot be negative or zero")
	case !IsValidId(p.PropertyFieldID):
		return errors.New("property field id is invalid")
	default:
		return nil
	}
}
// PropertyFieldSearchOpts carries the filters and pagination settings for a
// property field search. Both offset pagination (Page) and cursor pagination
// (Cursor) are represented here; which one a store honors is up to the
// implementation — NOTE(review): confirm against the store layer.
type PropertyFieldSearchOpts struct {
	GroupID        string
	TargetType     string
	TargetID       string
	IncludeDeleted bool
	Page           int
	Cursor         PropertyFieldSearchCursor
	PerPage        int
}
// GetAttr returns the attribute stored under key, or nil when the key is not
// present in Attrs.
func (pf *PropertyField) GetAttr(key string) any {
	return pf.Attrs[key]
}

View File

@ -0,0 +1,218 @@
// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved.
// See LICENSE.txt for license information.
package model
import (
"testing"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
// TestPropertyField_PreSave covers the defaults PreSave fills in: a generated
// ID when none is set (existing IDs are preserved), CreateAt when zero, and
// UpdateAt mirroring CreateAt on first save.
func TestPropertyField_PreSave(t *testing.T) {
	t.Run("sets ID if empty", func(t *testing.T) {
		pf := &PropertyField{}
		pf.PreSave()
		assert.NotEmpty(t, pf.ID)
		assert.Len(t, pf.ID, 26) // Length of NewId()
	})

	t.Run("keeps existing ID", func(t *testing.T) {
		pf := &PropertyField{ID: "existing_id"}
		pf.PreSave()
		assert.Equal(t, "existing_id", pf.ID)
	})

	t.Run("sets CreateAt if zero", func(t *testing.T) {
		pf := &PropertyField{}
		pf.PreSave()
		assert.NotZero(t, pf.CreateAt)
	})

	t.Run("sets UpdateAt equal to CreateAt", func(t *testing.T) {
		pf := &PropertyField{}
		pf.PreSave()
		assert.Equal(t, pf.CreateAt, pf.UpdateAt)
	})
}
// TestPropertyField_IsValid starts from a fully valid field and then breaks
// one property per subtest (ID, GroupID, Name, Type, CreateAt, UpdateAt) to
// confirm each is individually validated.
func TestPropertyField_IsValid(t *testing.T) {
	t.Run("valid field", func(t *testing.T) {
		pf := &PropertyField{
			ID:       NewId(),
			GroupID:  NewId(),
			Name:     "test field",
			Type:     PropertyFieldTypeText,
			CreateAt: GetMillis(),
			UpdateAt: GetMillis(),
		}
		require.NoError(t, pf.IsValid())
	})

	t.Run("invalid ID", func(t *testing.T) {
		pf := &PropertyField{
			ID:       "invalid",
			GroupID:  NewId(),
			Name:     "test field",
			Type:     PropertyFieldTypeText,
			CreateAt: GetMillis(),
			UpdateAt: GetMillis(),
		}
		require.Error(t, pf.IsValid())
	})

	t.Run("invalid GroupID", func(t *testing.T) {
		pf := &PropertyField{
			ID:       NewId(),
			GroupID:  "invalid",
			Name:     "test field",
			Type:     PropertyFieldTypeText,
			CreateAt: GetMillis(),
			UpdateAt: GetMillis(),
		}
		require.Error(t, pf.IsValid())
	})

	t.Run("empty name", func(t *testing.T) {
		pf := &PropertyField{
			ID:       NewId(),
			GroupID:  NewId(),
			Name:     "",
			Type:     PropertyFieldTypeText,
			CreateAt: GetMillis(),
			UpdateAt: GetMillis(),
		}
		require.Error(t, pf.IsValid())
	})

	t.Run("invalid type", func(t *testing.T) {
		pf := &PropertyField{
			ID:       NewId(),
			GroupID:  NewId(),
			Name:     "test field",
			Type:     "invalid",
			CreateAt: GetMillis(),
			UpdateAt: GetMillis(),
		}
		require.Error(t, pf.IsValid())
	})

	t.Run("zero CreateAt", func(t *testing.T) {
		pf := &PropertyField{
			ID:       NewId(),
			GroupID:  NewId(),
			Name:     "test field",
			Type:     PropertyFieldTypeText,
			CreateAt: 0,
			UpdateAt: GetMillis(),
		}
		require.Error(t, pf.IsValid())
	})

	t.Run("zero UpdateAt", func(t *testing.T) {
		pf := &PropertyField{
			ID:       NewId(),
			GroupID:  NewId(),
			Name:     "test field",
			Type:     PropertyFieldTypeText,
			CreateAt: GetMillis(),
			UpdateAt: 0,
		}
		require.Error(t, pf.IsValid())
	})
}
// TestPropertyField_SanitizeInput verifies user-supplied names are trimmed of
// surrounding whitespace.
func TestPropertyField_SanitizeInput(t *testing.T) {
	t.Run("trims spaces from name", func(t *testing.T) {
		pf := &PropertyField{Name: "  test field  "}
		pf.SanitizeInput()
		assert.Equal(t, "test field", pf.Name)
	})
}
// TestPropertyField_Patch confirms Patch applies every provided pointer field
// and, when only some are set, leaves the rest of the struct untouched.
func TestPropertyField_Patch(t *testing.T) {
	t.Run("patches all fields", func(t *testing.T) {
		pf := &PropertyField{
			Name:       "original name",
			Type:       PropertyFieldTypeText,
			TargetID:   "original_target",
			TargetType: "original_type",
		}
		patch := &PropertyFieldPatch{
			Name:       NewPointer("new name"),
			Type:       NewPointer(PropertyFieldTypeSelect),
			TargetID:   NewPointer("new_target"),
			TargetType: NewPointer("new_type"),
			Attrs:      &StringInterface{"key": "value"},
		}

		pf.Patch(patch)

		assert.Equal(t, "new name", pf.Name)
		assert.Equal(t, PropertyFieldTypeSelect, pf.Type)
		assert.Equal(t, "new_target", pf.TargetID)
		assert.Equal(t, "new_type", pf.TargetType)
		assert.EqualValues(t, StringInterface{"key": "value"}, pf.Attrs)
	})

	t.Run("patches only specified fields", func(t *testing.T) {
		pf := &PropertyField{
			Name:       "original name",
			Type:       PropertyFieldTypeText,
			TargetID:   "original_target",
			TargetType: "original_type",
		}
		patch := &PropertyFieldPatch{
			Name: NewPointer("new name"),
		}

		pf.Patch(patch)

		assert.Equal(t, "new name", pf.Name)
		assert.Equal(t, PropertyFieldTypeText, pf.Type)
		assert.Equal(t, "original_target", pf.TargetID)
		assert.Equal(t, "original_type", pf.TargetType)
	})
}
// TestPropertyFieldSearchCursor_IsValid checks that an empty cursor is valid,
// a fully populated one is valid, and that a malformed ID or non-positive
// CreateAt is rejected.
func TestPropertyFieldSearchCursor_IsValid(t *testing.T) {
	t.Run("empty cursor is valid", func(t *testing.T) {
		cursor := PropertyFieldSearchCursor{}
		assert.NoError(t, cursor.IsValid())
	})

	t.Run("valid cursor", func(t *testing.T) {
		cursor := PropertyFieldSearchCursor{
			PropertyFieldID: NewId(),
			CreateAt:        GetMillis(),
		}
		assert.NoError(t, cursor.IsValid())
	})

	t.Run("invalid PropertyFieldID", func(t *testing.T) {
		cursor := PropertyFieldSearchCursor{
			PropertyFieldID: "invalid",
			CreateAt:        GetMillis(),
		}
		assert.Error(t, cursor.IsValid())
	})

	t.Run("zero CreateAt", func(t *testing.T) {
		cursor := PropertyFieldSearchCursor{
			PropertyFieldID: NewId(),
			CreateAt:        0,
		}
		assert.Error(t, cursor.IsValid())
	})

	t.Run("negative CreateAt", func(t *testing.T) {
		cursor := PropertyFieldSearchCursor{
			PropertyFieldID: NewId(),
			CreateAt:        -1,
		}
		assert.Error(t, cursor.IsValid())
	})
}

View File

@ -6,6 +6,8 @@ package model
import (
"encoding/json"
"net/http"
"github.com/pkg/errors"
)
type PropertyValue struct {
@ -63,12 +65,36 @@ func (pv *PropertyValue) IsValid() error {
return nil
}
// PropertyValueSearchCursor identifies a position within a paginated property
// value search. An empty (zero-value) cursor means "start from the beginning".
type PropertyValueSearchCursor struct {
	PropertyValueID string
	CreateAt        int64
}

// IsEmpty reports whether the cursor is unset.
func (p PropertyValueSearchCursor) IsEmpty() bool {
	return p.PropertyValueID == "" && p.CreateAt == 0
}

// IsValid checks the cursor for consistency. An empty cursor is always valid;
// a non-empty one needs a positive CreateAt and a well-formed PropertyValueID.
func (p PropertyValueSearchCursor) IsValid() error {
	if p.IsEmpty() {
		return nil
	}

	if p.CreateAt <= 0 {
		return errors.New("create at cannot be negative or zero")
	}

	if !IsValidId(p.PropertyValueID) {
		// Fixed: previous message said "property field id", copied from
		// PropertyFieldSearchCursor; this cursor validates a value ID.
		return errors.New("property value id is invalid")
	}

	return nil
}
// PropertyValueSearchOpts carries the filters and pagination settings for a
// property value search. Both offset pagination (Page) and cursor pagination
// (Cursor) are represented here; which one a store honors is up to the
// implementation — NOTE(review): confirm against the store layer.
type PropertyValueSearchOpts struct {
	GroupID        string
	TargetType     string
	TargetID       string
	FieldID        string
	IncludeDeleted bool
	Page           int
	Cursor         PropertyValueSearchCursor
	PerPage        int
}

View File

@ -0,0 +1,186 @@
// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved.
// See LICENSE.txt for license information.
package model
import (
"encoding/json"
"testing"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
// TestPropertyValue_PreSave covers the defaults PreSave fills in: a generated
// ID when none is set (existing IDs are preserved), CreateAt when zero, and
// UpdateAt mirroring CreateAt on first save.
func TestPropertyValue_PreSave(t *testing.T) {
	t.Run("sets ID if empty", func(t *testing.T) {
		pv := &PropertyValue{}
		pv.PreSave()
		assert.NotEmpty(t, pv.ID)
		assert.Len(t, pv.ID, 26) // Length of NewId()
	})

	t.Run("keeps existing ID", func(t *testing.T) {
		pv := &PropertyValue{ID: "existing_id"}
		pv.PreSave()
		assert.Equal(t, "existing_id", pv.ID)
	})

	t.Run("sets CreateAt if zero", func(t *testing.T) {
		pv := &PropertyValue{}
		pv.PreSave()
		assert.NotZero(t, pv.CreateAt)
	})

	t.Run("sets UpdateAt equal to CreateAt", func(t *testing.T) {
		pv := &PropertyValue{}
		pv.PreSave()
		assert.Equal(t, pv.CreateAt, pv.UpdateAt)
	})
}
// TestPropertyValue_IsValid starts from a fully valid value and then breaks
// one property per subtest (ID, TargetID, TargetType, GroupID, FieldID,
// CreateAt, UpdateAt) to confirm each is individually validated.
func TestPropertyValue_IsValid(t *testing.T) {
	t.Run("valid value", func(t *testing.T) {
		value := json.RawMessage(`{"test": "value"}`)
		pv := &PropertyValue{
			ID:         NewId(),
			TargetID:   NewId(),
			TargetType: "test_type",
			GroupID:    NewId(),
			FieldID:    NewId(),
			Value:      value,
			CreateAt:   GetMillis(),
			UpdateAt:   GetMillis(),
		}
		require.NoError(t, pv.IsValid())
	})

	t.Run("invalid ID", func(t *testing.T) {
		pv := &PropertyValue{
			ID:         "invalid",
			TargetID:   NewId(),
			TargetType: "test_type",
			GroupID:    NewId(),
			FieldID:    NewId(),
			CreateAt:   GetMillis(),
			UpdateAt:   GetMillis(),
		}
		require.Error(t, pv.IsValid())
	})

	t.Run("invalid TargetID", func(t *testing.T) {
		pv := &PropertyValue{
			ID:         NewId(),
			TargetID:   "invalid",
			TargetType: "test_type",
			GroupID:    NewId(),
			FieldID:    NewId(),
			CreateAt:   GetMillis(),
			UpdateAt:   GetMillis(),
		}
		require.Error(t, pv.IsValid())
	})

	t.Run("empty TargetType", func(t *testing.T) {
		pv := &PropertyValue{
			ID:         NewId(),
			TargetID:   NewId(),
			TargetType: "",
			GroupID:    NewId(),
			FieldID:    NewId(),
			CreateAt:   GetMillis(),
			UpdateAt:   GetMillis(),
		}
		require.Error(t, pv.IsValid())
	})

	t.Run("invalid GroupID", func(t *testing.T) {
		pv := &PropertyValue{
			ID:         NewId(),
			TargetID:   NewId(),
			TargetType: "test_type",
			GroupID:    "invalid",
			FieldID:    NewId(),
			CreateAt:   GetMillis(),
			UpdateAt:   GetMillis(),
		}
		require.Error(t, pv.IsValid())
	})

	t.Run("invalid FieldID", func(t *testing.T) {
		pv := &PropertyValue{
			ID:         NewId(),
			TargetID:   NewId(),
			TargetType: "test_type",
			GroupID:    NewId(),
			FieldID:    "invalid",
			CreateAt:   GetMillis(),
			UpdateAt:   GetMillis(),
		}
		require.Error(t, pv.IsValid())
	})

	t.Run("zero CreateAt", func(t *testing.T) {
		pv := &PropertyValue{
			ID:         NewId(),
			TargetID:   NewId(),
			TargetType: "test_type",
			GroupID:    NewId(),
			FieldID:    NewId(),
			CreateAt:   0,
			UpdateAt:   GetMillis(),
		}
		require.Error(t, pv.IsValid())
	})

	t.Run("zero UpdateAt", func(t *testing.T) {
		pv := &PropertyValue{
			ID:         NewId(),
			TargetID:   NewId(),
			TargetType: "test_type",
			GroupID:    NewId(),
			FieldID:    NewId(),
			CreateAt:   GetMillis(),
			UpdateAt:   0,
		}
		require.Error(t, pv.IsValid())
	})
}
// TestPropertyValueSearchCursor_IsValid checks that an empty cursor is valid,
// a fully populated one is valid, and that a malformed ID or non-positive
// CreateAt is rejected.
func TestPropertyValueSearchCursor_IsValid(t *testing.T) {
	t.Run("empty cursor is valid", func(t *testing.T) {
		cursor := PropertyValueSearchCursor{}
		assert.NoError(t, cursor.IsValid())
	})

	t.Run("valid cursor", func(t *testing.T) {
		cursor := PropertyValueSearchCursor{
			PropertyValueID: NewId(),
			CreateAt:        GetMillis(),
		}
		assert.NoError(t, cursor.IsValid())
	})

	t.Run("invalid PropertyValueID", func(t *testing.T) {
		cursor := PropertyValueSearchCursor{
			PropertyValueID: "invalid",
			CreateAt:        GetMillis(),
		}
		assert.Error(t, cursor.IsValid())
	})

	t.Run("zero CreateAt", func(t *testing.T) {
		cursor := PropertyValueSearchCursor{
			PropertyValueID: NewId(),
			CreateAt:        0,
		}
		assert.Error(t, cursor.IsValid())
	})

	t.Run("negative CreateAt", func(t *testing.T) {
		cursor := PropertyValueSearchCursor{
			PropertyValueID: NewId(),
			CreateAt:        -1,
		}
		assert.Error(t, cursor.IsValid())
	})
}

View File

@ -94,6 +94,10 @@ const (
WebsocketScheduledPostCreated WebsocketEventType = "scheduled_post_created"
WebsocketScheduledPostUpdated WebsocketEventType = "scheduled_post_updated"
WebsocketScheduledPostDeleted WebsocketEventType = "scheduled_post_deleted"
WebsocketEventCPAFieldCreated WebsocketEventType = "custom_profile_attributes_field_created"
WebsocketEventCPAFieldUpdated WebsocketEventType = "custom_profile_attributes_field_updated"
WebsocketEventCPAFieldDeleted WebsocketEventType = "custom_profile_attributes_field_deleted"
WebsocketEventCPAValuesUpdated WebsocketEventType = "custom_profile_attributes_values_updated"
WebSocketMsgTypeResponse = "response"
WebSocketMsgTypeEvent = "event"

View File

@ -0,0 +1,470 @@
// Jest Snapshot v1, https://goo.gl/fbAQLP
exports[`components/AdminConsole/ClientSideUserIdsSetting initial state with multiple items 1`] = `
<ClientSideUserIdsSetting
disabled={false}
id="MySetting"
onChange={[MockFunction]}
setByEnv={false}
value={
Array [
"userid1",
"userid2",
"id3",
]
}
>
<Memo(Settings)
helpText={
<Memo(MemoizedFormattedMessage)
defaultMessage="Set the user ids you want to track for client side metrics. Separate values with a comma."
id="admin.customization.clientSideUserIdsDesc"
/>
}
inputId="MySetting"
label={
<Memo(MemoizedFormattedMessage)
defaultMessage="Client side user ids:"
id="admin.customization.clientSideUserIds"
/>
}
setByEnv={false}
>
<div
className="form-group"
data-testid="MySetting"
>
<label
className="control-label col-sm-4"
htmlFor="MySetting"
>
<FormattedMessage
defaultMessage="Client side user ids:"
id="admin.customization.clientSideUserIds"
>
<span>
Client side user ids:
</span>
</FormattedMessage>
</label>
<div
className="col-sm-8"
>
<LocalizedPlaceholderInput
className="form-control"
disabled={false}
id="MySetting"
onChange={[Function]}
placeholder={
Object {
"defaultMessage": "E.g.: \\"userid1,userid2\\"",
"id": "admin.customization.clientSideUserIdsPlaceholder",
}
}
type="text"
value="userid1,userid2,id3"
>
<input
className="form-control"
disabled={false}
id="MySetting"
onChange={[Function]}
placeholder="E.g.: \\"userid1,userid2\\""
type="text"
value="userid1,userid2,id3"
/>
</LocalizedPlaceholderInput>
<div
className="help-text"
data-testid="MySettinghelp-text"
>
<FormattedMessage
defaultMessage="Set the user ids you want to track for client side metrics. Separate values with a comma."
id="admin.customization.clientSideUserIdsDesc"
>
<span>
Set the user ids you want to track for client side metrics. Separate values with a comma.
</span>
</FormattedMessage>
</div>
</div>
</div>
</Memo(Settings)>
</ClientSideUserIdsSetting>
`;
exports[`components/AdminConsole/ClientSideUserIdsSetting initial state with no items 1`] = `
<ClientSideUserIdsSetting
disabled={false}
id="MySetting"
onChange={[MockFunction]}
setByEnv={false}
value={Array []}
>
<Memo(Settings)
helpText={
<Memo(MemoizedFormattedMessage)
defaultMessage="Set the user ids you want to track for client side metrics. Separate values with a comma."
id="admin.customization.clientSideUserIdsDesc"
/>
}
inputId="MySetting"
label={
<Memo(MemoizedFormattedMessage)
defaultMessage="Client side user ids:"
id="admin.customization.clientSideUserIds"
/>
}
setByEnv={false}
>
<div
className="form-group"
data-testid="MySetting"
>
<label
className="control-label col-sm-4"
htmlFor="MySetting"
>
<FormattedMessage
defaultMessage="Client side user ids:"
id="admin.customization.clientSideUserIds"
>
<span>
Client side user ids:
</span>
</FormattedMessage>
</label>
<div
className="col-sm-8"
>
<LocalizedPlaceholderInput
className="form-control"
disabled={false}
id="MySetting"
onChange={[Function]}
placeholder={
Object {
"defaultMessage": "E.g.: \\"userid1,userid2\\"",
"id": "admin.customization.clientSideUserIdsPlaceholder",
}
}
type="text"
value=""
>
<input
className="form-control"
disabled={false}
id="MySetting"
onChange={[Function]}
placeholder="E.g.: \\"userid1,userid2\\""
type="text"
value=""
/>
</LocalizedPlaceholderInput>
<div
className="help-text"
data-testid="MySettinghelp-text"
>
<FormattedMessage
defaultMessage="Set the user ids you want to track for client side metrics. Separate values with a comma."
id="admin.customization.clientSideUserIdsDesc"
>
<span>
Set the user ids you want to track for client side metrics. Separate values with a comma.
</span>
</FormattedMessage>
</div>
</div>
</div>
</Memo(Settings)>
</ClientSideUserIdsSetting>
`;
exports[`components/AdminConsole/ClientSideUserIdsSetting initial state with one item 1`] = `
<ClientSideUserIdsSetting
disabled={false}
id="MySetting"
onChange={[MockFunction]}
setByEnv={false}
value={
Array [
"userid1",
]
}
>
<Memo(Settings)
helpText={
<Memo(MemoizedFormattedMessage)
defaultMessage="Set the user ids you want to track for client side metrics. Separate values with a comma."
id="admin.customization.clientSideUserIdsDesc"
/>
}
inputId="MySetting"
label={
<Memo(MemoizedFormattedMessage)
defaultMessage="Client side user ids:"
id="admin.customization.clientSideUserIds"
/>
}
setByEnv={false}
>
<div
className="form-group"
data-testid="MySetting"
>
<label
className="control-label col-sm-4"
htmlFor="MySetting"
>
<FormattedMessage
defaultMessage="Client side user ids:"
id="admin.customization.clientSideUserIds"
>
<span>
Client side user ids:
</span>
</FormattedMessage>
</label>
<div
className="col-sm-8"
>
<LocalizedPlaceholderInput
className="form-control"
disabled={false}
id="MySetting"
onChange={[Function]}
placeholder={
Object {
"defaultMessage": "E.g.: \\"userid1,userid2\\"",
"id": "admin.customization.clientSideUserIdsPlaceholder",
}
}
type="text"
value="userid1"
>
<input
className="form-control"
disabled={false}
id="MySetting"
onChange={[Function]}
placeholder="E.g.: \\"userid1,userid2\\""
type="text"
value="userid1"
/>
</LocalizedPlaceholderInput>
<div
className="help-text"
data-testid="MySettinghelp-text"
>
<FormattedMessage
defaultMessage="Set the user ids you want to track for client side metrics. Separate values with a comma."
id="admin.customization.clientSideUserIdsDesc"
>
<span>
Set the user ids you want to track for client side metrics. Separate values with a comma.
</span>
</FormattedMessage>
</div>
</div>
</div>
</Memo(Settings)>
</ClientSideUserIdsSetting>
`;
exports[`components/AdminConsole/ClientSideUserIdsSetting renders properly when disabled 1`] = `
<ClientSideUserIdsSetting
disabled={true}
id="MySetting"
onChange={[MockFunction]}
setByEnv={false}
value={
Array [
"userid1",
"userid2",
]
}
>
<Memo(Settings)
helpText={
<Memo(MemoizedFormattedMessage)
defaultMessage="Set the user ids you want to track for client side metrics. Separate values with a comma."
id="admin.customization.clientSideUserIdsDesc"
/>
}
inputId="MySetting"
label={
<Memo(MemoizedFormattedMessage)
defaultMessage="Client side user ids:"
id="admin.customization.clientSideUserIds"
/>
}
setByEnv={false}
>
<div
className="form-group"
data-testid="MySetting"
>
<label
className="control-label col-sm-4"
htmlFor="MySetting"
>
<FormattedMessage
defaultMessage="Client side user ids:"
id="admin.customization.clientSideUserIds"
>
<span>
Client side user ids:
</span>
</FormattedMessage>
</label>
<div
className="col-sm-8"
>
<LocalizedPlaceholderInput
className="form-control"
disabled={true}
id="MySetting"
onChange={[Function]}
placeholder={
Object {
"defaultMessage": "E.g.: \\"userid1,userid2\\"",
"id": "admin.customization.clientSideUserIdsPlaceholder",
}
}
type="text"
value="userid1,userid2"
>
<input
className="form-control"
disabled={true}
id="MySetting"
onChange={[Function]}
placeholder="E.g.: \\"userid1,userid2\\""
type="text"
value="userid1,userid2"
/>
</LocalizedPlaceholderInput>
<div
className="help-text"
data-testid="MySettinghelp-text"
>
<FormattedMessage
defaultMessage="Set the user ids you want to track for client side metrics. Separate values with a comma."
id="admin.customization.clientSideUserIdsDesc"
>
<span>
Set the user ids you want to track for client side metrics. Separate values with a comma.
</span>
</FormattedMessage>
</div>
</div>
</div>
</Memo(Settings)>
</ClientSideUserIdsSetting>
`;
exports[`components/AdminConsole/ClientSideUserIdsSetting renders properly when set by environment variable 1`] = `
<ClientSideUserIdsSetting
disabled={false}
id="MySetting"
onChange={[MockFunction]}
setByEnv={true}
value={
Array [
"userid1",
"userid2",
]
}
>
<Memo(Settings)
helpText={
<Memo(MemoizedFormattedMessage)
defaultMessage="Set the user ids you want to track for client side metrics. Separate values with a comma."
id="admin.customization.clientSideUserIdsDesc"
/>
}
inputId="MySetting"
label={
<Memo(MemoizedFormattedMessage)
defaultMessage="Client side user ids:"
id="admin.customization.clientSideUserIds"
/>
}
setByEnv={true}
>
<div
className="form-group"
data-testid="MySetting"
>
<label
className="control-label col-sm-4"
htmlFor="MySetting"
>
<FormattedMessage
defaultMessage="Client side user ids:"
id="admin.customization.clientSideUserIds"
>
<span>
Client side user ids:
</span>
</FormattedMessage>
</label>
<div
className="col-sm-8"
>
<LocalizedPlaceholderInput
className="form-control"
disabled={true}
id="MySetting"
onChange={[Function]}
placeholder={
Object {
"defaultMessage": "E.g.: \\"userid1,userid2\\"",
"id": "admin.customization.clientSideUserIdsPlaceholder",
}
}
type="text"
value="userid1,userid2"
>
<input
className="form-control"
disabled={true}
id="MySetting"
onChange={[Function]}
placeholder="E.g.: \\"userid1,userid2\\""
type="text"
value="userid1,userid2"
/>
</LocalizedPlaceholderInput>
<div
className="help-text"
data-testid="MySettinghelp-text"
>
<FormattedMessage
defaultMessage="Set the user ids you want to track for client side metrics. Separate values with a comma."
id="admin.customization.clientSideUserIdsDesc"
>
<span>
Set the user ids you want to track for client side metrics. Separate values with a comma.
</span>
</FormattedMessage>
</div>
<SetByEnv>
<div
className="alert alert-warning"
>
<FormattedMessage
defaultMessage="This setting has been set through an environment variable. It cannot be changed through the System Console."
id="admin.set_by_env"
>
<span>
This setting has been set through an environment variable. It cannot be changed through the System Console.
</span>
</FormattedMessage>
</div>
</SetByEnv>
</div>
</div>
</Memo(Settings)>
</ClientSideUserIdsSetting>
`;

View File

@ -49,6 +49,7 @@ import CompanyInfo, {searchableStrings as billingCompanyInfoSearchableStrings} f
import CompanyInfoEdit from './billing/company_info_edit';
import BleveSettings, {searchableStrings as bleveSearchableStrings} from './bleve_settings';
import BrandImageSetting from './brand_image_setting/brand_image_setting';
import ClientSideUserIdsSetting from './client_side_userids_setting';
import ClusterSettings, {searchableStrings as clusterSearchableStrings} from './cluster_settings';
import CustomEnableDisableGuestAccountsSetting from './custom_enable_disable_guest_accounts_setting';
import CustomTermsOfServiceSettings from './custom_terms_of_service_settings';
@ -1963,6 +1964,15 @@ const AdminDefinition: AdminDefinitionType = {
it.configIsFalse('MetricsSettings', 'Enable'),
),
},
{
type: 'custom',
key: 'MetricsSettings.ClientSideUserIds',
component: ClientSideUserIdsSetting,
isDisabled: it.any(
it.not(it.userHasWritePermissionOnResource(RESOURCE_KEYS.ENVIRONMENT.PERFORMANCE_MONITORING)),
it.configIsFalse('MetricsSettings', 'EnableClientMetrics'),
),
},
{
type: 'text',
key: 'MetricsSettings.ListenAddress',

View File

@ -0,0 +1,133 @@
// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved.
// See LICENSE.txt for license information.
import React from 'react';
import {mountWithIntl} from 'tests/helpers/intl-test-helper';
import ClientSideUserIdsSetting from './client_side_userids_setting';
// Unit tests for ClientSideUserIdsSetting: initial rendering of the
// comma-joined value, parsing/trimming behavior of onChange, and the
// disabled / set-by-environment render states (snapshot-backed).
describe('components/AdminConsole/ClientSideUserIdsSetting', () => {
    const baseProps = {
        id: 'MySetting',
        value: ['userid1', 'userid2'],
        onChange: jest.fn(),
        disabled: false,
        setByEnv: false,
    };

    describe('initial state', () => {
        test('with no items', () => {
            const props = {
                ...baseProps,
                value: [],
            };
            const wrapper = mountWithIntl(
                <ClientSideUserIdsSetting {...props}/>,
            );
            expect(wrapper).toMatchSnapshot();

            // The component joins the array prop into one editable string.
            expect(wrapper.state('value')).toEqual('');
        });

        test('with one item', () => {
            const props = {
                ...baseProps,
                value: ['userid1'],
            };
            const wrapper = mountWithIntl(
                <ClientSideUserIdsSetting {...props}/>,
            );
            expect(wrapper).toMatchSnapshot();
            expect(wrapper.state('value')).toEqual('userid1');
        });

        test('with multiple items', () => {
            const props = {
                ...baseProps,
                value: ['userid1', 'userid2', 'id3'],
            };
            const wrapper = mountWithIntl(
                <ClientSideUserIdsSetting {...props}/>,
            );
            expect(wrapper).toMatchSnapshot();
            expect(wrapper.state('value')).toEqual('userid1,userid2,id3');
        });
    });

    describe('onChange', () => {
        test('called on change to empty', () => {
            const props = {
                ...baseProps,
                onChange: jest.fn(),
            };
            const wrapper = mountWithIntl(
                <ClientSideUserIdsSetting {...props}/>,
            );

            wrapper.find('input').simulate('change', {target: {value: ''}});

            expect(props.onChange).toBeCalledWith(baseProps.id, []);
        });

        test('called on change to one item', () => {
            const props = {
                ...baseProps,
                onChange: jest.fn(),
            };
            const wrapper = mountWithIntl(
                <ClientSideUserIdsSetting {...props}/>,
            );

            // Surrounding whitespace is trimmed before the array is emitted.
            wrapper.find('input').simulate('change', {target: {value: ' id2 '}});

            expect(props.onChange).toBeCalledWith(baseProps.id, ['id2']);
        });

        test('called on change to two items', () => {
            const props = {
                ...baseProps,
                onChange: jest.fn(),
            };
            const wrapper = mountWithIntl(
                <ClientSideUserIdsSetting {...props}/>,
            );

            wrapper.find('input').simulate('change', {target: {value: 'id1, id99'}});

            expect(props.onChange).toBeCalledWith(baseProps.id, ['id1', 'id99']);
        });
    });

    test('renders properly when disabled', () => {
        const props = {
            ...baseProps,
            disabled: true,
        };
        const wrapper = mountWithIntl(
            <ClientSideUserIdsSetting {...props}/>,
        );
        expect(wrapper).toMatchSnapshot();
    });

    test('renders properly when set by environment variable', () => {
        const props = {
            ...baseProps,
            setByEnv: true,
        };
        const wrapper = mountWithIntl(
            <ClientSideUserIdsSetting {...props}/>,
        );
        expect(wrapper).toMatchSnapshot();
    });
});

View File

@ -0,0 +1,81 @@
// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved.
// See LICENSE.txt for license information.
import React, {PureComponent} from 'react';
import type {ChangeEvent} from 'react';
import {defineMessage, FormattedMessage} from 'react-intl';
import LocalizedPlaceholderInput from 'components/localized_placeholder_input';
import Setting from './setting';
type Props = {
    id: string;

    // The setting's current value as stored in config: an array of user IDs.
    value: string[];

    // Invoked with the setting id and the parsed ID array on every edit.
    onChange: (id: string, valueAsArray: string[]) => void;
    disabled: boolean;
    setByEnv: boolean;
}

type State = {

    // Raw text currently shown in the input; parsed into an array on change.
    value: string;
}

/**
 * Admin console control that edits a list-of-user-ids setting as a single
 * comma-separated text field. The parent receives the parsed array via
 * `onChange`, while the unparsed text is kept locally so typing separators
 * and spaces is not disruptive.
 */
export default class ClientSideUserIdsSetting extends PureComponent<Props, State> {
    constructor(props: Props) {
        super(props);

        this.state = {
            value: this.arrayToString(props.value),
        };
    }

    // Splits on commas, trimming whitespace and dropping empty segments.
    stringToArray = (str: string): string[] => {
        return str.split(',').map((s) => s.trim()).filter(Boolean);
    };

    arrayToString = (arr: string[]): string => {
        return arr.join(',');
    };

    handleChange = (e: ChangeEvent<HTMLInputElement>): void => {
        const valueAsArray = this.stringToArray(e.target.value);

        // Report the normalized array upward, but keep the raw text in state
        // so in-progress edits (trailing commas, spaces) are preserved.
        this.props.onChange(this.props.id, valueAsArray);
        this.setState({
            value: e.target.value,
        });
    };

    render() {
        return (
            <Setting
                label={
                    <FormattedMessage
                        id='admin.customization.clientSideUserIds'
                        defaultMessage='Client side user ids:'
                    />
                }
                helpText={
                    <FormattedMessage
                        id='admin.customization.clientSideUserIdsDesc'
                        defaultMessage='Set the user ids you want to track for client side metrics. Separate values with a comma.'
                    />
                }
                inputId={this.props.id}
                setByEnv={this.props.setByEnv}
            >
                <LocalizedPlaceholderInput
                    id={this.props.id}
                    className='form-control'
                    type='text'
                    placeholder={defineMessage({id: 'admin.customization.clientSideUserIdsPlaceholder', defaultMessage: 'E.g.: "userid1,userid2"'})}
                    value={this.state.value}
                    onChange={this.handleChange}
                    disabled={this.props.disabled || this.props.setByEnv}
                />
            </Setting>
        );
    }
}

View File

@ -40,6 +40,10 @@ export default class FileUploadSetting extends React.PureComponent<Props, State>
};
}
handleChooseClick = () => {
this.fileInputRef.current?.click();
};
handleChange = () => {
const files = this.fileInputRef.current?.files;
if (files && files.length > 0) {
@ -92,6 +96,7 @@ export default class FileUploadSetting extends React.PureComponent<Props, State>
type='button'
className='btn btn-tertiary'
disabled={this.props.disabled}
onClick={this.handleChooseClick}
>
<FormattedMessage
id='admin.file_upload.chooseFile'

View File

@ -123,7 +123,9 @@ table.adminConsoleListTable {
font-weight: 400;
line-height: 20px;
&:hover {
&.clickable:hover {
background-color: $sysCenterChannelColorWith8Alpha;
cursor: pointer;
@ -207,6 +209,20 @@ table.adminConsoleListTable {
tfoot {
border-top: 1px solid rgba(var(--center-channel-color-rgb), 0.08);
}
.dragHandle {
position: absolute;
top: 0;
left: 0;
display: inline-flex;
width: 24px;
height: calc(100% - 1px);
color: rgba(var(--center-channel-color-rgb), 0.64);
svg {
align-self: center;
}
}
}
.adminConsoleListTabletOptionalFoot,

View File

@ -6,10 +6,14 @@ import {flexRender} from '@tanstack/react-table';
import classNames from 'classnames';
import React, {useMemo} from 'react';
import type {AriaAttributes, MouseEvent, ReactNode} from 'react';
import type {DropResult} from 'react-beautiful-dnd';
import {DragDropContext, Draggable, Droppable} from 'react-beautiful-dnd';
import {FormattedMessage, defineMessages, useIntl} from 'react-intl';
import ReactSelect, {components} from 'react-select';
import type {IndicatorContainerProps, ValueType} from 'react-select';
import {DragVerticalIcon} from '@mattermost/compass-icons/components';
import LoadingSpinner from 'components/widgets/loading/loading_spinner';
import {Pagination} from './pagination';
@ -56,6 +60,7 @@ export type TableMeta = {
loadingState?: LoadingStates;
emptyDataMessage?: ReactNode;
onRowClick?: (row: string) => void;
onReorder?: (prev: number, next: number) => void;
disablePrevPage?: boolean;
disableNextPage?: boolean;
disablePaginationControls?: boolean;
@ -117,6 +122,14 @@ export function ListTable<TableType extends TableMandatoryTypes>(
}
}
const handleDragEnd = (result: DropResult) => {
const {source, destination} = result;
if (!destination) {
return;
}
tableMeta.onReorder?.(source.index, destination.index);
};
const colCount = props.table.getAllColumns().length;
const rowCount = props.table.getRowModel().rows.length;
@ -164,6 +177,7 @@ export function ListTable<TableType extends TableMandatoryTypes>(
})}
disabled={header.column.getCanSort() && tableMeta.loadingState === LoadingStates.Loading}
onClick={header.column.getToggleSortingHandler()}
style={{width: header.column.getSize()}}
>
{header.isPlaceholder ? null : flexRender(header.column.columnDef.header, header.getContext())}
@ -193,69 +207,104 @@ export function ListTable<TableType extends TableMandatoryTypes>(
</tr>
))}
</thead>
<tbody>
{props.table.getRowModel().rows.map((row) => (
<tr
id={`${rowIdPrefix}${row.original.id}`}
key={row.id}
onClick={handleRowClick}
>
{row.getVisibleCells().map((cell) => (
<td
key={cell.id}
id={`${cellIdPrefix}${cell.id}`}
headers={`${headerIdPrefix}${cell.column.id}`}
className={classNames(`${cell.column.id}`, {
[PINNED_CLASS]: cell.column.getCanPin(),
})}
>
{cell.getIsPlaceholder() ? null : flexRender(cell.column.columnDef.cell, cell.getContext())}
</td>
))}
</tr>
))}
{/* State where it is initially loading the data */}
{(tableMeta.loadingState === LoadingStates.Loading && rowCount === 0) && (
<tr>
<td
colSpan={colCount}
className='noRows'
disabled={true}
<DragDropContext onDragEnd={handleDragEnd}>
<Droppable droppableId='table-body'>
{(provided, snap) => (
<tbody
ref={provided.innerRef}
{...provided.droppableProps}
>
<LoadingSpinner
text={formatMessage({id: 'adminConsole.list.table.genericLoading', defaultMessage: 'Loading'})}
/>
</td>
</tr>
)}
{props.table.getRowModel().rows.map((row) => (
<Draggable
draggableId={row.original.id}
key={row.original.id}
index={row.index}
isDragDisabled={!tableMeta.onReorder}
>
{(provided) => {
return (
<tr
id={`${rowIdPrefix}${row.original.id}`}
key={row.id}
onClick={handleRowClick}
className={classNames({clickable: Boolean(tableMeta.onRowClick) && !snap.isDraggingOver})}
ref={provided.innerRef}
{...provided.draggableProps}
>
{row.getVisibleCells().map((cell, i) => (
<td
key={cell.id}
id={`${cellIdPrefix}${cell.id}`}
headers={`${headerIdPrefix}${cell.column.id}`}
className={classNames(`${cell.column.id}`, {
[PINNED_CLASS]: cell.column.getCanPin(),
})}
style={{width: cell.column.getSize()}}
>
{tableMeta.onReorder && i === 0 && (
<span
className='dragHandle'
{...provided.dragHandleProps}
>
<DragVerticalIcon size={18}/>
</span>
)}
{cell.getIsPlaceholder() ? null : flexRender(cell.column.columnDef.cell, cell.getContext())}
</td>
))}
</tr>
);
}}
</Draggable>
{/* State where there is no data */}
{(tableMeta.loadingState === LoadingStates.Loaded && rowCount === 0) && (
<tr>
<td
colSpan={colCount}
className='noRows'
disabled={true}
>
{tableMeta.emptyDataMessage || formatMessage({id: 'adminConsole.list.table.genericNoData', defaultMessage: 'No data'})}
</td>
</tr>
)}
))}
{/* State where there is an error loading the data */}
{tableMeta.loadingState === LoadingStates.Failed && (
<tr>
<td
colSpan={colCount}
className='noRows'
disabled={true}
>
{formatMessage({id: 'adminConsole.list.table.genericError', defaultMessage: 'There was an error loading the data, please try again'})}
</td>
</tr>
)}
</tbody>
{provided.placeholder}
{/* State where it is initially loading the data */}
{(tableMeta.loadingState === LoadingStates.Loading && rowCount === 0) && (
<tr>
<td
colSpan={colCount}
className='noRows'
disabled={true}
>
<LoadingSpinner
text={formatMessage({id: 'adminConsole.list.table.genericLoading', defaultMessage: 'Loading'})}
/>
</td>
</tr>
)}
{/* State where there is no data */}
{(tableMeta.loadingState === LoadingStates.Loaded && rowCount === 0) && (
<tr>
<td
colSpan={colCount}
className='noRows'
disabled={true}
>
{tableMeta.emptyDataMessage || formatMessage({id: 'adminConsole.list.table.genericNoData', defaultMessage: 'No data'})}
</td>
</tr>
)}
{/* State where there is an error loading the data */}
{tableMeta.loadingState === LoadingStates.Failed && (
<tr>
<td
colSpan={colCount}
className='noRows'
disabled={true}
>
{formatMessage({id: 'adminConsole.list.table.genericError', defaultMessage: 'There was an error loading the data, please try again'})}
</td>
</tr>
)}
</tbody>
)}
</Droppable>
</DragDropContext>
<tfoot>
{props.table.getFooterGroups().map((footerGroup) => (
<tr key={footerGroup.id}>

View File

@ -7,7 +7,7 @@ import React, {useEffect, useMemo, useState} from 'react';
import {FormattedMessage, useIntl} from 'react-intl';
import styled, {css} from 'styled-components';
import {PlusIcon, TextBoxOutlineIcon, TrashCanOutlineIcon} from '@mattermost/compass-icons/components';
import {MenuVariantIcon, PlusIcon, TrashCanOutlineIcon} from '@mattermost/compass-icons/components';
import type {UserPropertyField} from '@mattermost/types/properties';
import {collectionToArray} from '@mattermost/types/utilities';
@ -30,6 +30,7 @@ type Props = {
type FieldActions = {
updateField: (field: UserPropertyField) => void;
deleteField: (id: string) => void;
reorderField: (field: UserPropertyField, nextOrder: number) => void;
}
export const useUserPropertiesTable = (): SectionHook => {
@ -53,6 +54,7 @@ export const useUserPropertiesTable = (): SectionHook => {
data={userPropertyFields}
updateField={itemOps.update}
deleteField={itemOps.delete}
reorderField={itemOps.reorder}
/>
{nonDeletedCount < Constants.MAX_CUSTOM_ATTRIBUTES && (
<LinkButton onClick={itemOps.create}>
@ -78,13 +80,14 @@ export const useUserPropertiesTable = (): SectionHook => {
};
};
export function UserPropertiesTable({data: collection, updateField, deleteField}: Props & FieldActions) {
export function UserPropertiesTable({data: collection, updateField, deleteField, reorderField}: Props & FieldActions) {
const {formatMessage} = useIntl();
const data = collectionToArray(collection);
const col = createColumnHelper<UserPropertyField>();
const columns = useMemo<Array<ColumnDef<UserPropertyField, any>>>(() => {
return [
col.accessor('name', {
size: 180,
header: () => {
return (
<ColHeaderLeft>
@ -150,6 +153,7 @@ export function UserPropertiesTable({data: collection, updateField, deleteField}
enableSorting: false,
}),
col.accessor('type', {
size: 100,
header: () => {
return (
<ColHeaderLeft>
@ -166,7 +170,7 @@ export function UserPropertiesTable({data: collection, updateField, deleteField}
if (type === 'text') {
type = (
<>
<TextBoxOutlineIcon
<MenuVariantIcon
size={18}
color={'rgba(var(--center-channel-color-rgb), 0.64)'}
/>
@ -187,8 +191,17 @@ export function UserPropertiesTable({data: collection, updateField, deleteField}
enableHiding: false,
enableSorting: false,
}),
col.display({
id: 'options',
size: 300,
header: () => <></>,
cell: () => <></>,
enableHiding: false,
enableSorting: false,
}),
col.display({
id: 'actions',
size: 100,
header: () => {
return (
<ColHeaderRight>
@ -202,7 +215,6 @@ export function UserPropertiesTable({data: collection, updateField, deleteField}
cell: ({row}) => (
<Actions
field={row.original}
updateField={updateField}
deleteField={deleteField}
/>
),
@ -215,9 +227,6 @@ export function UserPropertiesTable({data: collection, updateField, deleteField}
const table = useReactTable({
data,
columns,
initialState: {
sorting: [],
},
getCoreRowModel: getCoreRowModel<UserPropertyField>(),
getSortedRowModel: getSortedRowModel<UserPropertyField>(),
enableSortingRemoval: false,
@ -226,6 +235,9 @@ export function UserPropertiesTable({data: collection, updateField, deleteField}
meta: {
tableId: 'userProperties',
disablePaginationControls: true,
onReorder: (prev: number, next: number) => {
reorderField(collection.data[collection.order[prev]], next);
},
},
manualPagination: true,
});
@ -283,7 +295,7 @@ const TableWrapper = styled.div`
}
`;
const Actions = ({field, deleteField}: {field: UserPropertyField} & FieldActions) => {
const Actions = ({field, deleteField}: {field: UserPropertyField} & Pick<FieldActions, 'deleteField'>) => {
const {promptDelete} = useUserPropertyFieldDelete();
const {formatMessage} = useIntl();

View File

@ -3,7 +3,7 @@
import {act} from '@testing-library/react-hooks';
import type {UserPropertyField} from '@mattermost/types/properties';
import type {UserPropertyField, UserPropertyFieldPatch} from '@mattermost/types/properties';
import type {DeepPartial} from '@mattermost/types/utilities';
import {Client4} from 'mattermost-redux/client';
@ -48,11 +48,11 @@ describe('useUserPropertyFields', () => {
const deleteField = jest.spyOn(Client4, 'deleteCustomProfileAttributeField');
const createField = jest.spyOn(Client4, 'createCustomProfileAttributeField');
const baseField = {type: 'text' as const, group_id: 'custom_profile_attributes' as const, create_at: 1736541716295, delete_at: 0, update_at: 0};
const field0: UserPropertyField = {id: 'f0', name: 'test attribute 0', ...baseField};
const field1: UserPropertyField = {id: 'f1', name: 'test attribute 1', ...baseField};
const field2: UserPropertyField = {id: 'f2', name: 'test attribute 2', ...baseField};
const field3: UserPropertyField = {id: 'f3', name: 'test attribute 3', ...baseField};
const baseField = {type: 'text', group_id: 'custom_profile_attributes', create_at: 1736541716295, delete_at: 0, update_at: 0} as const;
const field0: UserPropertyField = {...baseField, id: 'f0', name: 'test attribute 0'};
const field1: UserPropertyField = {...baseField, id: 'f1', name: 'test attribute 1'};
const field2: UserPropertyField = {...baseField, id: 'f2', name: 'test attribute 2'};
const field3: UserPropertyField = {...baseField, id: 'f3', name: 'test attribute 3'};
getFields.mockResolvedValue([field0, field1, field2, field3]);
@ -134,6 +134,59 @@ describe('useUserPropertyFields', () => {
expect(fields4.data[field1.id].name).toBe('changed attribute value');
});
it('should successfully handle reordering', async () => {
patchField.mockImplementation((id: string, patch: UserPropertyFieldPatch) => Promise.resolve({...baseField, ...patch, id, update_at: Date.now()} as UserPropertyField));
const {result, rerender, waitFor} = renderHookWithContext(() => {
return useUserPropertyFields();
}, getBaseState());
act(() => {
jest.runAllTimers();
});
rerender();
await waitFor(() => {
const [, read] = result.current;
expect(read.loading).toBe(false);
});
const [fields2,,, ops2] = result.current;
act(() => {
ops2.reorder(fields2.data[field1.id], 0);
});
rerender();
const [fields3, readIO3, pendingIO3] = result.current;
// expect 2 changed fields to be in the correct order
expect(fields3.data[field1.id]?.attrs?.sort_order).toBe(0);
expect(fields3.data[field0.id]?.attrs?.sort_order).toBe(1);
expect(pendingIO3.hasChanges).toBe(true);
await act(async () => {
const data = await pendingIO3.commit();
if (data) {
readIO3.setData(data);
}
jest.runAllTimers();
rerender();
});
await waitFor(() => {
const [,, pending] = result.current;
expect(pending.saving).toBe(false);
});
expect(patchField).toHaveBeenCalledWith(field1.id, {type: 'text', name: 'test attribute 1', attrs: {sort_order: 0}});
expect(patchField).toHaveBeenCalledWith(field0.id, {type: 'text', name: 'test attribute 0', attrs: {sort_order: 1}});
const [fields4,, pendingIO4] = result.current;
expect(pendingIO4.hasChanges).toBe(false);
expect(pendingIO4.error).toBe(undefined);
expect(fields4.order).toEqual(['f1', 'f0', 'f2', 'f3']);
});
it('should successfully handle deletes', async () => {
const {result, rerender, waitFor} = renderHookWithContext(() => {
return useUserPropertyFields();
@ -224,8 +277,8 @@ describe('useUserPropertyFields', () => {
expect(pendingIO4.saving).toBe(false);
});
expect(createField).toHaveBeenCalledWith({type: 'text', name: 'Text'});
expect(createField).toHaveBeenCalledWith({type: 'text', name: 'Text 2'});
expect(createField).toHaveBeenCalledWith({type: 'text', name: 'Text', attrs: {sort_order: 4}});
expect(createField).toHaveBeenCalledWith({type: 'text', name: 'Text 2', attrs: {sort_order: 5}});
const [fields4,,,] = result.current;
expect(Object.values(fields4.data)).toEqual(expect.arrayContaining([

View File

@ -11,6 +11,7 @@ import {collectionAddItem, collectionFromArray, collectionRemoveItem, collection
import type {PartialExcept, IDMappedCollection, IDMappedObjects} from '@mattermost/types/utilities';
import {Client4} from 'mattermost-redux/client';
import {insertWithoutDuplicates} from 'mattermost-redux/utils/array_utils';
import {generateId} from 'utils/utils';
@ -26,7 +27,8 @@ export const useUserPropertyFields = () => {
const [fieldCollection, readIO] = useThing<UserPropertyFields>(useMemo(() => ({
get: async () => {
const data = await Client4.getCustomProfileAttributeFields();
return collectionFromArray(data);
const sorted = data.sort((a, b) => (a.attrs?.sort_order ?? 0) - (b.attrs?.sort_order ?? 0));
return collectionFromArray(sorted);
},
// eslint-disable-next-line @typescript-eslint/no-unused-vars
select: (state) => {
@ -65,7 +67,7 @@ export const useUserPropertyFields = () => {
errors: {}, // start with errors cleared; don't keep stale errors
};
// delete - all
// delete
await Promise.all(process.delete.map(async ({id}) => {
return Client4.deleteCustomProfileAttributeField(id).
then(() => {
@ -80,11 +82,11 @@ export const useUserPropertyFields = () => {
});
}));
// update - all
// update
await Promise.all(process.edit.map(async (pendingItem) => {
const {id, name, type} = pendingItem;
const {id, name, type, attrs} = pendingItem;
return Client4.patchCustomProfileAttributeField(id, {name, type}).
return Client4.patchCustomProfileAttributeField(id, {name, type, attrs}).
then((nextItem) => {
// data:updated
next.data[id] = nextItem;
@ -94,12 +96,11 @@ export const useUserPropertyFields = () => {
});
}));
// create - each, to preserve created/sort ordering
for (const pendingItem of process.create) {
const {id, name, type} = pendingItem;
// create
await Promise.all(process.create.map(async (pendingItem) => {
const {id, name, type, attrs} = pendingItem;
// eslint-disable-next-line no-await-in-loop
await Client4.createCustomProfileAttributeField({name, type}).
return Client4.createCustomProfileAttributeField({name, type, attrs}).
then((newItem) => {
// data:created (delete pending data)
Reflect.deleteProperty(next.data, id);
@ -111,7 +112,7 @@ export const useUserPropertyFields = () => {
catch((reason: ClientError) => {
next.errors = {...next.errors, [id]: reason};
});
}
}));
if (isEmpty(next.errors)) {
Reflect.deleteProperty(next, 'errors');
@ -175,11 +176,34 @@ export const useUserPropertyFields = () => {
},
create: () => {
pendingIO.apply((pending) => {
const nextOrder = Object.values(pending.data).filter((x) => !isDeletePending(x)).length;
const name = getIncrementedName('Text', pending);
const field = newPendingField({name, type: 'text'});
const field = newPendingField({name, type: 'text', attrs: {sort_order: nextOrder}});
return collectionAddItem(pending, field);
});
},
reorder: ({id}, nextItemOrder) => {
pendingIO.apply((pending) => {
const nextOrder = insertWithoutDuplicates(pending.order, id, nextItemOrder);
if (nextOrder === pending.order) {
return pending;
}
const nextItems = Object.values(pending.data).reduce<UserPropertyField[]>((changedItems, item) => {
const itemCurrentOrder = item.attrs?.sort_order;
const itemNextOrder = nextOrder.indexOf(item.id);
if (itemNextOrder !== itemCurrentOrder) {
changedItems.push({...item, attrs: {sort_order: itemNextOrder}});
}
return changedItems;
}, []);
return collectionReplaceItem({...pending, order: nextOrder}, ...nextItems);
});
},
delete: (id: string) => {
pendingIO.apply((pending) => {
const field = pending.data[id];

View File

@ -201,6 +201,11 @@ Object {
<fieldset
class="mm-modal-generic-section-item__fieldset-radio"
>
<legend
class="hidden-label"
>
Notify me about…
</legend>
<label
class="mm-modal-generic-section-item__label-radio"
>
@ -208,7 +213,7 @@ Object {
checked=""
data-testid="desktopNotification-all"
id="desktopNotification-all"
name="desktopNotification-all"
name="desktop"
type="radio"
value="all"
/>
@ -221,7 +226,7 @@ Object {
<input
data-testid="desktopNotification-mention"
id="desktopNotification-mention"
name="desktopNotification-mention"
name="desktop"
type="radio"
value="mention"
/>
@ -233,7 +238,7 @@ Object {
<input
data-testid="desktopNotification-none"
id="desktopNotification-none"
name="desktopNotification-none"
name="desktop"
type="radio"
value="none"
/>
@ -637,6 +642,11 @@ Object {
<fieldset
class="mm-modal-generic-section-item__fieldset-radio"
>
<legend
class="hidden-label"
>
Notify me about…
</legend>
<label
class="mm-modal-generic-section-item__label-radio"
>
@ -644,7 +654,7 @@ Object {
checked=""
data-testid="desktopNotification-all"
id="desktopNotification-all"
name="desktopNotification-all"
name="desktop"
type="radio"
value="all"
/>
@ -657,7 +667,7 @@ Object {
<input
data-testid="desktopNotification-mention"
id="desktopNotification-mention"
name="desktopNotification-mention"
name="desktop"
type="radio"
value="mention"
/>
@ -669,7 +679,7 @@ Object {
<input
data-testid="desktopNotification-none"
id="desktopNotification-none"
name="desktopNotification-none"
name="desktop"
type="radio"
value="none"
/>
@ -836,13 +846,18 @@ Object {
<fieldset
class="mm-modal-generic-section-item__fieldset-radio"
>
<legend
class="hidden-label"
>
Notify me about…
</legend>
<label
class="mm-modal-generic-section-item__label-radio"
>
<input
data-testid="MobileNotification-all"
id="MobileNotification-all"
name="MobileNotification-all"
name="push"
type="radio"
value="all"
/>
@ -855,7 +870,7 @@ Object {
checked=""
data-testid="MobileNotification-mention"
id="MobileNotification-mention"
name="MobileNotification-mention"
name="push"
type="radio"
value="mention"
/>
@ -868,7 +883,7 @@ Object {
<input
data-testid="MobileNotification-none"
id="MobileNotification-none"
name="MobileNotification-none"
name="push"
type="radio"
value="none"
/>
@ -1530,6 +1545,11 @@ Object {
<fieldset
class="mm-modal-generic-section-item__fieldset-radio"
>
<legend
class="hidden-label"
>
Notify me about…
</legend>
<label
class="mm-modal-generic-section-item__label-radio"
>
@ -1537,7 +1557,7 @@ Object {
checked=""
data-testid="desktopNotification-all"
id="desktopNotification-all"
name="desktopNotification-all"
name="desktop"
type="radio"
value="all"
/>
@ -1550,7 +1570,7 @@ Object {
<input
data-testid="desktopNotification-mention"
id="desktopNotification-mention"
name="desktopNotification-mention"
name="desktop"
type="radio"
value="mention"
/>
@ -1562,7 +1582,7 @@ Object {
<input
data-testid="desktopNotification-none"
id="desktopNotification-none"
name="desktopNotification-none"
name="desktop"
type="radio"
value="none"
/>
@ -2047,6 +2067,11 @@ Object {
<fieldset
class="mm-modal-generic-section-item__fieldset-radio"
>
<legend
class="hidden-label"
>
Notify me about…
</legend>
<label
class="mm-modal-generic-section-item__label-radio"
>
@ -2054,7 +2079,7 @@ Object {
checked=""
data-testid="desktopNotification-all"
id="desktopNotification-all"
name="desktopNotification-all"
name="desktop"
type="radio"
value="all"
/>
@ -2067,7 +2092,7 @@ Object {
<input
data-testid="desktopNotification-mention"
id="desktopNotification-mention"
name="desktopNotification-mention"
name="desktop"
type="radio"
value="mention"
/>
@ -2079,7 +2104,7 @@ Object {
<input
data-testid="desktopNotification-none"
id="desktopNotification-none"
name="desktopNotification-none"
name="desktop"
type="radio"
value="none"
/>
@ -2564,6 +2589,11 @@ Object {
<fieldset
class="mm-modal-generic-section-item__fieldset-radio"
>
<legend
class="hidden-label"
>
Notify me about…
</legend>
<label
class="mm-modal-generic-section-item__label-radio"
>
@ -2571,7 +2601,7 @@ Object {
checked=""
data-testid="desktopNotification-all"
id="desktopNotification-all"
name="desktopNotification-all"
name="desktop"
type="radio"
value="all"
/>
@ -2584,7 +2614,7 @@ Object {
<input
data-testid="desktopNotification-mention"
id="desktopNotification-mention"
name="desktopNotification-mention"
name="desktop"
type="radio"
value="mention"
/>
@ -2596,7 +2626,7 @@ Object {
<input
data-testid="desktopNotification-none"
id="desktopNotification-none"
name="desktopNotification-none"
name="desktop"
type="radio"
value="none"
/>

View File

@ -240,22 +240,22 @@ export default function ChannelNotificationsModal(props: Props) {
handleChange={(e) => handleChange({push: e.target.value})}
/>
{props.collapsedReplyThreads && settings.push === 'mention' &&
<CheckboxSettingItem
dataTestId='mobile-reply-threads-checkbox-section'
title={formatMessage({
id: 'channel_notifications.ThreadsReplyTitle',
defaultMessage: 'Thread reply notifications',
})}
inputFieldTitle={
<FormattedMessage
id='channel_notifications.checkbox.threadsReplyTitle'
defaultMessage="Notify me about replies to threads I\'m following"
/>
}
inputFieldValue={settings.push_threads === 'all'}
inputFieldData={utils.MobileReplyThreadsInputFieldData}
handleChange={(e) => handleChange({push_threads: e ? 'all' : 'mention'})}
/>}
<CheckboxSettingItem
dataTestId='mobile-reply-threads-checkbox-section'
title={formatMessage({
id: 'channel_notifications.ThreadsReplyTitle',
defaultMessage: 'Thread reply notifications',
})}
inputFieldTitle={
<FormattedMessage
id='channel_notifications.checkbox.threadsReplyTitle'
defaultMessage="Notify me about replies to threads I\'m following"
/>
}
inputFieldValue={settings.push_threads === 'all'}
inputFieldData={utils.MobileReplyThreadsInputFieldData}
handleChange={(e) => handleChange({push_threads: e ? 'all' : 'mention'})}
/>}
</>
)}
</>

View File

@ -2,7 +2,7 @@
// See LICENSE.txt for license information.
import React from 'react';
import {FormattedMessage} from 'react-intl';
import {defineMessage, FormattedMessage} from 'react-intl';
import type {FieldsetCheckbox} from 'components/widgets/modals/components/checkbox_setting_item';
import type {FieldsetRadio} from 'components/widgets/modals/components/radio_setting_item';
@ -41,69 +41,70 @@ export const AutoFollowThreadsInputFieldData: FieldsetCheckbox = {
dataTestId: 'autoFollowThreads',
};
export const desktopNotificationInputFieldData = (defaultOption: string): FieldsetRadio => {
return {
options: [
{
dataTestId: `desktopNotification-${NotificationLevels.ALL}`,
title: (
<FormattedMessage
id='channelNotifications.desktopNotification.allMessages'
defaultMessage='All new messages {optionalDefault}'
values={{
optionalDefault: defaultOption === NotificationLevels.ALL ? (
<FormattedMessage
id='channel_notifications.default'
defaultMessage='(default)'
/>) : undefined,
}}
/>
),
name: `desktopNotification-${NotificationLevels.ALL}`,
key: `desktopNotification-${NotificationLevels.ALL}`,
value: NotificationLevels.ALL,
},
{
dataTestId: `desktopNotification-${NotificationLevels.MENTION}`,
title: (
<FormattedMessage
id='channelNotifications.desktopNotification.mention'
defaultMessage='Mentions, direct messages, and keywords only {optionalDefault}'
values={{
optionalDefault: defaultOption === NotificationLevels.MENTION ? (
<FormattedMessage
id='channel_notifications.default'
defaultMessage='(default)'
/>) : undefined,
}}
/>
),
name: `desktopNotification-${NotificationLevels.MENTION}`,
key: `desktopNotification-${NotificationLevels.MENTION}`,
value: NotificationLevels.MENTION,
},
{
dataTestId: `desktopNotification-${NotificationLevels.NONE}`,
title: (
<FormattedMessage
id='channelNotifications.desktopNotification.nothing'
defaultMessage='Nothing {optionalDefault}'
values={{
optionalDefault: defaultOption === NotificationLevels.NONE ? (
<FormattedMessage
id='channel_notifications.default'
defaultMessage='(default)'
/>) : undefined,
}}
/>
),
name: `desktopNotification-${NotificationLevels.NONE}`,
key: `desktopNotification-${NotificationLevels.NONE}`,
value: NotificationLevels.NONE,
},
],
};
};
const defaultMessage = defineMessage({
id: 'channel_notifications.default',
defaultMessage: '(default)',
});
export const desktopNotificationInputFieldData = (defaultOption: string): FieldsetRadio => ({
options: [
{
dataTestId: `desktopNotification-${NotificationLevels.ALL}`,
title: (
<FormattedMessage
id='channelNotifications.desktopNotification.allMessages'
defaultMessage='All new messages {optionalDefault}'
values={{
optionalDefault: defaultOption === NotificationLevels.ALL ? (
<FormattedMessage
{...defaultMessage}
/>) : undefined,
}}
/>
),
name: 'desktop',
key: `desktopNotification-${NotificationLevels.ALL}`,
value: NotificationLevels.ALL,
},
{
dataTestId: `desktopNotification-${NotificationLevels.MENTION}`,
title: (
<FormattedMessage
id='channelNotifications.desktopNotification.mention'
defaultMessage='Mentions, direct messages, and keywords only {optionalDefault}'
values={{
optionalDefault: defaultOption === NotificationLevels.MENTION ? (
<FormattedMessage
{...defaultMessage}
/>) : undefined,
}}
/>
),
name: 'desktop',
key: `desktopNotification-${NotificationLevels.MENTION}`,
value: NotificationLevels.MENTION,
},
{
dataTestId: `desktopNotification-${NotificationLevels.NONE}`,
title: (
<FormattedMessage
id='channelNotifications.desktopNotification.nothing'
defaultMessage='Nothing {optionalDefault}'
values={{
optionalDefault: defaultOption === NotificationLevels.NONE ? (
<FormattedMessage
{...defaultMessage}
/>) : undefined,
}}
/>
),
name: 'desktop',
key: `desktopNotification-${NotificationLevels.NONE}`,
value: NotificationLevels.NONE,
},
],
}
);
export const desktopNotificationSoundsCheckboxFieldData: FieldsetCheckbox = {
name: 'desktopNotificationSoundsCheckbox',
@ -116,69 +117,66 @@ export const desktopNotificationSoundsSelectFieldData: FieldsetReactSelect = {
options: optionsOfMessageNotificationSoundsSelect,
};
export const mobileNotificationInputFieldData = (defaultOption: string): FieldsetRadio => {
return {
options: [
{
dataTestId: `MobileNotification-${NotificationLevels.ALL}`,
title: (
<FormattedMessage
id='channelNotifications.mobileNotification.newMessages'
defaultMessage='All new messages {optionalDefault}'
values={{
optionalDefault: defaultOption === NotificationLevels.ALL ? (
<FormattedMessage
id='channel_notifications.default'
defaultMessage='(default)'
/>) : undefined,
}}
/>
),
name: `MobileNotification-${NotificationLevels.ALL}`,
key: `MobileNotification-${NotificationLevels.ALL}`,
value: NotificationLevels.ALL,
},
{
dataTestId: `MobileNotification-${NotificationLevels.MENTION}`,
title: (
<FormattedMessage
id='channelNotifications.mobileNotification.mention'
defaultMessage='Mentions, direct messages, and keywords only {optionalDefault}'
values={{
optionalDefault: defaultOption === NotificationLevels.MENTION ? (
<FormattedMessage
id='channel_notifications.default'
defaultMessage='(default)'
/>) : undefined,
}}
/>
),
name: `MobileNotification-${NotificationLevels.MENTION}`,
key: `MobileNotification-${NotificationLevels.MENTION}`,
value: NotificationLevels.MENTION,
},
{
dataTestId: `MobileNotification-${NotificationLevels.NONE}`,
title: (
<FormattedMessage
id='channelNotifications.mobileNotification.nothing'
defaultMessage='Nothing {optionalDefault}'
values={{
optionalDefault: defaultOption === NotificationLevels.NONE ? (
<FormattedMessage
id='channel_notifications.default'
defaultMessage='(default)'
/>) : undefined,
}}
/>
),
name: `MobileNotification-${NotificationLevels.NONE}`,
key: `MobileNotification-${NotificationLevels.NONE}`,
value: NotificationLevels.NONE,
},
],
};
};
export const mobileNotificationInputFieldData = (defaultOption: string): FieldsetRadio => ({
options: [
{
dataTestId: `MobileNotification-${NotificationLevels.ALL}`,
title: (
<FormattedMessage
id='channelNotifications.mobileNotification.newMessages'
defaultMessage='All new messages {optionalDefault}'
values={{
optionalDefault: defaultOption === NotificationLevels.ALL ? (
<FormattedMessage
{...defaultMessage}
/>) : undefined,
}}
/>
),
name: 'push',
key: `MobileNotification-${NotificationLevels.ALL}`,
value: NotificationLevels.ALL,
},
{
dataTestId: `MobileNotification-${NotificationLevels.MENTION}`,
title: (
<FormattedMessage
id='channelNotifications.mobileNotification.mention'
defaultMessage='Mentions, direct messages, and keywords only {optionalDefault}'
values={{
optionalDefault: defaultOption === NotificationLevels.MENTION ? (
<FormattedMessage
{...defaultMessage}
/>) : undefined,
}}
/>
),
name: 'push',
key: `MobileNotification-${NotificationLevels.MENTION}`,
value: NotificationLevels.MENTION,
},
{
dataTestId: `MobileNotification-${NotificationLevels.NONE}`,
title: (
<FormattedMessage
id='channelNotifications.mobileNotification.nothing'
defaultMessage='Nothing {optionalDefault}'
values={{
optionalDefault: defaultOption === NotificationLevels.NONE ? (
<FormattedMessage
{...defaultMessage}
/>) : undefined,
}}
/>
),
name: 'push',
key: `MobileNotification-${NotificationLevels.NONE}`,
value: NotificationLevels.NONE,
},
],
}
);
export default {
desktopNotificationInputFieldData,

View File

@ -14,6 +14,7 @@
&__main {
position: absolute;
top: 56px;
display: flex;
overflow: auto;
width: 100%;
@ -68,6 +69,10 @@
position: relative;
height: 100%;
}
.DraftList.Drafts__main {
top: 0;
}
}
}

View File

@ -58,7 +58,7 @@ const AllowedDomainsSelect = ({allowedDomains, setAllowedDomains, setHasChanges,
defaultMessage: 'When enabled, users can only join the team if their email matches a specific domain (e.g. "mattermost.org")',
})}
descriptionAboveContent={true}
inputFieldData={{name: 'name'}}
inputFieldData={{name: 'showAllowedDomains'}}
inputFieldValue={showAllowedDomains}
handleChange={handleEnableAllowedDomains}
/>

View File

@ -57,10 +57,10 @@ const OpenInvite = ({isGroupConstrained, allowOpenInvite, setAllowOpenInvite}: P
inputFieldTitle={
<FormattedMessage
id='general_tab.openInviteTitle'
defaultMessage='Allow only users with a specific email domain to join this team'
defaultMessage='Allow any user with an account on this server to join this team'
/>
}
inputFieldData={{name: 'name'}}
inputFieldData={{name: 'allowOpenInvite'}}
inputFieldValue={allowOpenInvite}
handleChange={setAllowOpenInvite}
title={formatMessage({

View File

@ -120,4 +120,31 @@ describe('components/TeamSettings', () => {
id: defaultProps.team?.id,
});
});
test('MM-62891 should toggle the right checkboxes when their labels are clicked on', () => {
renderWithContext(<AccessTab {...defaultProps}/>);
expect(screen.getByRole('checkbox', {name: 'Allow only users with a specific email domain to join this team'})).not.toBeChecked();
expect(screen.getByRole('checkbox', {name: 'Allow any user with an account on this server to join this team'})).not.toBeChecked();
userEvent.click(screen.getByText('Allow only users with a specific email domain to join this team'));
expect(screen.getByRole('checkbox', {name: 'Allow only users with a specific email domain to join this team'})).toBeChecked();
expect(screen.getByRole('checkbox', {name: 'Allow any user with an account on this server to join this team'})).not.toBeChecked();
userEvent.click(screen.getByText('Allow only users with a specific email domain to join this team'));
expect(screen.getByRole('checkbox', {name: 'Allow only users with a specific email domain to join this team'})).not.toBeChecked();
expect(screen.getByRole('checkbox', {name: 'Allow any user with an account on this server to join this team'})).not.toBeChecked();
userEvent.click(screen.getByText('Allow any user with an account on this server to join this team'));
expect(screen.getByRole('checkbox', {name: 'Allow only users with a specific email domain to join this team'})).not.toBeChecked();
expect(screen.getByRole('checkbox', {name: 'Allow any user with an account on this server to join this team'})).toBeChecked();
userEvent.click(screen.getByText('Allow any user with an account on this server to join this team'));
expect(screen.getByRole('checkbox', {name: 'Allow only users with a specific email domain to join this team'})).not.toBeChecked();
expect(screen.getByRole('checkbox', {name: 'Allow any user with an account on this server to join this team'})).not.toBeChecked();
});
});

View File

@ -31,6 +31,7 @@ function mapStateToProps(state: GlobalState, {location: {pathname}}: Props) {
unreadStatus: getUnreadStatus(state),
inGlobalThreads: matchPath(pathname, {path: '/:team/threads/:threadIdentifier?'}) != null,
inDrafts: matchPath(pathname, {path: '/:team/drafts'}) != null,
inScheduledPosts: matchPath(pathname, {path: '/:team/scheduled_posts'}) != null,
};
}

View File

@ -47,6 +47,7 @@ describe('components/UnreadsStatusHandler', () => {
currentTeammate: null,
inGlobalThreads: false,
inDrafts: false,
inScheduledPosts: false,
};
test('set correctly the title when needed', () => {
@ -83,6 +84,38 @@ describe('components/UnreadsStatusHandler', () => {
currentTeammate: {} as Props['currentTeammate']});
instance.updateTitle();
expect(document.title).toBe('Mattermost - Join a team');
wrapper.setProps({
inDrafts: false,
inScheduledPosts: true,
unreadStatus: 0,
});
instance.updateTitle();
expect(document.title).toBe('Scheduled - Test team display name');
wrapper.setProps({
inDrafts: false,
inScheduledPosts: true,
unreadStatus: 10,
});
instance.updateTitle();
expect(document.title).toBe('(10) Scheduled - Test team display name');
wrapper.setProps({
inDrafts: true,
inScheduledPosts: false,
unreadStatus: 0,
});
instance.updateTitle();
expect(document.title).toBe('Drafts - Test team display name');
wrapper.setProps({
inDrafts: true,
inScheduledPosts: false,
unreadStatus: 10,
});
instance.updateTitle();
expect(document.title).toBe('(10) Drafts - Test team display name');
});
test('should set correct title on mentions on safari', () => {
@ -149,4 +182,18 @@ describe('components/UnreadsStatusHandler', () => {
expect(document.title).toBe('Drafts - Test team display name');
});
test('should display correct title when in scheduled posts tab', () => {
const wrapper = shallowWithIntl(
<UnreadsStatusHandler
{...defaultProps}
inScheduledPosts={true}
currentChannel={undefined}
siteName={undefined}
/>,
) as unknown as ShallowWrapper<Props, any, UnreadsStatusHandlerClass>;
wrapper.instance().updateTitle();
expect(document.title).toBe('Scheduled - Test team display name');
});
});

View File

@ -46,6 +46,7 @@ type Props = {
currentTeammate: Channel | null;
inGlobalThreads: boolean;
inDrafts: boolean;
inScheduledPosts: boolean;
};
export class UnreadsStatusHandlerClass extends React.PureComponent<Props> {
@ -90,6 +91,7 @@ export class UnreadsStatusHandlerClass extends React.PureComponent<Props> {
unreadStatus,
inGlobalThreads,
inDrafts,
inScheduledPosts,
} = this.props;
const {formatMessage} = this.props.intl;
@ -126,6 +128,15 @@ export class UnreadsStatusHandlerClass extends React.PureComponent<Props> {
displayName: currentTeam.display_name,
siteName: currentSiteName,
});
} else if (currentTeam && inScheduledPosts) {
document.title = formatMessage({
id: 'scheduledPosts.title',
defaultMessage: '{prefix}Scheduled - {displayName} {siteName}',
}, {
prefix: `${mentionTitle}${unreadTitle}`,
displayName: currentTeam.display_name,
siteName: currentSiteName,
});
} else {
document.title = formatMessage({id: 'sidebar.team_select', defaultMessage: '{siteName} - Join a team'}, {siteName: currentSiteName || 'Mattermost'});
}

View File

@ -54,7 +54,12 @@ function RadioSettingItem({
});
const content = (
<fieldset className='mm-modal-generic-section-item__fieldset-radio'>
<fieldset
className='mm-modal-generic-section-item__fieldset-radio'
>
<legend className='hidden-label'>
{title}
</legend>
{[...fields]}
</fieldset>
);

View File

@ -656,6 +656,9 @@
"admin.customization.announcement.enableBannerTitle": "Enable System-wide Notifications:",
"admin.customization.appDownloadLinkDesc": "Add a link to a download page for the Mattermost apps. When a link is present, an option to \"Download Mattermost Apps\" will be added in the Product Menu so users can find the download page. Leave this field blank to hide the option from the Product Menu.",
"admin.customization.appDownloadLinkTitle": "Mattermost Apps Download Page Link:",
"admin.customization.clientSideUserIds": "Client side user ids:",
"admin.customization.clientSideUserIdsDesc": "Set the user ids you want to track for client side metrics. Separate values with a comma.",
"admin.customization.clientSideUserIdsPlaceholder": "E.g.: \"userid1,userid2\"",
"admin.customization.customUrlSchemes": "Custom URL Schemes:",
"admin.customization.customUrlSchemesDesc": "Allows message text to link if it begins with any of the comma-separated URL schemes listed. By default, the following schemes will create links: \"http\", \"https\", \"ftp\", \"tel\", and \"mailto\".",
"admin.customization.customUrlSchemesPlaceholder": "E.g.: \"git,smtp\"",
@ -4934,6 +4937,7 @@
"scheduled_posts.row_title_channel.placeholder": "In: {icon} No Destination",
"scheduled_posts.row_title_thread.placeholder": "Thread to: {icon} No Destination",
"scheduled_posts.row_title_thread.placeholder_tooltip": "The channel either doesn't exist or you do not have access to it.",
"scheduledPosts.title": "{prefix}Scheduled - {displayName} {siteName}",
"search_bar.channels": "Channels",
"search_bar.clear": "Clear",
"search_bar.file_types": "File types",

View File

@ -11,6 +11,11 @@
padding: 4px;
border: 1px solid transparent;
border-radius: 4px;
// Counteract the margins added to ul in _typography.scss
margin-top: 0;
margin-bottom: 0;
list-style: none;
white-space: normal;
}

View File

@ -335,12 +335,10 @@
right: 0;
font-size: 13px;
a {
vertical-align: top;
}
.search-item__jump {
position: relative;
display: block;
height: 100%;
padding: 5px 4px 0;
border-radius: 4px;
color: rgba(var(--center-channel-color-rgb), 0.4);

View File

@ -31,5 +31,6 @@ export type UserPropertyFieldGroupID = 'custom_profile_attributes';
// A custom profile attribute field: a PropertyField narrowed to the CPA
// group, plus optional server-provided attrs (sort_order drives display order).
export type UserPropertyField = PropertyField & {
    type: UserPropertyFieldType;
    group_id: UserPropertyFieldGroupID;
    attrs?: {sort_order?: number};
}

// Patchable subset of a field for partial updates.
// Fix: removed the duplicate 'attrs' member from the Pick union and the
// duplicated declaration line left behind by the merge.
export type UserPropertyFieldPatch = Partial<Pick<UserPropertyField, 'name' | 'attrs' | 'type'>>;

View File

@ -90,19 +90,19 @@ export const collectionFromArray = <T extends {id: string}>(arr: T[] = []): IDMa
current.data = {...current.data, [item.id]: item};
current.order.push(item.id);
return current;
}, {data: {} as IDMappedObjects<T>, order: []} as IDMappedCollection<T>);
}, {data: {} as IDMappedObjects<T>, order: [] as string[]});
};
// Flattens an ID-mapped collection back into a plain array, following the
// collection's explicit `order` so callers get items in display order.
export const collectionToArray = <T extends {id: string}>(collection: IDMappedCollection<T>): T[] => {
    const items: T[] = [];
    for (const id of collection.order) {
        items.push(collection.data[id]);
    }
    return items;
};
export const collectionReplaceItem = <T extends {id: string}>(collection: IDMappedCollection<T>, item: T) => {
return {...collection, data: {...collection.data, [item.id]: item}};
export const collectionReplaceItem = <T extends {id: string}>(collection: IDMappedCollection<T>, ...items: T[]) => {
return {...collection, data: idMappedObjectsFromArr(items, collection.data)};
};
export const collectionAddItem = <T extends {id: string}>(collection: IDMappedCollection<T>, item: T) => {
return {...collection, data: {...collection.data, [item.id]: item}, order: [...collection.order, item.id]};
export const collectionAddItem = <T extends {id: string}>(collection: IDMappedCollection<T>, ...items: T[]) => {
return {...collectionReplaceItem(collection, ...items), order: [...collection.order, ...items.map(({id}) => id)]};
};
export const collectionRemoveItem = <T extends {id: string}>(collection: IDMappedCollection<T>, item: T) => {
@ -111,3 +111,7 @@ export const collectionRemoveItem = <T extends {id: string}>(collection: IDMappe
const order = collection.order.filter((id) => id !== item.id);
return {...collection, data, order};
};
// Builds an id -> item lookup from an array, optionally seeded with an
// existing mapping. Later items win on id collisions; `current` is copied,
// never mutated.
export const idMappedObjectsFromArr = <T extends {id: string}>(items: T[], current?: IDMappedObjects<T>) => {
    const mapped = {...current} as IDMappedObjects<T>;
    for (const item of items) {
        mapped[item.id] = item;
    }
    return mapped;
};