Merge branch 'master' into MM-28014_Improve_Channel_Intros

This commit is contained in:
Matthew Birtch 2024-03-08 10:05:20 -05:00
commit 80fcc92595
49 changed files with 1175 additions and 593 deletions

83
.github/workflows/e2e-fulltests-ci.yml vendored Normal file
View File

@ -0,0 +1,83 @@
---
# E2E full-test workflow: computes per-report-type test variables, then delegates
# the actual run to the reusable template workflow (e2e-tests-ci-template.yml).
name: E2E Tests
on:
  # For PRs, this workflow gets triggered from the Argo Events platform.
  # Check the following repo for details: https://github.com/mattermost/delivery-platform
  workflow_dispatch:
    inputs:
      ref:
        type: string
        required: false
      REPORT_TYPE:
        type: choice
        description: The context this report is being generated in
        options:
          - PR
          - RELEASE
          - MASTER
          - MASTER_UNSTABLE
          - CLOUD
          - CLOUD_UNSTABLE
          - NONE
        default: NONE
jobs:
  generate-test-variables:
    runs-on: ubuntu-22.04
    outputs:
      status_check_context: "${{ steps.generate.outputs.status_check_context }}"
      workers_number: "${{ steps.generate.outputs.workers_number }}"
      ENABLED_DOCKER_SERVICES: "${{ steps.generate.outputs.ENABLED_DOCKER_SERVICES }}"
      TEST_FILTER: "${{ steps.generate.outputs.TEST_FILTER }}"
    env:
      # We could exclude the @smoke group for PRs, but then we wouldn't have it in the report
      TEST_FILTER_PR: >-
        --stage="@prod"
        --excludeGroup="@te_only,@cloud_only,@high_availability"
        --sortFirst="@compliance_export,@elasticsearch,@ldap_group,@ldap"
        --sortLast="@saml,@keycloak,@plugin,@plugins_uninstall,@mfa,@license_removal"
    steps:
      - name: ci/generate-test-variables
        id: generate
        shell: bash
        run: |
          case "${{ inputs.REPORT_TYPE }}" in
            NONE | PR)
              echo "status_check_context=E2E Tests/test" >> $GITHUB_OUTPUT
              echo "workers_number=20" >> $GITHUB_OUTPUT
              echo "ENABLED_DOCKER_SERVICES=postgres inbucket minio openldap elasticsearch keycloak" >> $GITHUB_OUTPUT
              echo "TEST_FILTER=$TEST_FILTER_PR" >> $GITHUB_OUTPUT
              ;;
            *)
              # TODO implement other test types, in the future
              echo "Fatal: unimplemented test type. Aborting."
              exit 1
          esac
  e2e-fulltest:
    needs:
      - generate-test-variables
    uses: ./.github/workflows/e2e-tests-ci-template.yml
    strategy:
      matrix:
        type:
          - name: PR
    with:
      ref: "${{ inputs.ref || github.sha }}"
      status_check_context: "${{ needs.generate-test-variables.outputs.status_check_context }}"
      workers_number: "${{ needs.generate-test-variables.outputs.workers_number }}"
      testcase_failure_fatal: false
      run_preflight_checks: false
      enable_reporting: true
      ENABLED_DOCKER_SERVICES: "${{ needs.generate-test-variables.outputs.ENABLED_DOCKER_SERVICES }}"
      TEST_FILTER: "${{ needs.generate-test-variables.outputs.TEST_FILTER }}"
      REPORT_TYPE: "${{ inputs.REPORT_TYPE }}"
    secrets:
      MM_LICENSE: "${{ secrets.MM_E2E_TEST_LICENSE_ONPREM_ENT }}"
      AUTOMATION_DASHBOARD_URL: "${{ secrets.MM_E2E_AUTOMATION_DASHBOARD_URL }}"
      AUTOMATION_DASHBOARD_TOKEN: "${{ secrets.MM_E2E_AUTOMATION_DASHBOARD_TOKEN }}"
      PUSH_NOTIFICATION_SERVER: "${{ secrets.MM_E2E_PUSH_NOTIFICATION_SERVER }}"
      REPORT_WEBHOOK_URL: "${{ secrets.MM_E2E_REPORT_WEBHOOK_URL }}"
      ### These are disabled until release tests are implemented
      # NB: secret name below fixed to match the template's declared secret
      # (REPORT_TM4J_TEST_CYCLE_LINK_PREFIX), so uncommenting it will work as-is
      #REPORT_TM4J_API_KEY: "${{ secrets.MM_E2E_TM4J_API_KEY }}"
      #REPORT_TM4J_TEST_CYCLE_LINK_PREFIX: "${{ secrets.MM_E2E_TEST_CYCLE_LINK_PREFIX }}"

View File

@ -0,0 +1,390 @@
---
# Reusable E2E test workflow. Callers provide a git ref, a status-check context,
# and optional tuning inputs; this template runs preflight checks, the sharded
# Cypress/Playwright test matrix, report publishing, and commit-status updates.
name: E2E Tests Template
on:
  workflow_call:
    inputs:
      # NB: this does not support using branch names that belong to forks.
      # In those cases, you should specify directly the commit SHA that you want to test, or
      # some wrapper workflow that does it for you (e.g. the slash command for initiating a PR test)
      ref:
        type: string
        required: true
      status_check_context:
        type: string
        required: true
      workers_number:
        type: string # Should ideally be a number; see https://github.com/orgs/community/discussions/67182
        required: false
        default: "1"
      testcase_failure_fatal:
        type: boolean
        required: false
        default: true
      # NB: the following toggles will skip individual steps, rather than the whole jobs,
      # to let the dependent jobs run even if these are false
      run_preflight_checks:
        type: boolean
        required: false
        default: true
      enable_reporting:
        type: boolean
        required: false
        default: false
      ENABLED_DOCKER_SERVICES:
        type: string
        required: false
      TEST_FILTER:
        type: string
        required: false
      MM_ENV:
        type: string
        required: false
      REPORT_TYPE:
        type: string
        required: false
    secrets:
      MM_LICENSE:
        required: false
      AUTOMATION_DASHBOARD_URL:
        required: false
      AUTOMATION_DASHBOARD_TOKEN:
        required: false
      PUSH_NOTIFICATION_SERVER:
        required: false
      REPORT_WEBHOOK_URL:
        required: false
      REPORT_TM4J_API_KEY:
        required: false
      REPORT_TM4J_TEST_CYCLE_LINK_PREFIX:
        required: false
jobs:
  # Resolve the caller-supplied ref to a concrete commit SHA, so that all
  # downstream status updates target the same commit.
  resolve-ref:
    runs-on: ubuntu-22.04
    outputs:
      commit_sha: "${{ steps.resolve-ref.outputs.commit_sha }}"
    steps:
      - name: ci/checkout-repo
        uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
        with:
          ref: ${{ inputs.ref }}
          fetch-depth: 0
      - name: ci/resolve-ref
        id: resolve-ref
        run: |
          echo "commit_sha=$(git rev-parse --verify HEAD)" >> $GITHUB_OUTPUT
  # Mark the commit status as pending while the E2E run is in progress.
  update-initial-status:
    runs-on: ubuntu-22.04
    needs:
      - resolve-ref
    steps:
      - uses: mattermost/actions/delivery/update-commit-status@main
        env:
          GITHUB_TOKEN: ${{ github.token }}
        with:
          repository_full_name: ${{ github.repository }}
          commit_sha: ${{ needs.resolve-ref.outputs.commit_sha }}
          context: ${{ inputs.status_check_context }}
          description: E2E tests for mattermost server app
          status: pending
  # Preflight lint/typecheck for the Cypress suite (skippable via run_preflight_checks).
  cypress-check:
    runs-on: ubuntu-22.04
    needs:
      - update-initial-status
    defaults:
      run:
        working-directory: e2e-tests/cypress
    steps:
      - name: ci/checkout-repo
        if: "${{ inputs.run_preflight_checks }}"
        uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
        with:
          ref: ${{ inputs.ref }}
          fetch-depth: 0
      - name: ci/setup-node
        if: "${{ inputs.run_preflight_checks }}"
        uses: actions/setup-node@60edb5dd545a775178f52524783378180af0d1f8 # v4.0.2
        id: setup_node
        with:
          node-version-file: ".nvmrc"
          cache: npm
          cache-dependency-path: "e2e-tests/cypress/package-lock.json"
      - name: ci/cypress/npm-install
        if: "${{ inputs.run_preflight_checks }}"
        run: |
          npm ci
      - name: ci/cypress/npm-check
        if: "${{ inputs.run_preflight_checks }}"
        run: |
          npm run check
  # Preflight lint/typecheck for the Playwright suite (skippable via run_preflight_checks).
  playwright-check:
    runs-on: ubuntu-22.04
    needs:
      - update-initial-status
    defaults:
      run:
        working-directory: e2e-tests/playwright
    steps:
      - name: ci/checkout-repo
        if: "${{ inputs.run_preflight_checks }}"
        uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
        with:
          ref: ${{ inputs.ref }}
          fetch-depth: 0
      - name: ci/setup-node
        if: "${{ inputs.run_preflight_checks }}"
        uses: actions/setup-node@60edb5dd545a775178f52524783378180af0d1f8 # v4.0.2
        id: setup_node
        with:
          node-version-file: ".nvmrc"
          cache: npm
          cache-dependency-path: "e2e-tests/playwright/package-lock.json"
      - name: ci/get-webapp-node-modules
        if: "${{ inputs.run_preflight_checks }}"
        working-directory: webapp
        # requires build of client and types
        run: |
          make node_modules
      - name: ci/playwright/npm-install
        if: "${{ inputs.run_preflight_checks }}"
        run: |
          npm ci
      - name: ci/playwright/npm-check
        if: "${{ inputs.run_preflight_checks }}"
        run: |
          npm run check
  # Derive the worker-index list, dashboard branch identifier and build ID
  # that the test matrix and reporting jobs consume.
  generate-build-variables:
    runs-on: ubuntu-22.04
    needs:
      - resolve-ref
      - update-initial-status
    outputs:
      workers: "${{ steps.workers.outputs.workers }}"
      BRANCH: "${{ steps.branch.outputs.BRANCH }}"
      BUILD_ID: "${{ steps.build-id.outputs.BUILD_ID }}"
    steps:
      - name: ci/checkout-repo
        uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
        with:
          ref: ${{ inputs.ref }}
          fetch-depth: 0
      - name: ci/generate-workers
        id: workers
        env:
          WORKERS: ${{ inputs.workers_number }}
        run: |
          [ "$WORKERS" -gt "0" ] # Assert that the workers number is an integer greater than 0
          echo "workers="$(jq --slurp --compact-output '[range('"$WORKERS"')] | map(tostring)' /dev/null) >> $GITHUB_OUTPUT
      - name: ci/generate-branch
        id: branch
        run: |
          PR_LIST=$(curl -L -H "Accept: application/vnd.github+json" \
            -H "Authorization: Bearer ${{ github.token }}" \
            -H "X-GitHub-Api-Version: 2022-11-28" \
            "${{ github.api_url }}/repos/${{ github.repository }}/commits/${{ needs.resolve-ref.outputs.commit_sha }}/pulls" 2>/dev/null || echo -n "")
          PR_NAMES_LIST=$(echo -n "$PR_LIST" | jq -r 'map(.number) | join("-")')
          if [ -n "$PR_NAMES_LIST" ]; then
            # This commit belongs to one or more PRs.
            # Prefer displaying the PR names in the Automation Dashboard, instead of the branch name
            BRANCH="server-pr-${PR_NAMES_LIST}"
          else
            # This commit does not belong to a PR. Use the given input ref as-is
            BRANCH="${{ inputs.ref }}"
          fi
          echo "Generated branch identifier for E2E run: ${BRANCH}"
          echo "BRANCH=${BRANCH}" >> "$GITHUB_OUTPUT"
      - name: ci/calculate-build-id
        id: build-id
        run: |
          BUILD_ID="$(git rev-parse --short=7 HEAD)-${{ github.run_id }}-${{ github.run_attempt }}-ent"
          echo "Generated BUILD_ID for E2E run: ${BUILD_ID}"
          echo "BUILD_ID=${BUILD_ID}" >> "$GITHUB_OUTPUT"
  # Register the test cycle on the Automation Dashboard (no-op if the dashboard
  # URL secret is unset; the underlying make target handles that).
  generate-test-cycle:
    runs-on: ubuntu-22.04
    needs:
      - generate-build-variables
    defaults:
      run:
        shell: bash
        working-directory: e2e-tests
    steps:
      - name: ci/checkout-repo
        uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
        with:
          ref: ${{ inputs.ref }}
          fetch-depth: 0
      - name: ci/setup-node
        uses: actions/setup-node@60edb5dd545a775178f52524783378180af0d1f8 # v4.0.2
        id: setup_node
        with:
          node-version-file: ".nvmrc"
          cache: npm
          cache-dependency-path: "e2e-tests/cypress/package-lock.json"
      - name: ci/e2e-test-gencycle
        env:
          AUTOMATION_DASHBOARD_URL: "${{ secrets.AUTOMATION_DASHBOARD_URL }}"
          AUTOMATION_DASHBOARD_TOKEN: "${{ secrets.AUTOMATION_DASHBOARD_TOKEN }}"
          BRANCH: "${{ needs.generate-build-variables.outputs.BRANCH }}"
          BUILD_ID: "${{ needs.generate-build-variables.outputs.BUILD_ID }}"
          TEST_FILTER: "${{ inputs.TEST_FILTER }}"
        run: |
          make generate-test-cycle
  # Sharded test execution: one matrix entry per worker index.
  test:
    continue-on-error: true # Individual runner failures shouldn't prevent the completion of an E2E run
    strategy:
      fail-fast: false # Individual runner failures shouldn't prevent the completion of an E2E run
      matrix:
        #
        # Note that E2E tests should be run only on ubuntu, for QA purposes.
        # But it's useful to be able to run and debug the E2E tests for different OSes.
        # Notes:
        # - For MacOS: works on developer machines, but uses too many resources to be able to run on Github Actions
        # - for Windows: cannot currently run on Github Actions, since the runners do not support running linux containers, at the moment
        #
        #os: [ubuntu-latest-8-cores, windows-2022, macos-12-xl]
        os: [ubuntu-latest-8-cores]
        worker_index: ${{ fromJSON(needs.generate-build-variables.outputs.workers) }} # https://docs.github.com/en/actions/learn-github-actions/expressions#example-returning-a-json-object
    runs-on: "${{ matrix.os }}"
    timeout-minutes: 60
    needs:
      - cypress-check
      - playwright-check
      - generate-build-variables
      - generate-test-cycle
    defaults:
      run:
        shell: bash
        working-directory: e2e-tests
    env:
      AUTOMATION_DASHBOARD_URL: "${{ secrets.AUTOMATION_DASHBOARD_URL }}"
      AUTOMATION_DASHBOARD_TOKEN: "${{ secrets.AUTOMATION_DASHBOARD_TOKEN }}"
      MM_LICENSE: "${{ secrets.MM_LICENSE }}"
      ENABLED_DOCKER_SERVICES: "${{ inputs.ENABLED_DOCKER_SERVICES }}"
      TEST_FILTER: "${{ inputs.TEST_FILTER }}"
      MM_ENV: "${{ inputs.MM_ENV }}"
      BRANCH: "${{ needs.generate-build-variables.outputs.BRANCH }}"
      BUILD_ID: "${{ needs.generate-build-variables.outputs.BUILD_ID }}"
      CI_BASE_URL: "http://localhost:8065/?worker_index=${{ matrix.worker_index }}"
      CYPRESS_pushNotificationServer: "${{ secrets.PUSH_NOTIFICATION_SERVER }}"
    steps:
      - name: ci/checkout-repo
        uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
        with:
          ref: ${{ inputs.ref }}
          fetch-depth: 0
      - name: ci/setup-macos-docker
        if: runner.os == 'macos'
        # https://github.com/actions/runner-images/issues/17#issuecomment-1537238473
        run: |
          brew install docker docker-compose
          colima start
          mkdir -p ~/.docker/cli-plugins
          ln -sfn /usr/local/opt/docker-compose/bin/docker-compose ~/.docker/cli-plugins/docker-compose
          sudo ln -sf $HOME/.colima/default/docker.sock /var/run/docker.sock
      - name: ci/setup-node
        uses: actions/setup-node@60edb5dd545a775178f52524783378180af0d1f8 # v4.0.2
        id: setup_node
        with:
          node-version-file: ".nvmrc"
          cache: npm
          cache-dependency-path: "e2e-tests/cypress/package-lock.json"
      - name: ci/e2e-test
        run: |
          make
      - name: ci/e2e-test-store-results
        uses: actions/upload-artifact@5d5d22a31266ced268874388b861e4b58bb5c2f3 # v4.3.1
        with:
          name: e2e-test-results-${{ matrix.os }}-${{ matrix.worker_index }}
          path: |
            e2e-tests/cypress/logs/
            e2e-tests/cypress/results/
      - name: ci/e2e-test-assert-results
        if: ${{ inputs.testcase_failure_fatal }}
        run: |
          # Assert that the run contained 0 failures
          CYPRESS_FAILURES=$(find cypress/results -name '*.json' | xargs -l jq -r '.stats.failures' | jq -s add)
          echo "Cypress run completed with $CYPRESS_FAILURES failures"
          [ "$CYPRESS_FAILURES" = "0" ]
  # Merge per-worker artifacts and publish the aggregate report (skippable via enable_reporting).
  publish-report:
    runs-on: ubuntu-22.04
    needs:
      - generate-build-variables
      - test
    defaults:
      run:
        shell: bash
        working-directory: e2e-tests
    steps:
      - name: ci/checkout-repo
        if: "${{ inputs.enable_reporting }}"
        uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
        with:
          ref: ${{ inputs.ref }}
          fetch-depth: 0
      - name: ci/setup-node
        if: "${{ inputs.enable_reporting }}"
        uses: actions/setup-node@60edb5dd545a775178f52524783378180af0d1f8 # v4.0.2
        id: setup_node
        with:
          node-version-file: ".nvmrc"
          cache: npm
          cache-dependency-path: "e2e-tests/cypress/package-lock.json"
      - name: ci/download-artifacts
        if: "${{ inputs.enable_reporting }}"
        uses: actions/download-artifact@eaceaf801fd36c7dee90939fad912460b18a1ffe # v4.1.2
        with:
          path: e2e-tests/cypress/
          merge-multiple: true
      - name: ci/publish-report
        if: "${{ inputs.enable_reporting }}"
        env:
          TYPE: "${{ inputs.REPORT_TYPE }}"
          WEBHOOK_URL: "${{ secrets.REPORT_WEBHOOK_URL }}"
          BRANCH: "${{ needs.generate-build-variables.outputs.BRANCH }}"
          BUILD_ID: "${{ needs.generate-build-variables.outputs.BUILD_ID }}"
          MM_ENV: "${{ inputs.MM_ENV }}"
          TM4J_API_KEY: "${{ secrets.REPORT_TM4J_API_KEY }}"
          TEST_CYCLE_LINK_PREFIX: "${{ secrets.REPORT_TM4J_TEST_CYCLE_LINK_PREFIX }}"
        run: |
          make publish-report
  # Final commit status: failure path.
  update-failure-final-status:
    runs-on: ubuntu-22.04
    if: failure() || cancelled()
    needs:
      - resolve-ref
      - publish-report
    steps:
      - uses: mattermost/actions/delivery/update-commit-status@main
        env:
          GITHUB_TOKEN: ${{ github.token }}
        with:
          repository_full_name: ${{ github.repository }}
          commit_sha: ${{ needs.resolve-ref.outputs.commit_sha }}
          context: ${{ inputs.status_check_context }}
          description: E2E tests for mattermost server app
          status: failure
  # Final commit status: success path.
  update-success-final-status:
    runs-on: ubuntu-22.04
    if: success()
    needs:
      - resolve-ref
      - publish-report
    steps:
      - uses: mattermost/actions/delivery/update-commit-status@main
        env:
          GITHUB_TOKEN: ${{ github.token }}
        with:
          repository_full_name: ${{ github.repository }}
          commit_sha: ${{ needs.resolve-ref.outputs.commit_sha }}
          context: ${{ inputs.status_check_context }}
          description: E2E tests for mattermost server app
          status: success

View File

@ -1,4 +1,5 @@
name: E2E Tests
---
name: E2E Smoketests
on:
# For PRs, this workflow gets triggered from the Argo Events platform.
# Check the following repo for details: https://github.com/mattermost/delivery-platform
@ -9,155 +10,8 @@ on:
required: false
jobs:
update-initial-status:
runs-on: ubuntu-22.04
steps:
- uses: mattermost/actions/delivery/update-commit-status@main
env:
GITHUB_TOKEN: ${{ github.token }}
with:
repository_full_name: ${{ github.repository }}
commit_sha: ${{ inputs.commit_sha || github.sha }}
context: E2E Tests/smoketests
description: E2E tests for mattermost server app
status: pending
cypress-check:
runs-on: ubuntu-22.04
needs:
- update-initial-status
defaults:
run:
working-directory: e2e-tests/cypress
steps:
- name: ci/checkout-repo
uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
with:
ref: ${{ inputs.commit_sha || github.sha }}
- name: ci/setup-node
uses: actions/setup-node@60edb5dd545a775178f52524783378180af0d1f8 # v4.0.2
id: setup_node
with:
node-version-file: ".nvmrc"
cache: npm
cache-dependency-path: "e2e-tests/cypress/package-lock.json"
- name: ci/cypress/npm-install
run: |
npm ci
- name: ci/cypress/npm-check
run: |
npm run check
playwright-check:
runs-on: ubuntu-22.04
needs:
- update-initial-status
defaults:
run:
working-directory: e2e-tests/playwright
steps:
- name: ci/checkout-repo
uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
with:
ref: ${{ inputs.commit_sha || github.sha }}
- name: ci/setup-node
uses: actions/setup-node@60edb5dd545a775178f52524783378180af0d1f8 # v4.0.2
id: setup_node
with:
node-version-file: ".nvmrc"
cache: npm
cache-dependency-path: "e2e-tests/playwright/package-lock.json"
- name: ci/get-webapp-node-modules
working-directory: webapp
# requires build of client and types
run: |
make node_modules
- name: ci/playwright/npm-install
run: |
npm ci
- name: ci/playwright/npm-check
run: |
npm run check
smoketests:
strategy:
matrix:
#
# Note that smoketests should be run only on ubuntu, for QA purposes.
# But it's useful to be able to run and debug the smoketests for different OSes.
# Notes:
# - For MacOS: works on developer machines, but uses too many resources to be able to run on Github Actions
# - for Windows: cannot currently run on Github Actions, since the runners do not support running linux containers, at the moment
#
#os: [ubuntu-latest-8-cores, windows-2022, macos-12-xl]
os: [ubuntu-latest-8-cores]
runs-on: "${{ matrix.os }}"
needs:
- cypress-check
- playwright-check
defaults:
run:
shell: bash
working-directory: e2e-tests
steps:
- name: ci/checkout-repo
uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
with:
ref: ${{ inputs.commit_sha || github.sha }}
- name: ci/setup-macos-docker
if: runner.os == 'macos'
# https://github.com/actions/runner-images/issues/17#issuecomment-1537238473
run: |
brew install docker docker-compose
colima start
mkdir -p ~/.docker/cli-plugins
ln -sfn /usr/local/opt/docker-compose/bin/docker-compose ~/.docker/cli-plugins/docker-compose
sudo ln -sf $HOME/.colima/default/docker.sock /var/run/docker.sock
- name: ci/e2e-smoketests
run: |
make
- name: ci/e2e-smoketests-store-results
uses: actions/upload-artifact@26f96dfa697d77e81fd5907df203aa23a56210a8 # v4.3.0
with:
name: e2e-smoketests-results-${{ matrix.os }}
path: |
e2e-tests/cypress/logs/
e2e-tests/cypress/results/
- name: ci/e2e-smoketests-assert-results
run: |
# Assert that the run contained 0 failures
CYPRESS_FAILURES=$(find cypress/results -name '*.json' | xargs -l jq -r '.stats.failures' | jq -s add)
echo "Cypress run completed with $CYPRESS_FAILURES failures"
[ "$CYPRESS_FAILURES" = "0" ]
update-failure-final-status:
runs-on: ubuntu-22.04
if: failure() || cancelled()
needs:
- smoketests
steps:
- uses: mattermost/actions/delivery/update-commit-status@main
env:
GITHUB_TOKEN: ${{ github.token }}
with:
repository_full_name: ${{ github.repository }}
commit_sha: ${{ inputs.commit_sha || github.sha }}
context: E2E Tests/smoketests
description: E2E tests for mattermost server app
status: failure
update-success-final-status:
runs-on: ubuntu-22.04
if: success()
needs:
- smoketests
steps:
- uses: mattermost/actions/delivery/update-commit-status@main
env:
GITHUB_TOKEN: ${{ github.token }}
with:
repository_full_name: ${{ github.repository }}
commit_sha: ${{ inputs.commit_sha || github.sha }}
context: E2E Tests/smoketests
description: E2E tests for mattermost server app
status: success
e2e-smoketest:
uses: ./.github/workflows/e2e-tests-ci-template.yml
with:
ref: "${{ inputs.commit_sha || github.sha }}"
status_check_context: "E2E Tests/smoketests"

View File

@ -1,37 +1,5 @@
# shellcheck disable=SC2148,SC2155
# Utility variables
# NB: these assume you `source` them from the directory this file is in
# Set up base docker compose file and export
export MME2E_DC_SERVER="docker compose -p mmserver -f ./server.yml"
export MME2E_DC_DASHBOARD="docker compose -p mmdashboard -f ./dashboard/docker/docker-compose.yml -f ./dashboard.override.yml"
export MME2E_UID=$(id -u)
export MME2E_OSTYPE=$(docker version -f '{{ .Client.Os }}')
export MME2E_ARCHTYPE=$(docker version -f '{{ .Client.Arch }}')
export NODE_VERSION_REQUIRED=$(cat ../../.nvmrc)
# Default values for optional variables
export SERVER_IMAGE_DEFAULT="mattermostdevelopment/mattermost-enterprise-edition:$(git rev-parse --short=7 HEAD)"
export BROWSER_DEFAULT="chrome"
export SERVER_DEFAULT="self-hosted"
export TEST_DEFAULT="cypress"
export ENABLED_DOCKER_SERVICES_DEFAULT="postgres inbucket"
export TEST_FILTER_DEFAULT='--stage=@prod --group=@smoke'
# OS specific defaults overrides
case $MME2E_OSTYPE in
darwin )
BROWSER_DEFAULT="electron" ;;
* )
esac
# Populate the optional variables that are used in the docker-compose file
export SERVER_IMAGE=${SERVER_IMAGE:-$SERVER_IMAGE_DEFAULT}
export BROWSER=${BROWSER:-$BROWSER_DEFAULT}
export SERVER=${SERVER:-$SERVER_DEFAULT}
export TEST=${TEST:-$TEST_DEFAULT}
export ENABLED_DOCKER_SERVICES=${ENABLED_DOCKER_SERVICES:-$ENABLED_DOCKER_SERVICES_DEFAULT}
export TEST_FILTER=${TEST_FILTER:-$TEST_FILTER_DEFAULT}
# Function definitions
mme2e_log () { echo "[$(date +%Y-%m-%dT%H:%M:%S%Z)]" "$@"; }
mme2e_get_current_shopt_arg () {
@ -98,10 +66,48 @@ mme2e_is_token_in_list() {
grep -qE "(^| )$TOKEN( |$)" <<<"$LIST"
}
# Utility alias, for interactive shell usage. Can be reversed with 'unalias docker-compose' in your shell
# NB: this won't work in the script
# Call prerequisite utility functions
mme2e_load_env_file
# Utility variables
# NB: these assume you `source` them from the directory this file is in
# Set up base docker compose file and export
export MME2E_DC_SERVER="docker compose -p mmserver -f ./server.yml"
export MME2E_DC_DASHBOARD="docker compose -p mmdashboard -f ./dashboard/docker/docker-compose.yml -f ./dashboard.override.yml"
export MME2E_UID=$(id -u)
export MME2E_OSTYPE=$(docker version -f '{{ .Client.Os }}')
export MME2E_ARCHTYPE=$(docker version -f '{{ .Client.Arch }}')
export NODE_VERSION_REQUIRED=$(cat ../../.nvmrc)
# Utility alias, for interactive shell usage. Can be reversed with 'unalias docker-compose-mmserver' in your shell
# NB: this only works in interactive shells
alias docker-compose-mmserver='${MME2E_DC_SERVER}'
alias docker-compose-mmdashboard='${MME2E_DC_DASHBOARD}'
# Call prerequisite utility functions
mme2e_load_env_file
# Default values for optional variables
export SERVER_IMAGE_DEFAULT="mattermostdevelopment/mattermost-enterprise-edition:$(git rev-parse --short=7 HEAD)"
export BROWSER_DEFAULT="chrome"
export SERVER_DEFAULT="self-hosted"
export TEST_DEFAULT="cypress"
export ENABLED_DOCKER_SERVICES_DEFAULT="postgres inbucket"
export TEST_FILTER_DEFAULT='--stage=@prod --group=@smoke'
export BRANCH_DEFAULT=$(git branch --show-current || echo -n "unknown")
export BUILD_ID_DEFAULT=$(date +%s)
# OS specific defaults overrides
case $MME2E_OSTYPE in
darwin )
BROWSER_DEFAULT="electron" ;;
* )
esac
# Populate the optional variables that are used in the docker-compose file
export SERVER_IMAGE=${SERVER_IMAGE:-$SERVER_IMAGE_DEFAULT}
export BROWSER=${BROWSER:-$BROWSER_DEFAULT}
export SERVER=${SERVER:-$SERVER_DEFAULT}
export TEST=${TEST:-$TEST_DEFAULT}
export ENABLED_DOCKER_SERVICES=${ENABLED_DOCKER_SERVICES:-$ENABLED_DOCKER_SERVICES_DEFAULT}
export TEST_FILTER=${TEST_FILTER:-$TEST_FILTER_DEFAULT}
export BRANCH_UNSANITIZED=${BRANCH:-$BRANCH_DEFAULT}
export BRANCH=${BRANCH_UNSANITIZED::50} # The automation dashboard only accepts branch names up to 50 characters
export BUILD_ID_UNSANITIZED=${BUILD_ID:-$BUILD_ID_DEFAULT}
export BUILD_ID=${BUILD_ID_UNSANITIZED::64} # The automation dashboard only accepts build IDs up to 64 characters

View File

@ -11,7 +11,13 @@ fi
set -a
. .env.cypress
if [ -z "${AUTOMATION_DASHBOARD_URL:-}" ]; then
mme2e_log "AUTOMATION_DASHBOARD_URL is unset. Skipping test cycle generation."
exit 0
fi
mme2e_log "Generating the test cycle on the Automation Dashboard"
cd ../cypress
npm i
# shellcheck disable=SC2086
exec node --trace-warnings generate_test_cycle.js $TEST_FILTER

65
e2e-tests/.ci/report.publish.sh Executable file
View File

@ -0,0 +1,65 @@
#!/bin/bash
# Publishes the aggregated E2E test report: validates/documents the env vars it
# consumes, derives report metadata from the server image, then runs save_report.js.
# SC2034: <variable> appears unused.
# https://www.shellcheck.net/wiki/SC2034
# shellcheck disable=SC2034
set -e -u -o pipefail
cd "$(dirname "$0")"
. .e2erc
# Default required variables, assert that they are set, and document optional variables
: ${FULL_REPORT:=false} # Valid values: true, false
: ${TYPE:=NONE}         # Valid values: PR, RELEASE, MASTER, MASTER_UNSTABLE, CLOUD, CLOUD_UNSTABLE, NONE (which is the same as omitting it)
: ${WEBHOOK_URL:-}      # Optional. Mattermost webhook to post the report back to
: ${RELEASE_DATE:-}     # Optional. If set, its value will be included in the report as the release date of the tested artifact
# Env vars used during the test. Their values will be included in the report
: ${BRANCH:?}
: ${BUILD_ID:?}
: ${MM_ENV:-}
# Populate intermediate variables
export BUILD_TAG="${SERVER_IMAGE##*/}"
export MM_DOCKER_IMAGE="${BUILD_TAG%%:*}" # NB: the 'mattermostdevelopment/' prefix is assumed
export MM_DOCKER_TAG="${BUILD_TAG##*:}"
export SERVER_TYPE="${SERVER}"
# NB: assume that BRANCH follows the convention 'server-pr-${PR_NUMBER}'. If multiple PRs match, the last one is used to generate the link
# Only needed if TYPE=PR
export PULL_REQUEST="https://github.com/mattermost/mattermost/pull/${BRANCH##*-}"
if [ -n "${TM4J_API_KEY:-}" ]; then
  export TM4J_ENABLE=true
  export JIRA_PROJECT_KEY=MM
  export TM4J_ENVIRONMENT_NAME="${TEST}/${BROWSER}/${SERVER}"
  case "${SERVER}" in
    cloud)
      export TM4J_FOLDER_ID="2014474" ;;
    *)
      export TM4J_FOLDER_ID="2014475" ;;
  esac
  : ${TEST_CYCLE_LINK_PREFIX:?}
  : ${TM4J_CYCLE_KEY:-}
  : ${TM4J_CYCLE_NAME:-}
  # NB: fixed log message typo ("TMJ4" -> "TM4J") to match the tool's actual name
  mme2e_log "TM4J integration enabled."
fi
if [ -n "${DIAGNOSTIC_WEBHOOK_URL:-}" ]; then
  : ${DIAGNOSTIC_USER_ID:?}
  : ${DIAGNOSTIC_TEAM_ID:?}
  mme2e_log "Diagnostic report upload enabled."
fi
if [ -n "${AWS_S3_BUCKET:-}" ]; then
  : ${AWS_ACCESS_KEY_ID:?}
  : ${AWS_SECRET_ACCESS_KEY:?}
  mme2e_log "S3 report upload enabled."
fi
cd ../cypress/
if [ ! -d "results/" ]; then
  mme2e_log "Error: 'results/' directory does not exist. Aborting report generation." >&2
  exit 1
fi
npm i
node save_report.js

View File

@ -61,7 +61,7 @@ services:
MM_TEAMSETTINGS_ENABLEOPENSERVER: "true"
MM_SQLSETTINGS_DATASOURCE: "postgres://mmuser:mostest@localhost:5432/mattermost_test?sslmode=disable&connect_timeout=10&binary_parameters=yes"
MM_SQLSETTINGS_DRIVERNAME: "postgres"
MM_EMAILSETTINGS_SMTPSERVER: "inbucket"
MM_EMAILSETTINGS_SMTPSERVER: "localhost"
MM_CLUSTERSETTINGS_READONLYCONFIG: "false"
MM_SERVICESETTINGS_ENABLEONBOARDINGFLOW: "false"
MM_FEATUREFLAGS_ONBOARDINGTOURTIPS: "false"
@ -259,9 +259,7 @@ EOL
generate_env_files() {
# Generate .env.server
mme2e_log "Generating .env.server"
mme2e_generate_envfile_from_var_names >.env.server <<-EOF
MM_LICENSE
EOF
truncate --size=0 .env.server
# Setting SERVER-specific variables
case "$SERVER" in
@ -282,16 +280,15 @@ generate_env_files() {
done
# Generating TEST-specific env files
BRANCH_DEFAULT=$(git branch --show-current)
BUILD_ID_DEFAULT=$(date +%s)
export BRANCH=${BRANCH:-$BRANCH_DEFAULT}
export BUILD_ID=${BUILD_ID:-$BUILD_ID_DEFAULT}
export CI_BASE_URL="${CI_BASE_URL:-localhost}"
# Some are defaulted in .e2erc due to being needed to other scripts as well
export CI_BASE_URL="${CI_BASE_URL:-http://localhost:8065}"
export REPO=mattermost # Static, but declared here for making generate_test_cycle.js easier to run
export HEADLESS=true # Static, but declared here for making generate_test_cycle.js easier to run
case "$TEST" in
cypress)
mme2e_log "Cypress: Generating .env.cypress"
truncate --size=0 .env.cypress
mme2e_generate_envfile_from_var_names >.env.cypress <<-EOF
BRANCH
BUILD_ID
@ -299,6 +296,7 @@ generate_env_files() {
BROWSER
HEADLESS
REPO
CYPRESS_pushNotificationServer
EOF
# Adding service-specific cypress variables
for SERVICE in $ENABLED_DOCKER_SERVICES; do
@ -327,8 +325,14 @@ generate_env_files() {
echo "CYPRESS_serverEdition=E20" >>.env.cypress
;;
esac
# If the dashboard is running, load .env.dashboard into .env.cypress
if DC_COMMAND="$MME2E_DC_DASHBOARD" mme2e_wait_service_healthy dashboard 1; then
# Add Automation Dashboard related variables to cypress container
if [ -n "${AUTOMATION_DASHBOARD_URL:-}" ]; then
mme2e_log "Automation dashboard URL is set: loading related variables into the Cypress container"
mme2e_generate_envfile_from_var_names >>.env.cypress <<-EOF
AUTOMATION_DASHBOARD_URL
AUTOMATION_DASHBOARD_TOKEN
EOF
elif DC_COMMAND="$MME2E_DC_DASHBOARD" mme2e_wait_service_healthy dashboard 1; then
mme2e_log "Detected a running automation dashboard: loading its access variables into the Cypress container"
cat >>.env.cypress <.env.dashboard
fi

View File

@ -3,6 +3,13 @@ set -e -u -o pipefail
cd "$(dirname "$0")"
. .e2erc
if [ -n "${MM_LICENSE:-}" ]; then
# We prefer uploading the license here, instead of setting the env var for the server
# This is to retain the flexibility of being able to remove it programmatically, if the tests require it
mme2e_log "Uploading license to server"
${MME2E_DC_SERVER} exec -T -- server mmctl --local license upload-string "$MM_LICENSE"
fi
if [ "$TEST" = "cypress" ]; then
mme2e_log "Prepare Cypress: install dependencies"
${MME2E_DC_SERVER} exec -T -u 0 -- cypress bash -c "id $MME2E_UID || useradd -u $MME2E_UID -m nodeci" # Works around the node image's assumption that the app files are owned by user 1000

View File

@ -39,6 +39,10 @@ cloud-init: requirecmd-jq requirecmd-curl
cloud-teardown:
bash ./.ci/server.cloud_teardown.sh
.PHONY: publish-report
publish-report: requirecmd-node
bash ./.ci/report.publish.sh
.PHONY: fmt-node fmt-shell fmt
requirecmd-%:
@which "$(*)" >/dev/null || { echo "Error, missing required CLI tool: $(*). Aborting." >&2; exit 1; }

View File

@ -38,6 +38,7 @@ Instructions, detailed:
* When running with `SERVER=cloud`, this will automatically create a cloud customer against the specified `CWS_URL` service, and delete that user after the run is complete.
* If you want to run the Playwright tests instead of the Cypress ones, you can run `TEST=playwright make`
* If you just want to run a local server instance, without any further testing, you can run `TEST=none make`
* If you're using the automation dashboard, you have the option of sharding the E2E test run: you can launch the `make` command in parallel on different machines (NB: you must use the same `BUILD_ID` and `BRANCH` values that you used for `make generate-test-cycle`) to distribute running the test cases across them. When doing this, you should also set on each machine the `CI_BASE_URL` variable to a value that uniquely identifies the instance where `make` is running.
4. `make stop`: tears down the server (and the dashboard, if running)
* `make clean` will also remove any generated environment or docker-compose files, in addition to stopping the containers.
@ -50,6 +51,7 @@ Notes:
* If their value is fixed (e.g. a static server configuration), these may be simply added to the `docker_compose_generator.sh` file, to the appropriate container.
* If you need to introduce variables that you want to control from `.ci/env`: you need to update the scripts under the `.ci/` dir, and configure them to write the new variables' values over to the appropriate `.env.*` file. In particular, avoid defining variables that depend on other variables within the docker-compose override files: this is to ensure uniformity in their availability, and simplifies the question of what container has access to which variable considerably.
* Exceptions are of course accepted wherever it makes sense (e.g. if you need to group variables based on some common functionality)
- The `publish-report` Make target is meant for internal usage. Usage and variables are documented in the respective scripts.
##### For code changes:
* `make fmt-ci` to format and check yaml files and shell scripts.

View File

@ -33,8 +33,8 @@ describe('If plugins fail to start, they can be disabled', () => {
it('MM-T2391 If plugins fail to start, they can be disabled', () => {
const mimeType = 'application/gzip';
cy.fixture(gitlabPlugin.filename, 'binary').
then(Cypress.Blob.binaryStringToBlob).
cy.fixture(gitlabPlugin.filename, null).
then(Cypress.Blob.arrayBufferToBlob).
then((fileContent) => {
cy.get('input[type=file]').attachFile({fileContent, fileName: gitlabPlugin.filename, mimeType});
});

View File

@ -246,7 +246,7 @@ function generateTitle() {
let dockerImageLink = '';
if (MM_DOCKER_IMAGE && MM_DOCKER_TAG) {
dockerImageLink = ` with [${MM_DOCKER_IMAGE}:${MM_DOCKER_TAG}](https://hub.docker.com/r/mattermost/${MM_DOCKER_IMAGE}/tags?name=${MM_DOCKER_TAG})`;
dockerImageLink = ` with [${MM_DOCKER_IMAGE}:${MM_DOCKER_TAG}](https://hub.docker.com/r/mattermostdevelopment/${MM_DOCKER_IMAGE}/tags?name=${MM_DOCKER_TAG})`;
}
let releaseDate = '';

View File

@ -8,7 +8,7 @@ SHELL ["/bin/bash", "-o", "pipefail", "-c"]
ENV PATH="/mattermost/bin:${PATH}"
ARG PUID=2000
ARG PGID=2000
ARG MM_PACKAGE="https://releases.mattermost.com/9.5.1/mattermost-9.5.1-linux-amd64.tar.gz?src=docker"
ARG MM_PACKAGE="https://releases.mattermost.com/9.5.2/mattermost-9.5.2-linux-amd64.tar.gz?src=docker"
# # Install needed packages and indirect dependencies
RUN apt-get update \

View File

@ -2894,6 +2894,31 @@ func TestUpdateChannelRoles(t *testing.T) {
func TestUpdateChannelMemberSchemeRoles(t *testing.T) {
th := Setup(t).InitBasic()
defer th.TearDown()
enableGuestAccounts := *th.App.Config().GuestAccountsSettings.Enable
defer func() {
th.App.UpdateConfig(func(cfg *model.Config) { *cfg.GuestAccountsSettings.Enable = enableGuestAccounts })
th.App.Srv().RemoveLicense()
}()
th.App.UpdateConfig(func(cfg *model.Config) { *cfg.GuestAccountsSettings.Enable = true })
th.App.Srv().SetLicense(model.NewTestLicense())
id := model.NewId()
guest := &model.User{
Email: th.GenerateTestEmail(),
Nickname: "nn_" + id,
FirstName: "f_" + id,
LastName: "l_" + id,
Password: "Pa$$word11",
EmailVerified: true,
}
guest, appError := th.App.CreateGuest(th.Context, guest)
require.Nil(t, appError)
_, _, appError = th.App.AddUserToTeam(th.Context, th.BasicTeam.Id, guest.Id, "")
th.AddUserToChannel(guest, th.BasicChannel)
require.Nil(t, appError)
SystemAdminClient := th.SystemAdminClient
WebSocketClient, err := th.CreateWebSocketClient()
WebSocketClient.Listen()
@ -2944,6 +2969,11 @@ func TestUpdateChannelMemberSchemeRoles(t *testing.T) {
assert.Equal(t, true, tm2.SchemeUser)
assert.Equal(t, false, tm2.SchemeAdmin)
//cannot set Guest to User for single channel
resp, err := SystemAdminClient.UpdateChannelMemberSchemeRoles(context.Background(), th.BasicChannel.Id, guest.Id, s2)
require.Error(t, err)
CheckBadRequestStatus(t, resp)
s3 := &model.SchemeRoles{
SchemeAdmin: true,
SchemeUser: false,
@ -2977,21 +3007,17 @@ func TestUpdateChannelMemberSchemeRoles(t *testing.T) {
SchemeUser: false,
SchemeGuest: true,
}
_, err = SystemAdminClient.UpdateChannelMemberSchemeRoles(context.Background(), th.BasicChannel.Id, th.BasicUser.Id, s5)
require.NoError(t, err)
tm5, _, err := SystemAdminClient.GetChannelMember(context.Background(), th.BasicChannel.Id, th.BasicUser.Id, "")
require.NoError(t, err)
assert.Equal(t, true, tm5.SchemeGuest)
assert.Equal(t, false, tm5.SchemeUser)
assert.Equal(t, false, tm5.SchemeAdmin)
// cannot set user to guest for a single channel
resp, err = SystemAdminClient.UpdateChannelMemberSchemeRoles(context.Background(), th.BasicChannel.Id, th.BasicUser.Id, s5)
require.Error(t, err)
CheckBadRequestStatus(t, resp)
s6 := &model.SchemeRoles{
SchemeAdmin: false,
SchemeUser: true,
SchemeGuest: true,
}
resp, err := SystemAdminClient.UpdateChannelMemberSchemeRoles(context.Background(), th.BasicChannel.Id, th.BasicUser.Id, s6)
resp, err = SystemAdminClient.UpdateChannelMemberSchemeRoles(context.Background(), th.BasicChannel.Id, th.BasicUser.Id, s6)
require.Error(t, err)
CheckBadRequestStatus(t, resp)

View File

@ -131,7 +131,7 @@ func TestCreatePost(t *testing.T) {
require.NoError(t, err)
// Message with no channel mentions should result in no ephemeral message
timeout := time.After(2 * time.Second)
timeout := time.After(5 * time.Second)
waiting := true
for waiting {
select {
@ -157,7 +157,7 @@ func TestCreatePost(t *testing.T) {
_, _, err = client.CreatePost(context.Background(), post)
require.NoError(t, err)
timeout = time.After(2 * time.Second)
timeout = time.After(5 * time.Second)
eventsToGo := 3 // 3 Posts created with @ mentions should result in 3 websocket events
for eventsToGo > 0 {
select {
@ -1081,7 +1081,7 @@ func TestCreatePostSendOutOfChannelMentions(t *testing.T) {
require.NoError(t, err)
CheckCreatedStatus(t, resp)
timeout := time.After(2 * time.Second)
timeout := time.After(5 * time.Second)
waiting := true
for waiting {
select {
@ -1100,7 +1100,7 @@ func TestCreatePostSendOutOfChannelMentions(t *testing.T) {
require.NoError(t, err)
CheckCreatedStatus(t, resp)
timeout = time.After(2 * time.Second)
timeout = time.After(5 * time.Second)
waiting = true
for waiting {
select {
@ -2796,7 +2796,7 @@ func TestDeletePostEvent(t *testing.T) {
require.NoError(t, err)
received = true
}
case <-time.After(2 * time.Second):
case <-time.After(5 * time.Second):
exit = true
}
if exit {
@ -3594,7 +3594,7 @@ func TestSetPostUnreadWithoutCollapsedThreads(t *testing.T) {
caught = true
data = ev.GetData()
}
case <-time.After(1 * time.Second):
case <-time.After(5 * time.Second):
exit = true
}
if exit {
@ -3866,7 +3866,7 @@ func TestCreatePostNotificationsWithCRT(t *testing.T) {
require.EqualValues(t, "[\""+th.BasicUser.Id+"\"]", users)
}
}
case <-time.After(1 * time.Second):
case <-time.After(5 * time.Second):
return
}
}
@ -3980,7 +3980,7 @@ func TestPostReminder(t *testing.T) {
require.Equal(t, th.BasicTeam.Name, parsedPost.GetProp("team_name").(string))
return
}
case <-time.After(1 * time.Second):
case <-time.After(5 * time.Second):
return
}
}

View File

@ -715,7 +715,7 @@ type AppIface interface {
GetLogs(rctx request.CTX, page, perPage int) ([]string, *model.AppError)
GetLogsSkipSend(rctx request.CTX, page, perPage int, logFilter *model.LogFilter) ([]string, *model.AppError)
GetMemberCountsByGroup(rctx request.CTX, channelID string, includeTimezones bool) ([]*model.ChannelMemberCountByGroup, *model.AppError)
GetMessageForNotification(post *model.Post, translateFunc i18n.TranslateFunc) string
GetMessageForNotification(post *model.Post, teamName, siteUrl string, translateFunc i18n.TranslateFunc) string
GetMultipleEmojiByName(c request.CTX, names []string) ([]*model.Emoji, *model.AppError)
GetNewUsersForTeamPage(teamID string, page, perPage int, asAdmin bool, viewRestrictions *model.ViewUsersRestrictions) ([]*model.User, *model.AppError)
GetNextPostIdFromPostList(postList *model.PostList, collapsedThreads bool) string

View File

@ -1207,14 +1207,18 @@ func (a *App) UpdateChannelMemberSchemeRoles(c request.CTX, channelID string, us
return nil, err
}
if member.SchemeGuest {
return nil, model.NewAppError("UpdateChannelMemberSchemeRoles", "api.channel.update_channel_member_roles.guest.app_error", nil, "", http.StatusBadRequest)
}
if isSchemeGuest {
return nil, model.NewAppError("UpdateChannelMemberSchemeRoles", "api.channel.update_channel_member_roles.user_and_guest.app_error", nil, "", http.StatusBadRequest)
}
member.SchemeAdmin = isSchemeAdmin
member.SchemeUser = isSchemeUser
member.SchemeGuest = isSchemeGuest
if member.SchemeUser && member.SchemeGuest {
return nil, model.NewAppError("UpdateChannelMemberSchemeRoles", "api.channel.update_channel_member_roles.guest_and_user.app_error", nil, "", http.StatusBadRequest)
}
// If the migration is not completed, we also need to check the default channel_admin/channel_user roles are not present in the roles field.
if err = a.IsPhase2MigrationCompleted(); err != nil {
member.ExplicitRoles = RemoveRoles([]string{model.ChannelGuestRoleId, model.ChannelUserRoleId, model.ChannelAdminRoleId}, member.ExplicitRoles)

View File

@ -332,12 +332,13 @@ func (es *Service) sendBatchedEmailNotification(userID string, notifications []*
channelDisplayName = truncateUserNames(channel.DisplayName, 11)
}
postMessage := es.GetMessageForNotification(notification.post, notification.teamName, siteURL, translateFunc)
postsData = append(postsData, &postData{
SenderPhoto: senderPhoto,
SenderName: truncateUserNames(sender.GetDisplayName(displayNameFormat), 22),
Time: t,
ChannelName: channelDisplayName,
Message: template.HTML(es.GetMessageForNotification(notification.post, translateFunc)),
Message: template.HTML(postMessage),
MessageURL: MessageURL,
ShowChannelIcon: showChannelIcon,
OtherChannelMembersCount: otherChannelMembersCount,

View File

@ -13,6 +13,8 @@ import (
model "github.com/mattermost/mattermost/server/public/model"
store "github.com/mattermost/mattermost/server/v8/channels/store"
templates "github.com/mattermost/mattermost/server/v8/platform/shared/templates"
throttled "github.com/throttled/throttled"
@ -65,13 +67,37 @@ func (_m *ServiceInterface) CreateVerifyEmailToken(userID string, newEmail strin
return r0, r1
}
// GetMessageForNotification provides a mock function with given fields: post, translateFunc
func (_m *ServiceInterface) GetMessageForNotification(post *model.Post, translateFunc i18n.TranslateFunc) string {
ret := _m.Called(post, translateFunc)
// GenerateHyperlinkForChannels provides a mock function with given fields: postMessage, teamName, teamURL
func (_m *ServiceInterface) GenerateHyperlinkForChannels(postMessage string, teamName string, teamURL string) (string, error) {
ret := _m.Called(postMessage, teamName, teamURL)
var r0 string
if rf, ok := ret.Get(0).(func(*model.Post, i18n.TranslateFunc) string); ok {
r0 = rf(post, translateFunc)
var r1 error
if rf, ok := ret.Get(0).(func(string, string, string) (string, error)); ok {
return rf(postMessage, teamName, teamURL)
}
if rf, ok := ret.Get(0).(func(string, string, string) string); ok {
r0 = rf(postMessage, teamName, teamURL)
} else {
r0 = ret.Get(0).(string)
}
if rf, ok := ret.Get(1).(func(string, string, string) error); ok {
r1 = rf(postMessage, teamName, teamURL)
} else {
r1 = ret.Error(1)
}
return r0, r1
}
// GetMessageForNotification provides a mock function with given fields: post, teamName, siteUrl, translateFunc
func (_m *ServiceInterface) GetMessageForNotification(post *model.Post, teamName string, siteUrl string, translateFunc i18n.TranslateFunc) string {
ret := _m.Called(post, teamName, siteUrl, translateFunc)
var r0 string
if rf, ok := ret.Get(0).(func(*model.Post, string, string, i18n.TranslateFunc) string); ok {
r0 = rf(post, teamName, siteUrl, translateFunc)
} else {
r0 = ret.Get(0).(string)
}
@ -608,6 +634,11 @@ func (_m *ServiceInterface) SendWelcomeEmail(userID string, _a1 string, verified
return r0
}
// SetStore provides a mock function with given fields: st
func (_m *ServiceInterface) SetStore(st store.Store) {
_m.Called(st)
}
// Stop provides a mock function with given fields:
func (_m *ServiceInterface) Stop() {
_m.Called()

View File

@ -4,6 +4,7 @@
package email
import (
"fmt"
"html"
"html/template"
"net/url"
@ -28,9 +29,9 @@ type EmailMessageAttachment struct {
FieldRows []FieldRow
}
func (es *Service) GetMessageForNotification(post *model.Post, translateFunc i18n.TranslateFunc) string {
func (es *Service) GetMessageForNotification(post *model.Post, teamName, siteUrl string, translateFunc i18n.TranslateFunc) string {
if strings.TrimSpace(post.Message) != "" || len(post.FileIds) == 0 {
return post.Message
return es.prepareNotificationMessageForEmail(post.Message, teamName, siteUrl)
}
// extract the filenames from their paths and determine what type of files are attached
@ -134,3 +135,49 @@ func prepareTextForEmail(text, siteURL string) template.HTML {
return template.HTML(markdownText)
}
func (es *Service) prepareNotificationMessageForEmail(postMessage, teamName, siteURL string) string {
postMessage = html.EscapeString(postMessage)
mdPostMessage, mdErr := utils.MarkdownToHTML(postMessage, siteURL)
if mdErr != nil {
mlog.Warn("Encountered error while converting markdown to HTML", mlog.Err(mdErr))
mdPostMessage = postMessage
}
landingURL := siteURL + "/landing#/" + teamName
normalizedPostMessage, err := es.GenerateHyperlinkForChannels(mdPostMessage, teamName, landingURL)
if err != nil {
mlog.Warn("Encountered error while generating hyperlink for channels", mlog.String("team_name", teamName), mlog.Err(err))
normalizedPostMessage = mdPostMessage
}
return normalizedPostMessage
}
func (es *Service) GenerateHyperlinkForChannels(postMessage, teamName, landingURL string) (string, error) {
channelNames := model.ChannelMentions(postMessage)
if len(channelNames) == 0 {
return postMessage, nil
}
team, err := es.Store().Team().GetByName(teamName)
if err != nil {
mlog.Error("Team not found with the name", mlog.String("team_name", teamName), mlog.Err(err))
return postMessage, nil
}
channels, err := es.store.Channel().GetByNames(team.Id, channelNames, true)
if err != nil {
return "", err
}
visited := make(map[string]bool)
for _, ch := range channels {
if !visited[ch.Id] && ch.Type == model.ChannelTypeOpen {
channelURL := landingURL + "/channels/" + ch.Name
channelHyperLink := fmt.Sprintf("<a href='%s'>%s</a>", channelURL, "~"+ch.Name)
postMessage = strings.Replace(postMessage, "~"+ch.Name, channelHyperLink, -1)
visited[ch.Id] = true
}
}
return postMessage, nil
}

View File

@ -162,14 +162,24 @@ type ServiceInterface interface {
SendNoCardPaymentFailedEmail(email string, locale string, siteURL string) error
SendRemoveExpiredLicenseEmail(ctaText, ctaLink, email, locale, siteURL string) error
AddNotificationEmailToBatch(user *model.User, post *model.Post, team *model.Team) *model.AppError
GetMessageForNotification(post *model.Post, translateFunc i18n.TranslateFunc) string
GetMessageForNotification(post *model.Post, teamName, siteUrl string, translateFunc i18n.TranslateFunc) string
GenerateHyperlinkForChannels(postMessage, teamName, teamURL string) (string, error)
InitEmailBatching()
SendChangeUsernameEmail(newUsername, email, locale, siteURL string) error
CreateVerifyEmailToken(userID string, newEmail string) (*model.Token, error)
SendIPFiltersChangedEmail(email string, userWhoChangedFilter *model.User, siteURL, portalURL, locale string, isWorkspaceOwner bool) error
SetStore(st store.Store)
Stop()
}
func (es *Service) Store() store.Store {
return es.store
}
func (es *Service) SetStore(st store.Store) {
es.store = st
}
func (es *Service) GetPerDayEmailRateLimiter() *throttled.GCRARateLimiter {
return es.perDayEmailRateLimiter
}

View File

@ -1417,7 +1417,7 @@ func (a *App) getChannelsForPosts(teams map[string]*model.Team, data []*imports.
channelName := strings.ToLower(*postData.Channel)
if channel, ok := teamChannels[teamName][channelName]; !ok || channel == nil {
var err error
channel, err = a.Srv().Store().Channel().GetByName(teams[teamName].Id, *postData.Channel, true)
channel, err = a.Srv().Store().Channel().GetByNameIncludeDeleted(teams[teamName].Id, *postData.Channel, true)
if err != nil {
return nil, model.NewAppError("BulkImport", "app.import.import_post.channel_not_found.error", map[string]any{"ChannelName": *postData.Channel}, "", http.StatusBadRequest).Wrap(err)
}

View File

@ -1512,7 +1512,6 @@ func TestImportImportUser(t *testing.T) {
// see https://mattermost.atlassian.net/browse/MM-56986
// Test importing deleted guest with a valid team & valid channel name in apply mode.
// mlog.Debug("TESTING GUEST")
// username = model.NewId()
// deleteAt = model.GetMillis()
// deletedGuestData := &imports.UserImportData{
@ -1537,7 +1536,6 @@ func TestImportImportUser(t *testing.T) {
// user, appErr = th.App.GetUserByUsername(*deletedGuestData.Username)
// require.Nil(t, appErr, "Failed to get user from database.")
// mlog.Debug(user.Roles)
// teamMember, appErr = th.App.GetTeamMember(th.Context, team.Id, user.Id)
// require.Nil(t, appErr, "Failed to get the team member")

View File

@ -9,7 +9,6 @@ import (
"html"
"html/template"
"io"
"strings"
"github.com/pkg/errors"
@ -232,20 +231,8 @@ func (a *App) getNotificationEmailBody(c request.CTX, recipient *model.User, pos
}
if emailNotificationContentsType == model.EmailNotificationContentsFull {
postMessage := a.GetMessageForNotification(post, translateFunc)
postMessage = html.EscapeString(postMessage)
mdPostMessage, mdErr := utils.MarkdownToHTML(postMessage, a.GetSiteURL())
if mdErr != nil {
c.Logger().Warn("Encountered error while converting markdown to HTML", mlog.Err(mdErr))
mdPostMessage = postMessage
}
normalizedPostMessage, err := a.generateHyperlinkForChannels(c, mdPostMessage, teamName, landingURL)
if err != nil {
c.Logger().Warn("Encountered error while generating hyperlink for channels", mlog.String("team_name", teamName), mlog.Err(err))
normalizedPostMessage = mdPostMessage
}
pData.Message = template.HTML(normalizedPostMessage)
postMessage := a.GetMessageForNotification(post, teamName, a.GetSiteURL(), translateFunc)
pData.Message = template.HTML(postMessage)
pData.Time = translateFunc("app.notification.body.dm.time", messageTime)
pData.MessageAttachments = email.ProcessMessageAttachments(post, a.GetSiteURL())
}
@ -309,34 +296,6 @@ func (a *App) getNotificationEmailBody(c request.CTX, recipient *model.User, pos
return a.Srv().TemplatesContainer().RenderToString("messages_notification", data)
}
func (a *App) generateHyperlinkForChannels(c request.CTX, postMessage, teamName, teamURL string) (string, *model.AppError) {
team, err := a.GetTeamByName(teamName)
if err != nil {
return "", err
}
channelNames := model.ChannelMentions(postMessage)
if len(channelNames) == 0 {
return postMessage, nil
}
channels, err := a.GetChannelsByNames(c, channelNames, team.Id)
if err != nil {
return "", err
}
visited := make(map[string]bool)
for _, ch := range channels {
if !visited[ch.Id] && ch.Type == model.ChannelTypeOpen {
channelURL := teamURL + "/channels/" + ch.Name
channelHyperLink := fmt.Sprintf("<a href='%s'>%s</a>", channelURL, "~"+ch.Name)
postMessage = strings.Replace(postMessage, "~"+ch.Name, channelHyperLink, -1)
visited[ch.Id] = true
}
}
return postMessage, nil
}
func (a *App) GetMessageForNotification(post *model.Post, translateFunc i18n.TranslateFunc) string {
return a.Srv().EmailService.GetMessageForNotification(post, translateFunc)
func (a *App) GetMessageForNotification(post *model.Post, teamName, siteUrl string, translateFunc i18n.TranslateFunc) string {
return a.Srv().EmailService.GetMessageForNotification(post, teamName, siteUrl, translateFunc)
}

View File

@ -615,7 +615,7 @@ func TestGetNotificationEmailBodyPublicChannelMention(t *testing.T) {
senderName := "user1"
teamName := "testteam"
teamURL := "http://localhost:8065/testteam"
teamURL := th.App.GetSiteURL() + "/landing#" + "/testteam"
emailNotificationContentsType := model.EmailNotificationContentsFull
translateFunc := i18n.GetUserTranslations("en")
@ -628,6 +628,8 @@ func TestGetNotificationEmailBodyPublicChannelMention(t *testing.T) {
channelStoreMock.On("GetByNames", "test", []string{ch.Name}, true).Return([]*model.Channel{ch}, nil)
storeMock.On("Channel").Return(&channelStoreMock)
th.App.Srv().EmailService.SetStore(storeMock)
body, err := th.App.getNotificationEmailBody(th.Context, recipient, post, ch,
ch.Name, senderName, teamName, teamURL,
emailNotificationContentsType, true, translateFunc, "user-avatar.png")
@ -681,7 +683,7 @@ func TestGetNotificationEmailBodyMultiPublicChannelMention(t *testing.T) {
senderName := "user1"
teamName := "testteam"
teamURL := "http://localhost:8065/testteam"
teamURL := th.App.GetSiteURL() + "/landing#" + "/testteam"
emailNotificationContentsType := model.EmailNotificationContentsFull
translateFunc := i18n.GetUserTranslations("en")
@ -694,6 +696,8 @@ func TestGetNotificationEmailBodyMultiPublicChannelMention(t *testing.T) {
channelStoreMock.On("GetByNames", "test", []string{ch.Name, ch2.Name, ch3.Name}, true).Return([]*model.Channel{ch, ch2, ch3}, nil)
storeMock.On("Channel").Return(&channelStoreMock)
th.App.Srv().EmailService.SetStore(storeMock)
body, err := th.App.getNotificationEmailBody(th.Context, recipient, post, ch,
ch.Name, senderName, teamName, teamURL,
emailNotificationContentsType, true, translateFunc, "user-avatar.png")
@ -743,6 +747,8 @@ func TestGetNotificationEmailBodyPrivateChannelMention(t *testing.T) {
channelStoreMock.On("GetByNames", "test", []string{ch.Name}, true).Return([]*model.Channel{ch}, nil)
storeMock.On("Channel").Return(&channelStoreMock)
th.App.Srv().EmailService.SetStore(storeMock)
body, err := th.App.getNotificationEmailBody(th.Context, recipient, post, ch,
ch.Name, senderName, teamName, teamURL,
emailNotificationContentsType, true, translateFunc, "user-avatar.png")
@ -776,8 +782,9 @@ func TestGenerateHyperlinkForChannelsPublic(t *testing.T) {
channelStoreMock.On("GetByNames", "test", []string{ch.Name}, true).Return([]*model.Channel{ch}, nil)
storeMock.On("Channel").Return(&channelStoreMock)
outMessage, err := th.App.generateHyperlinkForChannels(th.Context, message+mention, teamName, teamURL)
require.Nil(t, err)
th.App.Srv().EmailService.SetStore(storeMock)
outMessage, err := th.App.Srv().EmailService.GenerateHyperlinkForChannels(message+mention, teamName, teamURL)
require.NoError(t, err)
channelURL := teamURL + "/channels/" + ch.Name
assert.Equal(t, message+"<a href='"+channelURL+"'>"+mention+"</a>", outMessage)
}
@ -826,8 +833,9 @@ func TestGenerateHyperlinkForChannelsMultiPublic(t *testing.T) {
channelStoreMock.On("GetByNames", "test", []string{ch.Name, ch2.Name, ch3.Name}, true).Return([]*model.Channel{ch, ch2, ch3}, nil)
storeMock.On("Channel").Return(&channelStoreMock)
outMessage, err := th.App.generateHyperlinkForChannels(th.Context, message, teamName, teamURL)
require.Nil(t, err)
th.App.Srv().EmailService.SetStore(storeMock)
outMessage, err := th.App.Srv().EmailService.GenerateHyperlinkForChannels(message, teamName, teamURL)
require.NoError(t, err)
channelURL := teamURL + "/channels/" + ch.Name
channelURL2 := teamURL + "/channels/" + ch2.Name
channelURL3 := teamURL + "/channels/" + ch3.Name
@ -860,8 +868,9 @@ func TestGenerateHyperlinkForChannelsPrivate(t *testing.T) {
channelStoreMock.On("GetByNames", "test", []string{ch.Name}, true).Return([]*model.Channel{ch}, nil)
storeMock.On("Channel").Return(&channelStoreMock)
outMessage, err := th.App.generateHyperlinkForChannels(th.Context, message, teamName, teamURL)
require.Nil(t, err)
th.App.Srv().EmailService.SetStore(storeMock)
outMessage, err := th.App.Srv().EmailService.GenerateHyperlinkForChannels(message, teamName, teamURL)
require.NoError(t, err)
assert.Equal(t, message, outMessage)
}

View File

@ -7401,7 +7401,7 @@ func (a *OpenTracingAppLayer) GetMemberCountsByGroup(rctx request.CTX, channelID
return resultVar0, resultVar1
}
func (a *OpenTracingAppLayer) GetMessageForNotification(post *model.Post, translateFunc i18n.TranslateFunc) string {
func (a *OpenTracingAppLayer) GetMessageForNotification(post *model.Post, teamName string, siteUrl string, translateFunc i18n.TranslateFunc) string {
origCtx := a.ctx
span, newCtx := tracing.StartSpanWithParentByContext(a.ctx, "app.GetMessageForNotification")
@ -7413,7 +7413,7 @@ func (a *OpenTracingAppLayer) GetMessageForNotification(post *model.Post, transl
}()
defer span.Finish()
resultVar0 := a.app.GetMessageForNotification(post, translateFunc)
resultVar0 := a.app.GetMessageForNotification(post, teamName, siteUrl, translateFunc)
return resultVar0
}

View File

@ -17,9 +17,6 @@ func (ps *PlatformService) RegisterClusterHandlers() {
ps.clusterIFace.RegisterClusterMessageHandler(model.ClusterEventPublish, ps.ClusterPublishHandler)
ps.clusterIFace.RegisterClusterMessageHandler(model.ClusterEventUpdateStatus, ps.ClusterUpdateStatusHandler)
ps.clusterIFace.RegisterClusterMessageHandler(model.ClusterEventInvalidateAllCaches, ps.ClusterInvalidateAllCachesHandler)
ps.clusterIFace.RegisterClusterMessageHandler(model.ClusterEventInvalidateCacheForChannelMembersNotifyProps, ps.clusterInvalidateCacheForChannelMembersNotifyPropHandler)
ps.clusterIFace.RegisterClusterMessageHandler(model.ClusterEventInvalidateCacheForChannelByName, ps.clusterInvalidateCacheForChannelByNameHandler)
ps.clusterIFace.RegisterClusterMessageHandler(model.ClusterEventInvalidateCacheForUser, ps.clusterInvalidateCacheForUserHandler)
ps.clusterIFace.RegisterClusterMessageHandler(model.ClusterEventInvalidateCacheForUserTeams, ps.clusterInvalidateCacheForUserTeamsHandler)
ps.clusterIFace.RegisterClusterMessageHandler(model.ClusterEventBusyStateChanged, ps.clusterBusyStateChgHandler)
ps.clusterIFace.RegisterClusterMessageHandler(model.ClusterEventClearSessionCacheForUser, ps.clusterClearSessionCacheForUserHandler)
@ -70,18 +67,6 @@ func (ps *PlatformService) ClusterInvalidateAllCachesHandler(msg *model.ClusterM
ps.InvalidateAllCachesSkipSend()
}
func (ps *PlatformService) clusterInvalidateCacheForChannelMembersNotifyPropHandler(msg *model.ClusterMessage) {
ps.invalidateCacheForChannelMembersNotifyPropsSkipClusterSend(string(msg.Data))
}
func (ps *PlatformService) clusterInvalidateCacheForChannelByNameHandler(msg *model.ClusterMessage) {
ps.invalidateCacheForChannelByNameSkipClusterSend(msg.Props["id"], msg.Props["name"])
}
func (ps *PlatformService) clusterInvalidateCacheForUserHandler(msg *model.ClusterMessage) {
ps.InvalidateCacheForUserSkipClusterSend(string(msg.Data))
}
func (ps *PlatformService) clusterInvalidateCacheForUserTeamsHandler(msg *model.ClusterMessage) {
ps.invalidateWebConnSessionCacheForUser(string(msg.Data))
}
@ -118,23 +103,6 @@ func (ps *PlatformService) clusterBusyStateChgHandler(msg *model.ClusterMessage)
}
}
func (ps *PlatformService) invalidateCacheForChannelMembersNotifyPropsSkipClusterSend(channelID string) {
ps.Store.Channel().InvalidateCacheForChannelMembersNotifyProps(channelID)
}
func (ps *PlatformService) invalidateCacheForChannelByNameSkipClusterSend(teamID, name string) {
if teamID == "" {
teamID = "dm"
}
ps.Store.Channel().InvalidateChannelByName(teamID, name)
}
func (ps *PlatformService) InvalidateCacheForUserSkipClusterSend(userID string) {
ps.Store.Channel().InvalidateAllChannelMembersForUser(userID)
ps.invalidateWebConnSessionCacheForUser(userID)
}
func (ps *PlatformService) invalidateWebConnSessionCacheForUser(userID string) {
hub := ps.GetHubForUserId(userID)
if hub != nil {

View File

@ -165,24 +165,12 @@ func (ps *PlatformService) HubUnregister(webConn *WebConn) {
func (ps *PlatformService) InvalidateCacheForChannel(channel *model.Channel) {
ps.Store.Channel().InvalidateChannel(channel.Id)
ps.invalidateCacheForChannelByNameSkipClusterSend(channel.TeamId, channel.Name)
if ps.clusterIFace != nil {
nameMsg := &model.ClusterMessage{
Event: model.ClusterEventInvalidateCacheForChannelByName,
SendType: model.ClusterSendBestEffort,
Props: make(map[string]string),
}
nameMsg.Props["name"] = channel.Name
if channel.TeamId == "" {
nameMsg.Props["id"] = "dm"
} else {
nameMsg.Props["id"] = channel.TeamId
}
ps.clusterIFace.SendClusterMessage(nameMsg)
teamID := channel.TeamId
if teamID == "" {
teamID = "dm"
}
ps.Store.Channel().InvalidateChannelByName(teamID, channel.Name)
}
func (ps *PlatformService) InvalidateCacheForChannelMembers(channelID string) {
@ -192,16 +180,7 @@ func (ps *PlatformService) InvalidateCacheForChannelMembers(channelID string) {
}
func (ps *PlatformService) InvalidateCacheForChannelMembersNotifyProps(channelID string) {
ps.invalidateCacheForChannelMembersNotifyPropsSkipClusterSend(channelID)
if ps.clusterIFace != nil {
msg := &model.ClusterMessage{
Event: model.ClusterEventInvalidateCacheForChannelMembersNotifyProps,
SendType: model.ClusterSendBestEffort,
Data: []byte(channelID),
}
ps.clusterIFace.SendClusterMessage(msg)
}
ps.Store.Channel().InvalidateCacheForChannelMembersNotifyProps(channelID)
}
func (ps *PlatformService) InvalidateCacheForChannelPosts(channelID string) {
@ -210,19 +189,11 @@ func (ps *PlatformService) InvalidateCacheForChannelPosts(channelID string) {
}
func (ps *PlatformService) InvalidateCacheForUser(userID string) {
ps.InvalidateCacheForUserSkipClusterSend(userID)
ps.Store.Channel().InvalidateAllChannelMembersForUser(userID)
ps.invalidateWebConnSessionCacheForUser(userID)
ps.Store.User().InvalidateProfilesInChannelCacheByUser(userID)
ps.Store.User().InvalidateProfileCacheForUser(userID)
if ps.clusterIFace != nil {
msg := &model.ClusterMessage{
Event: model.ClusterEventInvalidateCacheForUser,
SendType: model.ClusterSendBestEffort,
Data: []byte(userID),
}
ps.clusterIFace.SendClusterMessage(msg)
}
}
func (ps *PlatformService) InvalidateCacheForUserTeams(userID string) {

View File

@ -1791,7 +1791,7 @@ func TestHookPreferencesHaveChanged(t *testing.T) {
require.Nil(t, appErr)
assert.Equal(t, "test_value_third", preference.Value)
}, 1*time.Second, 10*time.Millisecond)
}, 5*time.Second, 100*time.Millisecond)
})
}
@ -1854,11 +1854,12 @@ func TestChannelHasBeenCreated(t *testing.T) {
posts, appErr := th.App.GetPosts(channel.Id, 0, 1)
require.Nil(t, appErr)
assert.True(t, len(posts.Order) > 0)
post := posts.Posts[posts.Order[0]]
assert.Equal(t, channel.Id, post.ChannelId)
assert.Equal(t, "ChannelHasBeenCreated has been called for "+channel.Id, post.Message)
}, 1*time.Second, 10*time.Millisecond)
}, 5*time.Second, 100*time.Millisecond)
})
t.Run("should call hook when a DM is created", func(t *testing.T) {
@ -1879,11 +1880,11 @@ func TestChannelHasBeenCreated(t *testing.T) {
posts, appErr := th.App.GetPosts(channel.Id, 0, 1)
require.Nil(t, appErr)
assert.True(t, len(posts.Order) > 0)
post := posts.Posts[posts.Order[0]]
assert.Equal(t, channel.Id, post.ChannelId)
assert.Equal(t, "ChannelHasBeenCreated has been called for "+channel.Id, post.Message)
}, 1*time.Second, 10*time.Millisecond)
}, 5*time.Second, 100*time.Millisecond)
})
t.Run("should call hook when a GM is created", func(t *testing.T) {
@ -1905,11 +1906,11 @@ func TestChannelHasBeenCreated(t *testing.T) {
posts, appErr := th.App.GetPosts(channel.Id, 0, 1)
require.Nil(t, appErr)
assert.True(t, len(posts.Order) > 0)
post := posts.Posts[posts.Order[0]]
assert.Equal(t, channel.Id, post.ChannelId)
assert.Equal(t, "ChannelHasBeenCreated has been called for "+channel.Id, post.Message)
}, 1*time.Second, 10*time.Millisecond)
}, 5*time.Second, 100*time.Millisecond)
})
}
@ -1987,9 +1988,9 @@ func TestUserHasJoinedChannel(t *testing.T) {
posts, appErr := th.App.GetPosts(channel.Id, 0, 1)
require.Nil(t, appErr)
assert.True(t, len(posts.Order) > 0)
assert.Equal(t, fmt.Sprintf("Test: User %s joined %s", user2.Id, channel.Id), posts.Posts[posts.Order[0]].Message)
}, 1*time.Second, 10*time.Millisecond)
}, 5*time.Second, 100*time.Millisecond)
})
t.Run("should call hook when a user is added to an existing channel", func(t *testing.T) {
@ -2023,7 +2024,6 @@ func TestUserHasJoinedChannel(t *testing.T) {
// Typically, the post we're looking for will be the latest, but there's a race between the plugin and
// "User has joined the channel" post which means the plugin post may not the the latest one
posts, appErr := th.App.GetPosts(channel.Id, 0, 10)
require.Nil(t, appErr)
for _, postId := range posts.Order {
@ -2035,7 +2035,7 @@ func TestUserHasJoinedChannel(t *testing.T) {
}
return false
}, 1*time.Second, 10*time.Millisecond)
}, 5*time.Second, 100*time.Millisecond)
})
t.Run("should not call hook when a regular channel is created", func(t *testing.T) {

View File

@ -286,7 +286,8 @@ func TestUpdateIncomingWebhook(t *testing.T) {
}
func TestCreateWebhookPost(t *testing.T) {
th := Setup(t).InitBasic()
testCluster := &testlib.FakeClusterInterface{}
th := SetupWithClusterMock(t, testCluster).InitBasic()
defer th.TearDown()
th.App.UpdateConfig(func(cfg *model.Config) { *cfg.ServiceSettings.EnableIncomingWebhooks = true })
@ -365,20 +366,18 @@ Date: Thu Mar 1 19:46:48 2018 +0300
assert.Equal(t, expectedText, post.Message)
t.Run("should set webhook creator status to online", func(t *testing.T) {
testCluster := &testlib.FakeClusterInterface{}
th.Server.Platform().SetCluster(testCluster)
defer th.Server.Platform().SetCluster(nil)
testCluster.ClearMessages()
_, appErr := th.App.CreateWebhookPost(th.Context, hook.UserId, th.BasicChannel, "text", "", "", "", model.StringInterface{}, model.PostTypeDefault, "")
require.Nil(t, appErr)
msgs := testCluster.GetMessages()
// The first message is ClusterEventInvalidateCacheForChannelByName so we skip it
ev, err1 := model.WebSocketEventFromJSON(bytes.NewReader(msgs[1].Data))
require.NoError(t, err1)
require.Equal(t, model.WebsocketEventPosted, ev.EventType())
assert.Equal(t, false, ev.GetData()["set_online"])
msgs := testCluster.SelectMessages(func(msg *model.ClusterMessage) bool {
event, err := model.WebSocketEventFromJSON(bytes.NewReader(msg.Data))
return err == nil && event.EventType() == model.WebsocketEventPosted
})
require.Len(t, msgs, 1)
// We know there will be no error from the filter condition.
event, _ := model.WebSocketEventFromJSON(bytes.NewReader(msgs[0].Data))
assert.Equal(t, false, event.GetData()["set_online"])
})
}

View File

@ -48,48 +48,111 @@ func (s *LocalCacheChannelStore) handleClusterInvalidateChannelById(msg *model.C
}
}
func (s *LocalCacheChannelStore) handleClusterInvalidateChannelForUser(msg *model.ClusterMessage) {
if bytes.Equal(msg.Data, clearCacheMessageData) {
s.rootStore.channelMembersForUserCache.Purge()
} else {
s.rootStore.channelMembersForUserCache.Remove(string(msg.Data))
}
}
func (s *LocalCacheChannelStore) handleClusterInvalidateChannelMembersNotifyProps(msg *model.ClusterMessage) {
if bytes.Equal(msg.Data, clearCacheMessageData) {
s.rootStore.channelMembersNotifyPropsCache.Purge()
} else {
s.rootStore.channelMembersNotifyPropsCache.Remove(string(msg.Data))
}
}
func (s *LocalCacheChannelStore) handleClusterInvalidateChannelByName(msg *model.ClusterMessage) {
if bytes.Equal(msg.Data, clearCacheMessageData) {
s.rootStore.channelByNameCache.Purge()
} else {
s.rootStore.channelByNameCache.Remove(string(msg.Data))
}
}
func (s LocalCacheChannelStore) ClearMembersForUserCache() {
s.rootStore.doClearCacheCluster(s.rootStore.channelMembersForUserCache)
}
func (s LocalCacheChannelStore) ClearCaches() {
s.rootStore.doClearCacheCluster(s.rootStore.channelMemberCountsCache)
s.rootStore.doClearCacheCluster(s.rootStore.channelPinnedPostCountsCache)
s.rootStore.doClearCacheCluster(s.rootStore.channelGuestCountCache)
s.rootStore.doClearCacheCluster(s.rootStore.channelByIdCache)
s.ChannelStore.ClearCaches()
s.rootStore.doClearCacheCluster(s.rootStore.channelMembersForUserCache)
s.rootStore.doClearCacheCluster(s.rootStore.channelMembersNotifyPropsCache)
s.rootStore.doClearCacheCluster(s.rootStore.channelByNameCache)
if s.rootStore.metrics != nil {
s.rootStore.metrics.IncrementMemCacheInvalidationCounter("Channel Pinned Post Counts - Purge")
s.rootStore.metrics.IncrementMemCacheInvalidationCounter("Channel Member Counts - Purge")
s.rootStore.metrics.IncrementMemCacheInvalidationCounter("Channel Guest Count - Purge")
s.rootStore.metrics.IncrementMemCacheInvalidationCounter("Channel - Purge")
s.rootStore.metrics.IncrementMemCacheInvalidationCounter(s.rootStore.channelMemberCountsCache.Name())
s.rootStore.metrics.IncrementMemCacheInvalidationCounter(s.rootStore.channelPinnedPostCountsCache.Name())
s.rootStore.metrics.IncrementMemCacheInvalidationCounter(s.rootStore.channelGuestCountCache.Name())
s.rootStore.metrics.IncrementMemCacheInvalidationCounter(s.rootStore.channelByIdCache.Name())
s.rootStore.metrics.IncrementMemCacheInvalidationCounter(s.rootStore.channelMembersForUserCache.Name())
s.rootStore.metrics.IncrementMemCacheInvalidationCounter(s.rootStore.channelMembersNotifyPropsCache.Name())
s.rootStore.metrics.IncrementMemCacheInvalidationCounter(s.rootStore.channelByNameCache.Name())
}
}
func (s LocalCacheChannelStore) InvalidatePinnedPostCount(channelId string) {
s.rootStore.doInvalidateCacheCluster(s.rootStore.channelPinnedPostCountsCache, channelId)
s.rootStore.doInvalidateCacheCluster(s.rootStore.channelPinnedPostCountsCache, channelId, nil)
if s.rootStore.metrics != nil {
s.rootStore.metrics.IncrementMemCacheInvalidationCounter("Channel Pinned Post Counts - Remove by ChannelId")
s.rootStore.metrics.IncrementMemCacheInvalidationCounter(s.rootStore.channelPinnedPostCountsCache.Name())
}
}
func (s LocalCacheChannelStore) InvalidateMemberCount(channelId string) {
s.rootStore.doInvalidateCacheCluster(s.rootStore.channelMemberCountsCache, channelId)
s.rootStore.doInvalidateCacheCluster(s.rootStore.channelMemberCountsCache, channelId, nil)
if s.rootStore.metrics != nil {
s.rootStore.metrics.IncrementMemCacheInvalidationCounter("Channel Member Counts - Remove by ChannelId")
s.rootStore.metrics.IncrementMemCacheInvalidationCounter(s.rootStore.channelMemberCountsCache.Name())
}
}
func (s LocalCacheChannelStore) InvalidateGuestCount(channelId string) {
s.rootStore.doInvalidateCacheCluster(s.rootStore.channelGuestCountCache, channelId)
s.rootStore.doInvalidateCacheCluster(s.rootStore.channelGuestCountCache, channelId, nil)
if s.rootStore.metrics != nil {
s.rootStore.metrics.IncrementMemCacheInvalidationCounter("Channel Guests Count - Remove by channelId")
}
}
func (s LocalCacheChannelStore) InvalidateChannel(channelId string) {
s.rootStore.doInvalidateCacheCluster(s.rootStore.channelByIdCache, channelId)
s.rootStore.doInvalidateCacheCluster(s.rootStore.channelByIdCache, channelId, nil)
if s.rootStore.metrics != nil {
s.rootStore.metrics.IncrementMemCacheInvalidationCounter("Channel - Remove by ChannelId")
}
}
func (s LocalCacheChannelStore) InvalidateAllChannelMembersForUser(userId string) {
s.rootStore.doInvalidateCacheCluster(s.rootStore.channelMembersForUserCache, userId, nil)
s.rootStore.doInvalidateCacheCluster(s.rootStore.channelMembersForUserCache, userId+"_deleted", nil)
if s.rootStore.metrics != nil {
s.rootStore.metrics.IncrementMemCacheInvalidationCounter(s.rootStore.channelMembersForUserCache.Name())
}
}
func (s LocalCacheChannelStore) InvalidateCacheForChannelMembersNotifyProps(channelId string) {
s.rootStore.doInvalidateCacheCluster(s.rootStore.channelMembersNotifyPropsCache, channelId, nil)
if s.rootStore.metrics != nil {
s.rootStore.metrics.IncrementMemCacheInvalidationCounter(s.rootStore.channelMembersNotifyPropsCache.Name())
}
}
func (s LocalCacheChannelStore) InvalidateChannelByName(teamId, name string) {
props := make(map[string]string)
props["name"] = name
if teamId == "" {
props["id"] = "dm"
} else {
props["id"] = teamId
}
s.rootStore.doInvalidateCacheCluster(s.rootStore.channelByNameCache, teamId+name, props)
if s.rootStore.metrics != nil {
s.rootStore.metrics.IncrementMemCacheInvalidationCounter(s.rootStore.channelByNameCache.Name())
}
}
func (s LocalCacheChannelStore) GetMemberCount(channelId string, allowFromCache bool) (int64, error) {
if allowFromCache {
var count int64
@ -205,6 +268,141 @@ func (s LocalCacheChannelStore) GetMany(ids []string, allowFromCache bool) (mode
return append(foundChannels, channels...), nil
}
func (s LocalCacheChannelStore) GetAllChannelMembersForUser(userId string, allowFromCache bool, includeDeleted bool) (map[string]string, error) {
cache_key := userId
if includeDeleted {
cache_key += "_deleted"
}
if allowFromCache {
ids := make(map[string]string)
if err := s.rootStore.doStandardReadCache(s.rootStore.channelMembersForUserCache, cache_key, &ids); err == nil {
return ids, nil
}
}
ids, err := s.ChannelStore.GetAllChannelMembersForUser(userId, allowFromCache, includeDeleted)
if err != nil {
return nil, err
}
if allowFromCache {
s.rootStore.doStandardAddToCache(s.rootStore.channelMembersForUserCache, cache_key, ids)
}
return ids, nil
}
func (s LocalCacheChannelStore) GetAllChannelMembersNotifyPropsForChannel(channelId string, allowFromCache bool) (map[string]model.StringMap, error) {
if allowFromCache {
var cacheItem map[string]model.StringMap
if err := s.rootStore.doStandardReadCache(s.rootStore.channelMembersNotifyPropsCache, channelId, &cacheItem); err == nil {
return cacheItem, nil
}
}
props, err := s.ChannelStore.GetAllChannelMembersNotifyPropsForChannel(channelId, allowFromCache)
if err != nil {
return nil, err
}
if allowFromCache {
s.rootStore.doStandardAddToCache(s.rootStore.channelMembersNotifyPropsCache, channelId, props)
}
return props, nil
}
func (s LocalCacheChannelStore) GetByNamesIncludeDeleted(teamId string, names []string, allowFromCache bool) ([]*model.Channel, error) {
return s.getByNames(teamId, names, allowFromCache, true)
}
func (s LocalCacheChannelStore) GetByNames(teamId string, names []string, allowFromCache bool) ([]*model.Channel, error) {
return s.getByNames(teamId, names, allowFromCache, false)
}
func (s LocalCacheChannelStore) getByNames(teamId string, names []string, allowFromCache, includeArchivedChannels bool) ([]*model.Channel, error) {
var channels []*model.Channel
if allowFromCache {
var misses []string
visited := make(map[string]struct{})
for _, name := range names {
if _, ok := visited[name]; ok {
continue
}
visited[name] = struct{}{}
var cacheItem *model.Channel
if err := s.rootStore.doStandardReadCache(s.rootStore.channelByNameCache, teamId+name, &cacheItem); err == nil {
if includeArchivedChannels || cacheItem.DeleteAt == 0 {
channels = append(channels, cacheItem)
}
} else {
misses = append(misses, name)
}
}
names = misses
}
if len(names) > 0 {
var dbChannels []*model.Channel
var err error
if includeArchivedChannels {
dbChannels, err = s.ChannelStore.GetByNamesIncludeDeleted(teamId, names, allowFromCache)
} else {
dbChannels, err = s.ChannelStore.GetByNames(teamId, names, allowFromCache)
}
if err != nil {
return nil, err
}
for _, channel := range dbChannels {
if allowFromCache {
s.rootStore.doStandardAddToCache(s.rootStore.channelByNameCache, teamId+channel.Name, channel)
}
channels = append(channels, channel)
}
}
return channels, nil
}
func (s LocalCacheChannelStore) GetByNameIncludeDeleted(teamId string, name string, allowFromCache bool) (*model.Channel, error) {
return s.getByName(teamId, name, allowFromCache, true)
}
func (s LocalCacheChannelStore) GetByName(teamId string, name string, allowFromCache bool) (*model.Channel, error) {
return s.getByName(teamId, name, allowFromCache, false)
}
func (s LocalCacheChannelStore) getByName(teamId string, name string, allowFromCache, includeArchivedChannels bool) (*model.Channel, error) {
var channel *model.Channel
if allowFromCache {
if err := s.rootStore.doStandardReadCache(s.rootStore.channelByNameCache, teamId+name, &channel); err == nil {
if includeArchivedChannels || channel.DeleteAt == 0 {
return channel, nil
}
}
}
var err error
if includeArchivedChannels {
channel, err = s.ChannelStore.GetByNameIncludeDeleted(teamId, name, allowFromCache)
} else {
channel, err = s.ChannelStore.GetByName(teamId, name, allowFromCache)
}
if err != nil {
return nil, err
}
if allowFromCache {
s.rootStore.doStandardAddToCache(s.rootStore.channelByNameCache, teamId+name, channel)
}
return channel, nil
}
func (s LocalCacheChannelStore) SaveMember(rctx request.CTX, member *model.ChannelMember) (*model.ChannelMember, error) {
member, err := s.ChannelStore.SaveMember(rctx, member)
if err != nil {

View File

@ -171,10 +171,10 @@ func (es *LocalCacheEmojiStore) removeFromCache(emoji *model.Emoji) {
es.emojiByIdMut.Lock()
es.emojiByIdInvalidations[emoji.Id] = true
es.emojiByIdMut.Unlock()
es.rootStore.doInvalidateCacheCluster(es.rootStore.emojiCacheById, emoji.Id)
es.rootStore.doInvalidateCacheCluster(es.rootStore.emojiCacheById, emoji.Id, nil)
es.emojiByNameMut.Lock()
es.emojiByNameInvalidations[emoji.Name] = true
es.emojiByNameMut.Unlock()
es.rootStore.doInvalidateCacheCluster(es.rootStore.emojiIdCacheByName, emoji.Name)
es.rootStore.doInvalidateCacheCluster(es.rootStore.emojiIdCacheByName, emoji.Name, nil)
}

View File

@ -62,7 +62,7 @@ func (s LocalCacheFileInfoStore) InvalidateFileInfosForPostCache(postId string,
if deleted {
cacheKey += "_deleted"
}
s.rootStore.doInvalidateCacheCluster(s.rootStore.fileInfoCache, cacheKey)
s.rootStore.doInvalidateCacheCluster(s.rootStore.fileInfoCache, cacheKey, nil)
if s.rootStore.metrics != nil {
s.rootStore.metrics.IncrementMemCacheInvalidationCounter("File Info Cache - Remove by PostId")
}

View File

@ -35,8 +35,15 @@ const (
EmojiCacheSize = 5000
EmojiCacheSec = 30 * 60
ChannelPinnedPostsCountsCacheSize = model.ChannelCacheSize
ChannelPinnedPostsCountsCacheSec = 30 * 60
ChannelPinnedPostsCountsCacheSize = model.ChannelCacheSize
ChannelPinnedPostsCountsCacheSec = 30 * 60
AllChannelMembersForUserCacheSize = model.SessionCacheSize
AllChannelMembersForUserCacheDuration = 15 * time.Minute
AllChannelMembersNotifyPropsForChannelCacheSize = model.SessionCacheSize
AllChannelMembersNotifyPropsForChannelCacheDuration = 30 * time.Minute
ChannelCacheDuration = 15 * time.Minute
ChannelMembersCountsCacheSize = model.ChannelCacheSize
ChannelMembersCountsCacheSec = 30 * 60
@ -87,11 +94,14 @@ type LocalCacheStore struct {
emojiCacheById cache.Cache
emojiIdCacheByName cache.Cache
channel LocalCacheChannelStore
channelMemberCountsCache cache.Cache
channelGuestCountCache cache.Cache
channelPinnedPostCountsCache cache.Cache
channelByIdCache cache.Cache
channel LocalCacheChannelStore
channelMemberCountsCache cache.Cache
channelGuestCountCache cache.Cache
channelPinnedPostCountsCache cache.Cache
channelByIdCache cache.Cache
channelMembersForUserCache cache.Cache
channelMembersNotifyPropsCache cache.Cache
channelByNameCache cache.Cache
webhook LocalCacheWebhookStore
webhookCache cache.Cache
@ -240,6 +250,30 @@ func NewLocalCacheLayer(baseStore store.Store, metrics einterfaces.MetricsInterf
}); err != nil {
return
}
if localCacheStore.channelMembersForUserCache, err = cacheProvider.NewCache(&cache.CacheOptions{
Size: AllChannelMembersForUserCacheSize,
Name: "ChannnelMembersForUser",
DefaultExpiry: AllChannelMembersForUserCacheDuration,
InvalidateClusterEvent: model.ClusterEventInvalidateCacheForUser,
}); err != nil {
return
}
if localCacheStore.channelMembersNotifyPropsCache, err = cacheProvider.NewCache(&cache.CacheOptions{
Size: AllChannelMembersNotifyPropsForChannelCacheSize,
Name: "ChannnelMembersNotifyProps",
DefaultExpiry: AllChannelMembersNotifyPropsForChannelCacheDuration,
InvalidateClusterEvent: model.ClusterEventInvalidateCacheForChannelMembersNotifyProps,
}); err != nil {
return
}
if localCacheStore.channelByNameCache, err = cacheProvider.NewCache(&cache.CacheOptions{
Size: model.ChannelCacheSize,
Name: "ChannelByName",
DefaultExpiry: ChannelCacheDuration,
InvalidateClusterEvent: model.ClusterEventInvalidateCacheForChannelByName,
}); err != nil {
return
}
localCacheStore.channel = LocalCacheChannelStore{ChannelStore: baseStore.Channel(), rootStore: &localCacheStore}
// Posts
@ -331,6 +365,9 @@ func NewLocalCacheLayer(baseStore store.Store, metrics einterfaces.MetricsInterf
cluster.RegisterClusterMessageHandler(model.ClusterEventInvalidateCacheForChannelMemberCounts, localCacheStore.channel.handleClusterInvalidateChannelMemberCounts)
cluster.RegisterClusterMessageHandler(model.ClusterEventInvalidateCacheForChannelGuestCount, localCacheStore.channel.handleClusterInvalidateChannelGuestCounts)
cluster.RegisterClusterMessageHandler(model.ClusterEventInvalidateCacheForChannel, localCacheStore.channel.handleClusterInvalidateChannelById)
cluster.RegisterClusterMessageHandler(model.ClusterEventInvalidateCacheForUser, localCacheStore.channel.handleClusterInvalidateChannelForUser)
cluster.RegisterClusterMessageHandler(model.ClusterEventInvalidateCacheForChannelMembersNotifyProps, localCacheStore.channel.handleClusterInvalidateChannelMembersNotifyProps)
cluster.RegisterClusterMessageHandler(model.ClusterEventInvalidateCacheForChannelByName, localCacheStore.channel.handleClusterInvalidateChannelByName)
cluster.RegisterClusterMessageHandler(model.ClusterEventInvalidateCacheForLastPosts, localCacheStore.post.handleClusterInvalidateLastPosts)
cluster.RegisterClusterMessageHandler(model.ClusterEventInvalidateCacheForTermsOfService, localCacheStore.termsOfService.handleClusterInvalidateTermsOfService)
cluster.RegisterClusterMessageHandler(model.ClusterEventInvalidateCacheForProfileByIds, localCacheStore.user.handleClusterInvalidateScheme)
@ -396,7 +433,7 @@ func (s LocalCacheStore) DropAllTables() {
s.Store.DropAllTables()
}
func (s *LocalCacheStore) doInvalidateCacheCluster(cache cache.Cache, key string) {
func (s *LocalCacheStore) doInvalidateCacheCluster(cache cache.Cache, key string, props map[string]string) {
cache.Remove(key)
if s.cluster != nil {
msg := &model.ClusterMessage{
@ -404,6 +441,9 @@ func (s *LocalCacheStore) doInvalidateCacheCluster(cache cache.Cache, key string
SendType: model.ClusterSendBestEffort,
Data: []byte(key),
}
if props != nil {
msg.Props = props
}
s.cluster.SendClusterMessage(msg)
}
}
@ -450,6 +490,9 @@ func (s *LocalCacheStore) Invalidate() {
s.doClearCacheCluster(s.channelPinnedPostCountsCache)
s.doClearCacheCluster(s.channelGuestCountCache)
s.doClearCacheCluster(s.channelByIdCache)
s.doClearCacheCluster(s.channelMembersForUserCache)
s.doClearCacheCluster(s.channelMembersNotifyPropsCache)
s.doClearCacheCluster(s.channelByNameCache)
s.doClearCacheCluster(s.postLastPostsCache)
s.doClearCacheCluster(s.termsOfServiceCache)
s.doClearCacheCluster(s.lastPostTimeCache)

View File

@ -56,11 +56,11 @@ func (s LocalCachePostStore) ClearCaches() {
}
func (s LocalCachePostStore) InvalidateLastPostTimeCache(channelId string) {
s.rootStore.doInvalidateCacheCluster(s.rootStore.lastPostTimeCache, channelId)
s.rootStore.doInvalidateCacheCluster(s.rootStore.lastPostTimeCache, channelId, nil)
// Keys are "{channelid}{limit}" and caching only occurs on limits of 30 and 60
s.rootStore.doInvalidateCacheCluster(s.rootStore.postLastPostsCache, channelId+"30")
s.rootStore.doInvalidateCacheCluster(s.rootStore.postLastPostsCache, channelId+"60")
s.rootStore.doInvalidateCacheCluster(s.rootStore.postLastPostsCache, channelId+"30", nil)
s.rootStore.doInvalidateCacheCluster(s.rootStore.postLastPostsCache, channelId+"60", nil)
s.PostStore.InvalidateLastPostTimeCache(channelId)

View File

@ -24,12 +24,12 @@ func (s *LocalCacheReactionStore) handleClusterInvalidateReaction(msg *model.Clu
}
func (s LocalCacheReactionStore) Save(reaction *model.Reaction) (*model.Reaction, error) {
defer s.rootStore.doInvalidateCacheCluster(s.rootStore.reactionCache, reaction.PostId)
defer s.rootStore.doInvalidateCacheCluster(s.rootStore.reactionCache, reaction.PostId, nil)
return s.ReactionStore.Save(reaction)
}
func (s LocalCacheReactionStore) Delete(reaction *model.Reaction) (*model.Reaction, error) {
defer s.rootStore.doInvalidateCacheCluster(s.rootStore.reactionCache, reaction.PostId)
defer s.rootStore.doInvalidateCacheCluster(s.rootStore.reactionCache, reaction.PostId, nil)
return s.ReactionStore.Delete(reaction)
}

View File

@ -36,7 +36,7 @@ func (s *LocalCacheRoleStore) handleClusterInvalidateRolePermissions(msg *model.
func (s LocalCacheRoleStore) Save(role *model.Role) (*model.Role, error) {
if role.Name != "" {
defer s.rootStore.doInvalidateCacheCluster(s.rootStore.roleCache, role.Name)
defer s.rootStore.doInvalidateCacheCluster(s.rootStore.roleCache, role.Name, nil)
defer s.rootStore.doClearCacheCluster(s.rootStore.rolePermissionsCache)
}
return s.RoleStore.Save(role)
@ -85,7 +85,7 @@ func (s LocalCacheRoleStore) Delete(roleId string) (*model.Role, error) {
role, err := s.RoleStore.Delete(roleId)
if err == nil {
s.rootStore.doInvalidateCacheCluster(s.rootStore.roleCache, role.Name)
s.rootStore.doInvalidateCacheCluster(s.rootStore.roleCache, role.Name, nil)
defer s.rootStore.doClearCacheCluster(s.rootStore.rolePermissionsCache)
}
return role, err

View File

@ -25,7 +25,7 @@ func (s *LocalCacheSchemeStore) handleClusterInvalidateScheme(msg *model.Cluster
func (s LocalCacheSchemeStore) Save(scheme *model.Scheme) (*model.Scheme, error) {
if scheme.Id != "" {
defer s.rootStore.doInvalidateCacheCluster(s.rootStore.schemeCache, scheme.Id)
defer s.rootStore.doInvalidateCacheCluster(s.rootStore.schemeCache, scheme.Id, nil)
}
return s.SchemeStore.Save(scheme)
}
@ -47,7 +47,7 @@ func (s LocalCacheSchemeStore) Get(schemeId string) (*model.Scheme, error) {
}
func (s LocalCacheSchemeStore) Delete(schemeId string) (*model.Scheme, error) {
defer s.rootStore.doInvalidateCacheCluster(s.rootStore.schemeCache, schemeId)
defer s.rootStore.doInvalidateCacheCluster(s.rootStore.schemeCache, schemeId, nil)
defer s.rootStore.doClearCacheCluster(s.rootStore.roleCache)
defer s.rootStore.doClearCacheCluster(s.rootStore.rolePermissionsCache)
return s.SchemeStore.Delete(schemeId)

View File

@ -31,7 +31,7 @@ func (s LocalCacheTeamStore) ClearCaches() {
}
func (s LocalCacheTeamStore) InvalidateAllTeamIdsForUser(userId string) {
s.rootStore.doInvalidateCacheCluster(s.rootStore.teamAllTeamIdsForUserCache, userId)
s.rootStore.doInvalidateCacheCluster(s.rootStore.teamAllTeamIdsForUserCache, userId, nil)
if s.rootStore.metrics != nil {
s.rootStore.metrics.IncrementMemCacheInvalidationCounter("All Team Ids for User - Remove by UserId")
}

View File

@ -40,7 +40,7 @@ func (s LocalCacheTermsOfServiceStore) Save(termsOfService *model.TermsOfService
if err == nil {
s.rootStore.doStandardAddToCache(s.rootStore.termsOfServiceCache, tos.Id, tos)
s.rootStore.doInvalidateCacheCluster(s.rootStore.termsOfServiceCache, LatestKey)
s.rootStore.doInvalidateCacheCluster(s.rootStore.termsOfServiceCache, LatestKey, nil)
}
return tos, err
}

View File

@ -54,7 +54,7 @@ func (s *LocalCacheUserStore) InvalidateProfileCacheForUser(userId string) {
s.userProfileByIdsMut.Lock()
s.userProfileByIdsInvalidations[userId] = true
s.userProfileByIdsMut.Unlock()
s.rootStore.doInvalidateCacheCluster(s.rootStore.userProfileByIdsCache, userId)
s.rootStore.doInvalidateCacheCluster(s.rootStore.userProfileByIdsCache, userId, nil)
if s.rootStore.metrics != nil {
s.rootStore.metrics.IncrementMemCacheInvalidationCounter("Profile By Ids - Remove")
@ -68,7 +68,7 @@ func (s *LocalCacheUserStore) InvalidateProfilesInChannelCacheByUser(userId stri
var userMap map[string]*model.User
if err = s.rootStore.profilesInChannelCache.Get(key, &userMap); err == nil {
if _, userInCache := userMap[userId]; userInCache {
s.rootStore.doInvalidateCacheCluster(s.rootStore.profilesInChannelCache, key)
s.rootStore.doInvalidateCacheCluster(s.rootStore.profilesInChannelCache, key, nil)
if s.rootStore.metrics != nil {
s.rootStore.metrics.IncrementMemCacheInvalidationCounter("Profiles in Channel - Remove by User")
}
@ -79,7 +79,7 @@ func (s *LocalCacheUserStore) InvalidateProfilesInChannelCacheByUser(userId stri
}
func (s *LocalCacheUserStore) InvalidateProfilesInChannelCache(channelID string) {
s.rootStore.doInvalidateCacheCluster(s.rootStore.profilesInChannelCache, channelID)
s.rootStore.doInvalidateCacheCluster(s.rootStore.profilesInChannelCache, channelID, nil)
if s.rootStore.metrics != nil {
s.rootStore.metrics.IncrementMemCacheInvalidationCounter("Profiles in Channel - Remove by Channel")
}

View File

@ -32,7 +32,7 @@ func (s LocalCacheWebhookStore) ClearCaches() {
}
func (s LocalCacheWebhookStore) InvalidateWebhookCache(webhookId string) {
s.rootStore.doInvalidateCacheCluster(s.rootStore.webhookCache, webhookId)
s.rootStore.doInvalidateCacheCluster(s.rootStore.webhookCache, webhookId, nil)
if s.rootStore.metrics != nil {
s.rootStore.metrics.IncrementMemCacheInvalidationCounter("Webhook - Remove by WebhookId")
}

View File

@ -10,7 +10,6 @@ import (
"sort"
"strconv"
"strings"
"time"
"unicode/utf8"
sq "github.com/mattermost/squirrel"
@ -20,17 +19,6 @@ import (
"github.com/mattermost/mattermost/server/public/shared/request"
"github.com/mattermost/mattermost/server/v8/channels/store"
"github.com/mattermost/mattermost/server/v8/einterfaces"
"github.com/mattermost/mattermost/server/v8/platform/services/cache"
)
const (
AllChannelMembersForUserCacheSize = model.SessionCacheSize
AllChannelMembersForUserCacheDuration = 15 * time.Minute // 15 mins
AllChannelMembersNotifyPropsForChannelCacheSize = model.SessionCacheSize
AllChannelMembersNotifyPropsForChannelCacheDuration = 30 * time.Minute // 30 mins
ChannelCacheDuration = 15 * time.Minute // 15 mins
)
type SqlChannelStore struct {
@ -442,30 +430,10 @@ type publicChannel struct {
Purpose string `json:"purpose"`
}
var allChannelMembersForUserCache = cache.NewLRU(cache.LRUOptions{
Size: AllChannelMembersForUserCacheSize,
})
var allChannelMembersNotifyPropsForChannelCache = cache.NewLRU(cache.LRUOptions{
Size: AllChannelMembersNotifyPropsForChannelCacheSize,
})
var channelByNameCache = cache.NewLRU(cache.LRUOptions{
Size: model.ChannelCacheSize,
})
func (s SqlChannelStore) ClearMembersForUserCache() {
allChannelMembersForUserCache.Purge()
}
func (s SqlChannelStore) ClearCaches() {
allChannelMembersForUserCache.Purge()
allChannelMembersNotifyPropsForChannelCache.Purge()
channelByNameCache.Purge()
if s.metrics != nil {
s.metrics.IncrementMemCacheInvalidationCounter("All Channel Members for User - Purge")
s.metrics.IncrementMemCacheInvalidationCounter("All Channel Members Notify Props for Channel - Purge")
s.metrics.IncrementMemCacheInvalidationCounter("Channel By Name - Purge")
}
}
func newSqlChannelStore(sqlStore *SqlStore, metrics einterfaces.MetricsInterface) store.ChannelStore {
@ -820,11 +788,8 @@ func (s SqlChannelStore) GetChannelUnread(channelId, userId string) (*model.Chan
func (s SqlChannelStore) InvalidateChannel(id string) {
}
//nolint:unparam
func (s SqlChannelStore) InvalidateChannelByName(teamId, name string) {
channelByNameCache.Remove(teamId + name)
if s.metrics != nil {
s.metrics.IncrementMemCacheInvalidationCounter("Channel by Name - Remove by TeamId and Name")
}
}
func (s SqlChannelStore) GetPinnedPosts(channelId string) (*model.PostList, error) {
@ -1393,10 +1358,6 @@ func (s SqlChannelStore) GetTeamChannels(teamId string) (model.ChannelList, erro
return data, nil
}
func (s SqlChannelStore) GetByName(teamId string, name string, allowFromCache bool) (*model.Channel, error) {
return s.getByName(teamId, name, false, allowFromCache)
}
func (s SqlChannelStore) GetByNamesIncludeDeleted(teamId string, names []string, allowFromCache bool) ([]*model.Channel, error) {
return s.getByNames(teamId, names, allowFromCache, true)
}
@ -1406,27 +1367,7 @@ func (s SqlChannelStore) GetByNames(teamId string, names []string, allowFromCach
}
func (s SqlChannelStore) getByNames(teamId string, names []string, allowFromCache, includeArchivedChannels bool) ([]*model.Channel, error) {
var channels []*model.Channel
if allowFromCache {
var misses []string
visited := make(map[string]struct{})
for _, name := range names {
if _, ok := visited[name]; ok {
continue
}
visited[name] = struct{}{}
var cacheItem *model.Channel
if err := channelByNameCache.Get(teamId+name, &cacheItem); err == nil {
if includeArchivedChannels || cacheItem.DeleteAt == 0 {
channels = append(channels, cacheItem)
}
} else {
misses = append(misses, name)
}
}
names = misses
}
channels := []*model.Channel{}
if len(names) > 0 {
cond := sq.And{
@ -1450,27 +1391,13 @@ func (s SqlChannelStore) getByNames(teamId string, names []string, allowFromCach
return nil, errors.Wrap(err, "GetByNames_tosql")
}
dbChannels := []*model.Channel{}
if err := s.GetReplicaX().Select(&dbChannels, query, args...); err != nil && err != sql.ErrNoRows {
if err := s.GetReplicaX().Select(&channels, query, args...); err != nil && err != sql.ErrNoRows {
msg := fmt.Sprintf("failed to get channels with names=%v", names)
if teamId != "" {
msg += fmt.Sprintf(" teamId=%s", teamId)
}
return nil, errors.Wrap(err, msg)
}
for _, channel := range dbChannels {
channelByNameCache.SetWithExpiry(teamId+channel.Name, channel, ChannelCacheDuration)
channels = append(channels, channel)
}
// Not all channels are in cache. Increment aggregate miss counter.
if s.metrics != nil {
s.metrics.IncrementMemCacheMissCounter("Channel By Name - Aggregate")
}
} else {
// All of the channel names are in cache. Increment aggregate hit counter.
if s.metrics != nil {
s.metrics.IncrementMemCacheHitCounter("Channel By Name - Aggregate")
}
}
return channels, nil
@ -1480,6 +1407,10 @@ func (s SqlChannelStore) GetByNameIncludeDeleted(teamId string, name string, all
return s.getByName(teamId, name, true, allowFromCache)
}
func (s SqlChannelStore) GetByName(teamId string, name string, allowFromCache bool) (*model.Channel, error) {
return s.getByName(teamId, name, false, allowFromCache)
}
func (s SqlChannelStore) getByName(teamId string, name string, includeDeleted bool, allowFromCache bool) (*model.Channel, error) {
query := s.getQueryBuilder().
Select("*").
@ -1493,35 +1424,21 @@ func (s SqlChannelStore) getByName(teamId string, name string, includeDeleted bo
if !includeDeleted {
query = query.Where(sq.Eq{"DeleteAt": 0})
}
channel := model.Channel{}
if allowFromCache {
var cacheItem *model.Channel
if err := channelByNameCache.Get(teamId+name, &cacheItem); err == nil {
if s.metrics != nil {
s.metrics.IncrementMemCacheHitCounter("Channel By Name")
}
return cacheItem, nil
}
if s.metrics != nil {
s.metrics.IncrementMemCacheMissCounter("Channel By Name")
}
}
queryStr, args, err := query.ToSql()
if err != nil {
return nil, errors.Wrapf(err, "getByName_tosql")
}
if err = s.GetReplicaX().Get(&channel, queryStr, args...); err != nil {
channel := model.Channel{}
if err := s.GetReplicaX().Get(&channel, queryStr, args...); err != nil {
if err == sql.ErrNoRows {
return nil, store.NewErrNotFound("Channel", fmt.Sprintf("TeamId=%s&Name=%s", teamId, name))
}
return nil, errors.Wrapf(err, "failed to find channel with TeamId=%s and Name=%s", teamId, name)
}
err = channelByNameCache.SetWithExpiry(teamId+name, &channel, ChannelCacheDuration)
return &channel, err
return &channel, nil
}
func (s SqlChannelStore) GetDeletedByName(teamId string, name string) (*model.Channel, error) {
@ -2156,11 +2073,6 @@ func (s SqlChannelStore) GetMemberLastViewedAt(ctx context.Context, channelID st
}
func (s SqlChannelStore) InvalidateAllChannelMembersForUser(userId string) {
allChannelMembersForUserCache.Remove(userId)
allChannelMembersForUserCache.Remove(userId + "_deleted")
if s.metrics != nil {
s.metrics.IncrementMemCacheInvalidationCounter("All Channel Members for User - Remove by UserId")
}
}
func (s SqlChannelStore) GetMemberForPost(postId string, userId string, includeArchivedChannels bool) (*model.ChannelMember, error) {
@ -2214,24 +2126,6 @@ func (s SqlChannelStore) GetMemberForPost(postId string, userId string, includeA
}
func (s SqlChannelStore) GetAllChannelMembersForUser(userId string, allowFromCache bool, includeDeleted bool) (_ map[string]string, err error) {
cache_key := userId
if includeDeleted {
cache_key += "_deleted"
}
if allowFromCache {
ids := make(map[string]string)
if err = allChannelMembersForUserCache.Get(cache_key, &ids); err == nil {
if s.metrics != nil {
s.metrics.IncrementMemCacheHitCounter("All Channel Members for User")
}
return ids, nil
}
}
if s.metrics != nil {
s.metrics.IncrementMemCacheMissCounter("All Channel Members for User")
}
query := s.getQueryBuilder().
Select(`
ChannelMembers.ChannelId, ChannelMembers.Roles, ChannelMembers.SchemeGuest,
@ -2282,9 +2176,6 @@ func (s SqlChannelStore) GetAllChannelMembersForUser(userId string, allowFromCac
}
ids := data.ToMapStringString()
if allowFromCache {
allChannelMembersForUserCache.SetWithExpiry(cache_key, ids, AllChannelMembersForUserCacheDuration)
}
return ids, nil
}
@ -2334,10 +2225,6 @@ func (s SqlChannelStore) GetChannelsMemberCount(channelIDs []string) (_ map[stri
}
func (s SqlChannelStore) InvalidateCacheForChannelMembersNotifyProps(channelId string) {
allChannelMembersNotifyPropsForChannelCache.Remove(channelId)
if s.metrics != nil {
s.metrics.IncrementMemCacheInvalidationCounter("All Channel Members Notify Props for Channel - Remove by ChannelId")
}
}
type allChannelMemberNotifyProps struct {
@ -2346,20 +2233,6 @@ type allChannelMemberNotifyProps struct {
}
func (s SqlChannelStore) GetAllChannelMembersNotifyPropsForChannel(channelId string, allowFromCache bool) (map[string]model.StringMap, error) {
if allowFromCache {
var cacheItem map[string]model.StringMap
if err := allChannelMembersNotifyPropsForChannelCache.Get(channelId, &cacheItem); err == nil {
if s.metrics != nil {
s.metrics.IncrementMemCacheHitCounter("All Channel Members Notify Props for Channel")
}
return cacheItem, nil
}
}
if s.metrics != nil {
s.metrics.IncrementMemCacheMissCounter("All Channel Members Notify Props for Channel")
}
data := []allChannelMemberNotifyProps{}
err := s.GetReplicaX().Select(&data, `
SELECT UserId, NotifyProps
@ -2374,8 +2247,6 @@ func (s SqlChannelStore) GetAllChannelMembersNotifyPropsForChannel(channelId str
props[data[i].UserId] = data[i].NotifyProps
}
allChannelMembersNotifyPropsForChannelCache.SetWithExpiry(channelId, props, AllChannelMembersNotifyPropsForChannelCacheDuration)
return props, nil
}

View File

@ -451,6 +451,10 @@
"id": "api.channel.update_channel_member_roles.changing_guest_role.app_error",
"translation": "Invalid channel member update: You can't add or remove the guest role manually."
},
{
"id": "api.channel.update_channel_member_roles.guest.app_error",
"translation": "Invalid channel member update: A guest cannot be made a team member or team admin; please promote them to a user first."
},
{
"id": "api.channel.update_channel_member_roles.guest_and_user.app_error",
"translation": "Invalid channel member update: A user must be a guest or a user but not both."
@ -459,6 +463,10 @@
"id": "api.channel.update_channel_member_roles.scheme_role.app_error",
"translation": "The provided role is managed by a Scheme and therefore cannot be applied directly to a Channel Member."
},
{
"id": "api.channel.update_channel_member_roles.user_and_guest.app_error",
"translation": "Invalid channel member update: A guest cannot be set for a single channel; a System Admin must promote or demote users to/from guests."
},
{
"id": "api.channel.update_channel_privacy.default_channel_error",
"translation": "The default channel cannot be made private."

View File

@ -15,10 +15,11 @@ import (
// StoreService exposes the underlying database.
type StoreService struct {
initialized bool
api plugin.API
driver plugin.Driver
mutex sync.Mutex
initializedMaster bool
initializedReplica bool
api plugin.API
driver plugin.Driver
mutex sync.Mutex
masterDB *sql.DB
replicaDB *sql.DB
@ -31,7 +32,7 @@ func (s *StoreService) GetMasterDB() (*sql.DB, error) {
s.mutex.Lock()
defer s.mutex.Unlock()
if err := s.initialize(); err != nil {
if err := s.initializeMaster(); err != nil {
return nil, err
}
@ -46,7 +47,7 @@ func (s *StoreService) GetReplicaDB() (*sql.DB, error) {
s.mutex.Lock()
defer s.mutex.Unlock()
if err := s.initialize(); err != nil {
if err := s.initializeReplica(); err != nil {
return nil, err
}
@ -62,20 +63,18 @@ func (s *StoreService) Close() error {
s.mutex.Lock()
defer s.mutex.Unlock()
if !s.initialized {
return nil
}
if err := s.masterDB.Close(); err != nil {
return err
}
if s.replicaDB != nil {
if err := s.replicaDB.Close(); err != nil {
return err
}
}
if s.masterDB != nil {
if err := s.masterDB.Close(); err != nil {
return err
}
}
return nil
}
@ -84,8 +83,8 @@ func (s *StoreService) DriverName() string {
return *s.api.GetConfig().SqlSettings.DriverName
}
func (s *StoreService) initialize() error {
if s.initialized {
func (s *StoreService) initializeMaster() error {
if s.initializedMaster {
return nil
}
@ -93,25 +92,33 @@ func (s *StoreService) initialize() error {
return errors.New("no db driver was provided")
}
config := s.api.GetUnsanitizedConfig()
// Set up master db
db := sql.OpenDB(driver.NewConnector(s.driver, true))
db := sql.OpenDB(driver.NewConnector(s.driver, true /* IsMaster */))
if err := db.Ping(); err != nil {
return errors.Wrap(err, "failed to connect to master db")
}
s.masterDB = db
s.initializedMaster = true
return nil
}
func (s *StoreService) initializeReplica() error {
if s.initializedReplica {
return nil
}
config := s.api.GetUnsanitizedConfig()
// Set up replica db
if len(config.SqlSettings.DataSourceReplicas) > 0 {
db := sql.OpenDB(driver.NewConnector(s.driver, false))
db := sql.OpenDB(driver.NewConnector(s.driver, false /* IsMaster */))
if err := db.Ping(); err != nil {
return errors.Wrap(err, "failed to connect to replica db")
}
s.replicaDB = db
}
s.initialized = true
s.initializedReplica = true
return nil
}

View File

@ -12,18 +12,10 @@ import (
func TestStore(t *testing.T) {
t.Run("master db singleton", func(t *testing.T) {
config := &model.Config{
SqlSettings: model.SqlSettings{
DriverName: model.NewString("test"),
DataSource: model.NewString("TestStore-master-db"),
},
}
api := &plugintest.API{}
defer api.AssertExpectations(t)
api.On("GetUnsanitizedConfig").Return(config)
driver := &plugintest.Driver{}
defer driver.AssertExpectations(t)
driver.On("Conn", true).Return("test", nil)
driver.On("ConnPing", "test").Return(nil)
driver.On("ConnClose", "test").Return(nil)
@ -52,6 +44,7 @@ func TestStore(t *testing.T) {
}
driver := &plugintest.Driver{}
defer driver.AssertExpectations(t)
driver.On("Conn", true).Return("test", nil)
driver.On("ConnPing", "test").Return(nil)
driver.On("ConnClose", "test").Return(nil)
@ -88,7 +81,7 @@ func TestStore(t *testing.T) {
api.On("GetUnsanitizedConfig").Return(config)
driver := &plugintest.Driver{}
driver.On("Conn", true).Return("test", nil)
defer driver.AssertExpectations(t)
driver.On("Conn", false).Return("test", nil)
driver.On("ConnPing", "test").Return(nil)
driver.On("ConnClose", "test").Return(nil)

View File

@ -170,6 +170,15 @@
opacity: 1;
}
}
// Icon shown beside channels that are shared with other workspaces;
// sized at 18px to line up with the other channel-type icons in the list
// (see the ArchiveOutlineIcon / LockOutlineIcon branches in
// searchable_channel_list), with 6px trailing space before the channel name.
.shared-channel-icon {
width: 18px;
height: 18px;
margin-right: 6px;
color: rgba(61, 60, 64, 0.75);
font-size: 18px;
line-height: 18px;
}
}
.more-modal__actions {

View File

@ -15,6 +15,7 @@ import MagnifyingGlassSVG from 'components/common/svg_images_components/magnifyi
import LoadingScreen from 'components/loading_screen';
import * as Menu from 'components/menu';
import QuickInput from 'components/quick_input';
import SharedChannelIndicator from 'components/shared_channel_indicator';
import CheckboxCheckedIcon from 'components/widgets/icons/checkbox_checked_icon';
import LoadingWrapper from 'components/widgets/loading/loading_wrapper';
@ -129,6 +130,14 @@ export class SearchableChannelList extends React.PureComponent<Props, State> {
if (isArchivedChannel(channel)) {
channelTypeIcon = <ArchiveOutlineIcon size={18}/>;
} else if (channel.shared) {
channelTypeIcon = (
<SharedChannelIndicator
className='shared-channel-icon'
channelType={channel.type}
withTooltip={true}
/>
);
} else if (isPrivateChannel(channel)) {
channelTypeIcon = <LockOutlineIcon size={18}/>;
} else {

View File

@ -50,7 +50,7 @@
display: inline-flex;
padding: 5px 0;
line-height: 22px;
white-space: normal;
white-space: nowrap;
}
span.MenuItem__text-color {
@ -63,7 +63,7 @@
font-size: 12px;
line-height: 16px;
opacity: 0.7;
white-space: normal;
white-space: nowrap;
@media (max-width: 768px) {
padding-left: 28px !important;