Merge branch 'master' into MM-51858-fix-workspace-deletion-telemetry

This commit is contained in:
Mattermost Build 2023-04-27 17:36:11 +03:00 committed by GitHub
commit a599a5feab
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
149 changed files with 5627 additions and 1204 deletions

View File

@ -4,15 +4,12 @@ on:
workflows: ["Mattermost Build"]
types:
- completed
jobs:
upload-s3:
name: cd/Upload artifacts to S3
runs-on: ubuntu-22.04
env:
REPO_NAME: ${{ github.event.repository.name }}
if: >
github.event.workflow_run.event == 'pull_request' &&
github.event.workflow_run.conclusion == 'success'
if: github.event.workflow_run.event == 'pull_request' && github.event.workflow_run.conclusion == 'success'
steps:
- name: cd/Configure AWS
uses: aws-actions/configure-aws-credentials@07c2f971bac433df982ccc261983ae443861db49 # v1-node16
@ -28,48 +25,46 @@ jobs:
workflow_conclusion: success
name: server-dist-artifact
path: server/dist
# Get Branch name from calling workflow
# Search for the string "pull" and replace it with "PR" in branch-name
- name: cd/Get branch name
run: echo "BRANCH_NAME=$(echo ${{ github.event.workflow_run.head_branch }} | sed 's/^pull\//PR-/g')" >> $GITHUB_ENV
- name: cd/Upload artifacts to S3
env:
REPO_NAME: ${{ github.event.repository.name }}
COMMIT_SHA: ${{ github.event.workflow_run.head_sha }}
run: |
aws s3 cp server/dist/ s3://pr-builds.mattermost.com/$REPO_NAME/$BRANCH_NAME/ --acl public-read --cache-control "no-cache" --recursive --no-progress
aws s3 cp server/dist/ s3://pr-builds.mattermost.com/$REPO_NAME/commit/${{ github.sha }}/ --acl public-read --cache-control "no-cache" --recursive --no-progress
aws s3 cp server/dist/ s3://pr-builds.mattermost.com/$REPO_NAME/commit/$COMMIT_SHA/ --acl public-read --cache-control "no-cache" --recursive --no-progress
build-docker:
name: cd/Build and push docker image
needs: upload-s3
env:
REPO_NAME: ${{ github.event.repository.name }}
runs-on: ubuntu-22.04
if: >
github.event.workflow_run.event == 'pull_request' &&
github.event.workflow_run.conclusion == 'success'
if: github.event.workflow_run.event == 'pull_request' && github.event.workflow_run.conclusion == 'success'
steps:
- name: cd/Login to Docker Hub
uses: docker/login-action@3da7dc6e2b31f99ef2cb9fb4c50fb0971e0d0139 # v2.1.0
with:
username: ${{ secrets.DOCKERHUB_DEV_USERNAME }}
password: ${{ secrets.DOCKERHUB_DEV_TOKEN }}
- name: cd/Download artifacts
uses: dawidd6/action-download-artifact@0c49384d39ceb023b8040f480a25596fd6cf441b # v2.26.0
with:
workflow: ${{ github.event.workflow_run.workflow_id }}
run_id: ${{ github.event.workflow_run.id }}
workflow_conclusion: success
name: server-build-artifact
path: server/build/
- name: cd/Setup Docker Buildx
uses: docker/setup-buildx-action@11e8a2e2910826a92412015c515187a2d6750279 # v2.4
- name: cd/Docker build and push
env:
DOCKER_CLI_EXPERIMENTAL: enabled
run: |
export TAG=$(echo "${{ github.event.pull_request.head.sha || github.sha }}" | cut -c1-7)
cd server/build
export DOCKER_CLI_EXPERIMENTAL=enabled
export MM_PACKAGE=https://pr-builds.mattermost.com/$REPO_NAME/commit/${{ github.sha }}/mattermost-team-linux-amd64.tar.gz
docker buildx build --push --build-arg MM_PACKAGE=$MM_PACKAGE -t mattermostdevelopment/mm-te-test:${TAG} .
- name: cd/Login to Docker Hub
uses: docker/login-action@3da7dc6e2b31f99ef2cb9fb4c50fb0971e0d0139 # v2.1.0
with:
username: ${{ secrets.DOCKERHUB_DEV_USERNAME }}
password: ${{ secrets.DOCKERHUB_DEV_TOKEN }}
- name: cd/Download artifacts
uses: dawidd6/action-download-artifact@0c49384d39ceb023b8040f480a25596fd6cf441b # v2.26.0
with:
workflow: ${{ github.event.workflow_run.workflow_id }}
run_id: ${{ github.event.workflow_run.id }}
workflow_conclusion: success
name: server-build-artifact
path: server/build/
- name: cd/Setup Docker Buildx
uses: docker/setup-buildx-action@11e8a2e2910826a92412015c515187a2d6750279 # v2.4
- name: cd/Docker build and push
env:
DOCKER_CLI_EXPERIMENTAL: enabled
REPO_NAME: ${{ github.event.repository.name }}
COMMIT_SHA: ${{ github.event.workflow_run.head_sha }}
run: |
export TAG=$(echo "${{ github.event.pull_request.head.sha || github.event.workflow_run.head_sha }}" | cut -c1-7)
cd server/build
export DOCKER_CLI_EXPERIMENTAL=enabled
export MM_PACKAGE=https://pr-builds.mattermost.com/$REPO_NAME/commit/$COMMIT_SHA/mattermost-team-linux-amd64.tar.gz
docker buildx build --push --build-arg MM_PACKAGE=$MM_PACKAGE -t mattermostdevelopment/mm-te-test:${TAG} .
# Temporary uploading also to mattermost/mm-te-test:${TAG} except mattermostdevelopment/mm-te-test:${TAG}
# Context: https://community.mattermost.com/private-core/pl/3jzzxzfiji8hx833ewyuthzkjh
build-docker-temp:
@ -77,40 +72,41 @@ jobs:
needs: upload-s3
env:
REPO_NAME: ${{ github.event.repository.name }}
runs-on: ubuntu-22.04
if: >
github.event.workflow_run.event == 'pull_request' &&
github.event.workflow_run.conclusion == 'success'
if: github.event.workflow_run.event == 'pull_request' && github.event.workflow_run.conclusion == 'success'
steps:
- name: cd/Login to Docker Hub
uses: docker/login-action@3da7dc6e2b31f99ef2cb9fb4c50fb0971e0d0139 # v2.1.0
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: cd/Download artifacts
uses: dawidd6/action-download-artifact@0c49384d39ceb023b8040f480a25596fd6cf441b # v2.26.0
with:
workflow: ${{ github.event.workflow_run.workflow_id }}
run_id: ${{ github.event.workflow_run.id }}
workflow_conclusion: success
name: server-build-artifact
path: server/build/
- name: cd/Setup Docker Buildx
uses: docker/setup-buildx-action@11e8a2e2910826a92412015c515187a2d6750279 # v2.4
- name: cd/Docker build and push
env:
DOCKER_CLI_EXPERIMENTAL: enabled
run: |
export TAG=$(echo "${{ github.event.pull_request.head.sha || github.sha }}" | cut -c1-7)
cd server/build
export DOCKER_CLI_EXPERIMENTAL=enabled
export MM_PACKAGE=https://pr-builds.mattermost.com/$REPO_NAME/commit/${{ github.sha }}/mattermost-team-linux-amd64.tar.gz
docker buildx build --push --build-arg MM_PACKAGE=$MM_PACKAGE -t mattermost/mm-te-test:${TAG} .
- name: cd/Login to Docker Hub
uses: docker/login-action@3da7dc6e2b31f99ef2cb9fb4c50fb0971e0d0139 # v2.1.0
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: cd/Download artifacts
uses: dawidd6/action-download-artifact@0c49384d39ceb023b8040f480a25596fd6cf441b # v2.26.0
with:
workflow: ${{ github.event.workflow_run.workflow_id }}
run_id: ${{ github.event.workflow_run.id }}
workflow_conclusion: success
name: server-build-artifact
path: server/build/
- name: cd/Setup Docker Buildx
uses: docker/setup-buildx-action@11e8a2e2910826a92412015c515187a2d6750279 # v2.4
- name: cd/Docker build and push
env:
DOCKER_CLI_EXPERIMENTAL: enabled
REPO_NAME: ${{ github.event.repository.name }}
COMMIT_SHA: ${{ github.event.workflow_run.head_sha }}
run: |
export TAG=$(echo "${{ github.event.pull_request.head.sha || github.event.workflow_run.head_sha }}" | cut -c1-7)
cd server/build
export DOCKER_CLI_EXPERIMENTAL=enabled
export MM_PACKAGE=https://pr-builds.mattermost.com/$REPO_NAME/commit/$COMMIT_SHA/mattermost-team-linux-amd64.tar.gz
docker buildx build --push --build-arg MM_PACKAGE=$MM_PACKAGE -t mattermost/mm-te-test:${TAG} .
sentry:
name: Send build info to sentry
if: >
github.event.workflow_run.event == 'pull_request' &&
github.event.workflow_run.conclusion == 'success'
github.event.workflow_run.event == 'push'
runs-on: ubuntu-22.04
env:
SENTRY_AUTH_TOKEN: ${{ secrets.MM_SERVER_SENTRY_AUTH_TOKEN }}

View File

@ -66,7 +66,7 @@ jobs:
run: if [[ -n $(git status --porcelain) ]]; then echo "Please update the serialized files using 'make gen-serialized'"; exit 1; fi
check-mattermost-vet:
name: Check style
runs-on: ubuntu-latest-8-cores
runs-on: ubuntu-22.04
defaults:
run:
working-directory: server

159
.github/workflows/esrupgrade-common.yml vendored Normal file
View File

@ -0,0 +1,159 @@
name: ESR Upgrade
on:
workflow_call:
inputs:
db-dump-url:
required: true
type: string
initial-version:
required: true
type: string
final-version:
required: true
type: string
env:
COMPOSE_PROJECT_NAME: ghactions
BUILD_IMAGE: mattermost/mattermost-enterprise-edition:${{ inputs.final-version }}
MYSQL_CONN_ARGS: -h localhost -P 3306 --protocol=tcp -ummuser -pmostest mattermost_test
DUMP_SERVER_NAME: esr.${{ inputs.initial-version }}-${{ inputs.final-version }}.dump.server.sql
DUMP_SCRIPT_NAME: esr.${{ inputs.initial-version }}-${{ inputs.final-version }}.dump.script.sql
MIGRATION_SCRIPT: esr.${{ inputs.initial-version }}-${{ inputs.final-version }}.mysql.up.sql
CLEANUP_SCRIPT: esr.${{ inputs.initial-version }}-${{ inputs.final-version }}.mysql.cleanup.sql
PREPROCESS_SCRIPT: esr.common.mysql.preprocess.sql
DIFF_NAME: esr.${{ inputs.initial-version }}-${{ inputs.final-version }}.diff
jobs:
esr-upgrade-server:
runs-on: ubuntu-latest-8-cores
timeout-minutes: 30
steps:
- name: Checkout mattermost-server
uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c # v3.3.0
- name: Run docker compose
run: |
cd server/build
docker-compose --no-ansi run --rm start_dependencies
cat ../tests/test-data.ldif | docker-compose --no-ansi exec -T openldap bash -c 'ldapadd -x -D "cn=admin,dc=mm,dc=test,dc=com" -w mostest';
docker-compose --no-ansi exec -T minio sh -c 'mkdir -p /data/mattermost-test';
docker-compose --no-ansi ps
- name: Wait for docker compose
run: |
until docker network inspect ghactions_mm-test; do echo "Waiting for Docker Compose Network..."; sleep 1; done;
docker run --net ghactions_mm-test appropriate/curl:latest sh -c "until curl --max-time 5 --output - http://mysql:3306; do echo waiting for mysql; sleep 5; done;"
docker run --net ghactions_mm-test appropriate/curl:latest sh -c "until curl --max-time 5 --output - http://elasticsearch:9200; do echo waiting for elasticsearch; sleep 5; done;"
- name: Initialize the database with the source DB dump
run: |
curl ${{ inputs.db-dump-url }} | zcat | docker exec -i ghactions_mysql_1 mysql -AN $MYSQL_CONN_ARGS
- name: Common preprocessing of the DB dump
run: |
cd server/scripts/esrupgrades
docker exec -i ghactions_mysql_1 mysql -AN $MYSQL_CONN_ARGS < $PREPROCESS_SCRIPT
- name: Pull EE image
run: |
docker pull $BUILD_IMAGE
- name: Run migration through server
run: |
mkdir -p client/plugins
cd server/build
# Run the server in the background to trigger the migrations
docker run --name mmserver \
--net ghactions_mm-test \
--ulimit nofile=8096:8096 \
--env-file=dotenv/test.env \
--env MM_SQLSETTINGS_DRIVERNAME="mysql" \
--env MM_SQLSETTINGS_DATASOURCE="mmuser:mostest@tcp(mysql:3306)/mattermost_test?charset=utf8mb4,utf8&multiStatements=true" \
-v ~/work/mattermost-server:/mattermost-server \
-w /mattermost-server/mattermost-server \
$BUILD_IMAGE &
# In parallel, wait for the migrations to finish.
# To verify this, we check that the server has finished the startup job through the log line "Server is listening on"
until docker logs mmserver | grep "Server is listening on"; do\
echo "Waiting for migrations to finish..."; \
sleep 1; \
done;
# Make sure to stop the server. Also, redirect output to null;
# otherwise, the name of the container gets written to the console, which is weird
docker stop mmserver > /dev/null
- name: Cleanup DB
run : |
cd server/scripts/esrupgrades
docker exec -i ghactions_mysql_1 mysql -AN $MYSQL_CONN_ARGS < $CLEANUP_SCRIPT
- name: Dump upgraded database
run: |
# Use --skip-opt to have each INSERT into one line.
# Use --set-gtid-purged=OFF to suppress GTID-related statements.
docker exec -i ghactions_mysql_1 mysqldump \
--skip-opt --set-gtid-purged=OFF \
$MYSQL_CONN_ARGS > $DUMP_SERVER_NAME
- name: Cleanup dump and compress
run: |
# We skip the very last line, which simply contains the date of the dump
head -n -1 ${DUMP_SERVER_NAME} | gzip > ${DUMP_SERVER_NAME}.gz
- name: Upload dump
uses: actions/upload-artifact@v3
with:
name: upgraded-dump-server
path: ${{ env.DUMP_SERVER_NAME }}.gz
esr-upgrade-script:
runs-on: ubuntu-latest-8-cores
timeout-minutes: 30
steps:
- name: Checkout mattermost-server
uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c # v3.3.0
- name: Run docker compose
run: |
cd server/build
docker-compose --no-ansi run --rm start_dependencies
cat ../tests/test-data.ldif | docker-compose --no-ansi exec -T openldap bash -c 'ldapadd -x -D "cn=admin,dc=mm,dc=test,dc=com" -w mostest';
docker-compose --no-ansi exec -T minio sh -c 'mkdir -p /data/mattermost-test';
docker-compose --no-ansi ps
- name: Wait for docker compose
run: |
until docker network inspect ghactions_mm-test; do echo "Waiting for Docker Compose Network..."; sleep 1; done;
docker run --net ghactions_mm-test appropriate/curl:latest sh -c "until curl --max-time 5 --output - http://mysql:3306; do echo waiting for mysql; sleep 5; done;"
docker run --net ghactions_mm-test appropriate/curl:latest sh -c "until curl --max-time 5 --output - http://elasticsearch:9200; do echo waiting for elasticsearch; sleep 5; done;"
- name: Initialize the database with the source DB dump
run: |
curl ${{ inputs.db-dump-url }} | zcat | docker exec -i ghactions_mysql_1 mysql -AN $MYSQL_CONN_ARGS
- name: Preprocess the DB dump
run: |
cd server/scripts/esrupgrades
docker exec -i ghactions_mysql_1 mysql -AN $MYSQL_CONN_ARGS < $PREPROCESS_SCRIPT
- name: Run migration through script
run : |
cd server/scripts/esrupgrades
docker exec -i ghactions_mysql_1 mysql -AN $MYSQL_CONN_ARGS < $MIGRATION_SCRIPT
- name: Cleanup DB
run : |
cd server/scripts/esrupgrades
docker exec -i ghactions_mysql_1 mysql -AN $MYSQL_CONN_ARGS < $CLEANUP_SCRIPT
- name: Dump upgraded database
run: |
docker exec -i ghactions_mysql_1 mysqldump --skip-opt --set-gtid-purged=OFF $MYSQL_CONN_ARGS > $DUMP_SCRIPT_NAME
- name: Cleanup dump and compress
run: |
# We skip the very last line, which simply contains the date of the dump
head -n -1 ${DUMP_SCRIPT_NAME} | gzip > ${DUMP_SCRIPT_NAME}.gz
- name: Upload dump
uses: actions/upload-artifact@v3
with:
name: upgraded-dump-script
path: ${{ env.DUMP_SCRIPT_NAME }}.gz
esr-upgrade-diff:
runs-on: ubuntu-latest-8-cores
needs:
- esr-upgrade-server
- esr-upgrade-script
steps:
- name: Retrieve dumps
uses: actions/download-artifact@v3
- name: Diff dumps
run: |
gzip -d upgraded-dump-server/${DUMP_SERVER_NAME}.gz
gzip -d upgraded-dump-script/${DUMP_SCRIPT_NAME}.gz
diff upgraded-dump-server/$DUMP_SERVER_NAME upgraded-dump-script/$DUMP_SCRIPT_NAME > $DIFF_NAME
- name: Upload diff
if: failure() # Upload the diff only if the previous step failed; i.e., if the diff is non-empty
uses: actions/upload-artifact@v3
with:
name: dumps-diff
path: ${{ env.DIFF_NAME }}

33
.github/workflows/esrupgrade.yml vendored Normal file
View File

@ -0,0 +1,33 @@
name: ESR Upgrade
on:
pull_request:
paths:
- 'server/scripts/esrupgrades/*'
- '.github/workflows/esr*'
push:
branches:
- master
- cloud
- release-*
jobs:
esr-upgrade-5_37-7_8:
name: Run ESR upgrade script from 5.37 to 7.8
uses: ./.github/workflows/esrupgrade-common.yml
with:
db-dump-url: https://lt-public-data.s3.amazonaws.com/47K_537_mysql_collationfixed.sql.gz
initial-version: 5.37
final-version: 7.8
esr-upgrade-5_37-6_3:
name: Run ESR upgrade script from 5.37 to 6.3
uses: ./.github/workflows/esrupgrade-common.yml
with:
db-dump-url: https://lt-public-data.s3.amazonaws.com/47K_537_mysql_collationfixed.sql.gz
initial-version: 5.37
final-version: 6.3
esr-upgrade-6_3-7_8:
name: Run ESR upgrade script from 6.3 to 7.8
uses: ./.github/workflows/esrupgrade-common.yml
with:
db-dump-url: https://lt-public-data.s3.amazonaws.com/47K_63_mysql.sql.gz
initial-version: 6.3
final-version: 7.8

View File

@ -12,7 +12,6 @@
"@babel/eslint-parser": "7.19.1",
"@babel/eslint-plugin": "7.19.1",
"@cypress/request": "2.88.11",
"@cypress/skip-test": "2.6.1",
"@mattermost/types": "7.4.0",
"@testing-library/cypress": "9.0.0",
"@types/async": "3.2.16",
@ -2250,12 +2249,6 @@
"uuid": "dist/bin/uuid"
}
},
"node_modules/@cypress/skip-test": {
"version": "2.6.1",
"resolved": "https://registry.npmjs.org/@cypress/skip-test/-/skip-test-2.6.1.tgz",
"integrity": "sha512-X+ibefBiuOmC5gKG91wRIT0/OqXeETYvu7zXktjZ3yLeO186Y8ia0K7/gQUpAwuUi28DuqMd1+7tBQVtPkzbPA==",
"dev": true
},
"node_modules/@cypress/xvfb": {
"version": "1.2.4",
"resolved": "https://registry.npmjs.org/@cypress/xvfb/-/xvfb-1.2.4.tgz",
@ -19062,12 +19055,6 @@
}
}
},
"@cypress/skip-test": {
"version": "2.6.1",
"resolved": "https://registry.npmjs.org/@cypress/skip-test/-/skip-test-2.6.1.tgz",
"integrity": "sha512-X+ibefBiuOmC5gKG91wRIT0/OqXeETYvu7zXktjZ3yLeO186Y8ia0K7/gQUpAwuUi28DuqMd1+7tBQVtPkzbPA==",
"dev": true
},
"@cypress/xvfb": {
"version": "1.2.4",
"resolved": "https://registry.npmjs.org/@cypress/xvfb/-/xvfb-1.2.4.tgz",

View File

@ -3,7 +3,6 @@
"@babel/eslint-parser": "7.19.1",
"@babel/eslint-plugin": "7.19.1",
"@cypress/request": "2.88.11",
"@cypress/skip-test": "2.6.1",
"@mattermost/types": "7.4.0",
"@testing-library/cypress": "9.0.0",
"@types/async": "3.2.16",

View File

@ -18,7 +18,6 @@ describe('Verify Accessibility Support in Post', () => {
let otherUser;
let testTeam;
let testChannel;
let emojiPickerEnabled;
before(() => {
cy.apiInitSetup().then(({team, channel, user}) => {
@ -33,10 +32,6 @@ describe('Verify Accessibility Support in Post', () => {
cy.apiAddUserToChannel(testChannel.id, otherUser.id);
});
});
cy.apiGetConfig().then(({config}) => {
emojiPickerEnabled = config.ServiceSettings.EnableEmojiPicker;
});
});
});
@ -179,17 +174,10 @@ describe('Verify Accessibility Support in Post', () => {
cy.get(`#CENTER_time_${postId}`).should('be.focused');
cy.focused().tab();
// eslint-disable-next-line no-negated-condition
if (!emojiPickerEnabled) {
// * Verify focus is on the actions button
cy.get(`#CENTER_button_${postId}`).should('be.focused').and('have.attr', 'aria-label', 'more');
for (let i = 0; i < 3; i++) {
// * Verify focus is on the reactions button
cy.get(`#recent_reaction_${i}`).should('have.class', 'emoticon--post-menu').and('have.attr', 'aria-label');
cy.focused().tab();
} else {
for (let i = 0; i < 3; i++) {
// * Verify focus is on the reactions button
cy.get(`#recent_reaction_${i}`).should('have.class', 'emoticon--post-menu').and('have.attr', 'aria-label');
cy.focused().tab();
}
}
// * Verify focus is on the reactions button
@ -200,15 +188,17 @@ describe('Verify Accessibility Support in Post', () => {
cy.get(`#CENTER_flagIcon_${postId}`).should('be.focused').and('have.attr', 'aria-label', 'save');
cy.focused().tab();
// * Verify focus is on message actions button
cy.get(`#CENTER_actions_button_${postId}`).should('be.focused').and('have.attr', 'aria-label', 'actions');
cy.focused().tab();
// * Verify focus is on the comment button
cy.get(`#CENTER_commentIcon_${postId}`).should('be.focused').and('have.attr', 'aria-label', 'reply');
cy.focused().tab();
if (emojiPickerEnabled) {
// * Verify focus is on the more button
cy.get(`#CENTER_button_${postId}`).should('be.focused').and('have.attr', 'aria-label', 'More');
cy.focused().tab();
}
// * Verify focus is on the more button
cy.get(`#CENTER_button_${postId}`).should('be.focused').and('have.attr', 'aria-label', 'more');
cy.focused().tab();
// * Verify focus is on the post text
cy.get(`#postMessageText_${postId}`).should('be.focused').and('have.attr', 'aria-readonly', 'true');
@ -244,11 +234,13 @@ describe('Verify Accessibility Support in Post', () => {
cy.get(`#rhsPostMessageText_${postId}`).should('be.focused').and('have.attr', 'aria-readonly', 'true');
cy.focused().tab({shift: true});
if (emojiPickerEnabled) {
// * Verify focus is on the actions button
cy.get(`#RHS_COMMENT_button_${postId}`).should('be.focused').and('have.attr', 'aria-label', 'More');
cy.focused().tab({shift: true});
}
// * Verify focus is on the more button
cy.get(`#RHS_COMMENT_button_${postId}`).should('be.focused').and('have.attr', 'aria-label', 'more');
cy.focused().tab({shift: true});
// * Verify focus is on message actions button
cy.get(`#RHS_COMMENT_actions_button_${postId}`).should('be.focused').and('have.attr', 'aria-label', 'actions');
cy.focused().tab({shift: true});
// * Verify focus is on the save icon
cy.get(`#RHS_COMMENT_flagIcon_${postId}`).should('be.focused').and('have.attr', 'aria-label', 'save');
@ -258,15 +250,9 @@ describe('Verify Accessibility Support in Post', () => {
cy.get(`#RHS_COMMENT_reaction_${postId}`).should('be.focused').and('have.attr', 'aria-label', 'add reaction');
cy.focused().tab({shift: true});
// eslint-disable-next-line no-negated-condition
if (!emojiPickerEnabled) {
// * Verify focus is on the actions button
cy.get(`#RHS_COMMENT_button_${postId}`).should('be.focused').and('have.attr', 'aria-label', 'more');
cy.focused().tab({shift: true});
} else {
cy.get('#recent_reaction_0').should('have.class', 'emoticon--post-menu').and('have.attr', 'aria-label');
cy.focused().tab({shift: true});
}
// * Verify focus is on most recent action
cy.get('#recent_reaction_0').should('have.class', 'emoticon--post-menu').and('have.attr', 'aria-label');
cy.focused().tab({shift: true});
// * Verify focus is on the time
cy.get(`#RHS_COMMENT_time_${postId}`).should('be.focused');

View File

@ -15,7 +15,10 @@ import {getRandomId} from '../../../utils';
describe('Leave an archived channel', () => {
let testTeam;
let offTopicUrl;
const channelType = {
public: 'Channel Type: Public',
archived: 'Channel Type: Archived',
};
before(() => {
cy.apiUpdateConfig({
TeamSettings: {
@ -97,7 +100,7 @@ describe('Leave an archived channel', () => {
// # More channels modal opens
cy.get('#moreChannelsModal').should('be.visible').within(() => {
// # Click on dropdown
cy.findByText('Show: Public Channels').should('be.visible').click();
cy.findByText(channelType.public).should('be.visible').click();
// # Click archived channels
cy.findByText('Archived Channels').click();
@ -145,7 +148,7 @@ describe('Leave an archived channel', () => {
// # More channels modal opens
cy.get('.more-modal').should('be.visible').within(() => {
// # Public channel list opens by default
cy.findByText('Show: Public Channels').should('be.visible').click();
cy.findByText(channelType.public).should('be.visible').click();
// # Click on archived channels
cy.findByText('Archived Channels').click();
@ -198,7 +201,7 @@ describe('Leave an archived channel', () => {
// # More channels modal opens
cy.get('.more-modal').should('be.visible').within(() => {
// # Public channels are shown by default
cy.findByText('Show: Public Channels').should('be.visible').click();
cy.findByText(channelType.public).should('be.visible').click();
// # Go to archived channels
cy.findByText('Archived Channels').click();
@ -252,7 +255,7 @@ describe('Leave an archived channel', () => {
// # More channels modal opens
cy.get('.more-modal').should('be.visible').within(() => {
// # Show public channels is visible by default
cy.findByText('Show: Public Channels').should('be.visible').click();
cy.findByText(channelType.public).should('be.visible').click();
// # Go to archived channels
cy.findByText('Archived Channels').click();
@ -286,7 +289,7 @@ describe('Leave an archived channel', () => {
// # More channels modal opens and lands on public channels
cy.get('#moreChannelsModal').should('be.visible').within(() => {
cy.findByText('Show: Public Channels').should('be.visible').click();
cy.findByText(channelType.public).should('be.visible').click();
// # Go to archived channels
cy.findByText('Archived Channels').click();

View File

@ -14,6 +14,11 @@ import * as TIMEOUTS from '../../../fixtures/timeouts';
import {createPrivateChannel} from '../enterprise/elasticsearch_autocomplete/helpers';
const channelType = {
public: 'Channel Type: Public',
archived: 'Channel Type: Archived',
};
describe('Channels', () => {
let testUser;
let otherUser;
@ -65,7 +70,7 @@ describe('Channels', () => {
cy.get('#moreChannelsModal').should('be.visible').within(() => {
// * Dropdown should be visible, defaulting to "Public Channels"
cy.get('#channelsMoreDropdown').should('be.visible').and('contain', 'Show: Public Channels').wait(TIMEOUTS.HALF_SEC);
cy.get('#channelsMoreDropdown').should('be.visible').and('contain', channelType.public).wait(TIMEOUTS.HALF_SEC);
cy.get('#searchChannelsTextbox').should('be.visible').type(testChannel.display_name).wait(TIMEOUTS.HALF_SEC);
cy.get('#moreChannelsList').should('be.visible').children().should('have.length', 1).within(() => {
@ -113,7 +118,7 @@ describe('Channels', () => {
cy.findByText('Archived Channels').should('be.visible').click();
// * Channel test should be visible as an archived channel in the list
cy.wrap(el).should('contain', 'Show: Archived Channels');
cy.wrap(el).should('contain', channelType.archived);
});
cy.get('#searchChannelsTextbox').should('be.visible').type(testChannel.display_name).wait(TIMEOUTS.HALF_SEC);
@ -196,7 +201,7 @@ describe('Channels', () => {
// * Dropdown should be visible, defaulting to "Public Channels"
cy.get('#channelsMoreDropdown').should('be.visible').within((el) => {
cy.wrap(el).should('contain', 'Show: Public Channels');
cy.wrap(el).should('contain', channelType.public);
});
// * Users should be able to type and search
@ -207,12 +212,12 @@ describe('Channels', () => {
cy.get('#moreChannelsModal').should('be.visible').within(() => {
// * Users should be able to switch to "Archived Channels" list
cy.get('#channelsMoreDropdown').should('be.visible').and('contain', 'Show: Public Channels').click().within((el) => {
cy.get('#channelsMoreDropdown').should('be.visible').and('contain', channelType.public).click().within((el) => {
// # Click on archived channels item
cy.findByText('Archived Channels').should('be.visible').click();
// * Modal should show the archived channels list
cy.wrap(el).should('contain', 'Show: Archived Channels');
cy.wrap(el).should('contain', channelType.archived);
}).wait(TIMEOUTS.HALF_SEC);
cy.get('#searchChannelsTextbox').clear();
cy.get('#moreChannelsList').should('be.visible').children().should('have.length', 2);
@ -250,7 +255,7 @@ function verifyMoreChannelsModal(isEnabled) {
// * Verify that the more channels modal is open and with or without option to view archived channels
cy.get('#moreChannelsModal').should('be.visible').within(() => {
if (isEnabled) {
cy.get('#channelsMoreDropdown').should('be.visible').and('have.text', 'Show: Public Channels');
cy.get('#channelsMoreDropdown').should('be.visible').and('have.text', channelType.public);
} else {
cy.get('#channelsMoreDropdown').should('not.exist');
}

View File

@ -142,7 +142,7 @@ describe('System Console - Subscriptions section', () => {
cy.get('.RHS').find('button').should('be.enabled');
// # Change the user seats field to a value smaller than the current number of users
const lessThanUserCount = count - 5;
const lessThanUserCount = 1;
cy.get('#input_UserSeats').clear().type(lessThanUserCount);
// * Ensure that the yearly, monthly, and yearly saving prices match the new user seats value entered

View File

@ -0,0 +1,48 @@
// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved.
// See LICENSE.txt for license information.
// ***************************************************************
// - [#] indicates a test step (e.g. # Go to a page)
// - [*] indicates an assertion (e.g. * Check the title)
// - Use element ID when selecting an element. Create one if none.
// ***************************************************************
// Stage: @prod
describe('Insights', () => {
let teamA;
before(() => {
cy.shouldHaveFeatureFlag('InsightsEnabled', true);
cy.apiInitSetup().then(({team}) => {
teamA = team;
});
});
it('Check all the cards exist', () => {
cy.apiAdminLogin();
// # Go to the Insights view
cy.visit(`/${teamA.name}/activity-and-insights`);
// * Check top channels exists
cy.get('.top-channels-card').should('exist');
// * Check top threads exists
cy.get('.top-threads-card').should('exist');
// * Check top boards exists because product mode is enabled
cy.get('.top-boards-card').should('exist');
// * Check top reactions exists
cy.get('.top-reactions-card').should('exist');
// * Check top dms exists
cy.get('.top-dms-card').should('exist');
// * Check least active channels exists
cy.get('.least-active-channels-card').should('exist');
// * Check top playbooks exists because product mode is enabled
cy.get('.top-playbooks-card').should('exist');
});
});

View File

@ -9,71 +9,45 @@
// Stage: @prod
// Group: @playbooks
import {onlyOn} from '@cypress/skip-test';
describe('channels > App Bar', {testIsolation: true}, () => {
let testTeam;
let testUser;
let testPlaybook;
let appBarEnabled;
before(() => {
cy.apiInitSetup().then(({team, user}) => {
testTeam = team;
testUser = user;
// # Login as testUser
cy.apiLogin(testUser);
// # Create a playbook
cy.apiCreateTestPlaybook({
teamId: testTeam.id,
title: 'Playbook',
userId: testUser.id,
}).then((playbook) => {
testPlaybook = playbook;
// # Start a playbook run
cy.apiRunPlaybook({
teamId: testTeam.id,
playbookId: testPlaybook.id,
playbookRunName: 'Playbook Run',
ownerUserId: testUser.id,
});
});
cy.apiGetConfig(true).then(({config}) => {
appBarEnabled = config.EnableAppBar === 'true';
});
});
});
beforeEach(() => {
// # Size the viewport to show the RHS without covering posts.
cy.viewport('macbook-13');
// # Login as testUser
cy.apiLogin(testUser);
cy.apiAdminLogin();
});
describe('App Bar disabled', () => {
it('should not show the Playbook App Bar icon', () => {
onlyOn(!appBarEnabled);
cy.apiUpdateConfig({ExperimentalSettings: {EnableAppBar: false}});
// # Login as testUser
cy.apiLogin(testUser);
// # Navigate directly to a non-playbook run channel
cy.visit(`/${testTeam.name}/channels/town-square`);
// * Verify App Bar icon is not showing
cy.get('#channel_view').within(() => {
cy.getPlaybooksAppBarIcon().should('not.exist');
});
cy.get('.app-bar').should('not.exist');
});
});
describe('App Bar enabled', () => {
it('should show the Playbook App Bar icon', () => {
onlyOn(appBarEnabled);
beforeEach(() => {
cy.apiUpdateConfig({ExperimentalSettings: {EnableAppBar: true}});
// # Login as testUser
cy.apiLogin(testUser);
});
it('should show the Playbook App Bar icon', () => {
// # Navigate directly to a non-playbook run channel
cy.visit(`/${testTeam.name}/channels/town-square`);
@ -82,8 +56,6 @@ describe('channels > App Bar', {testIsolation: true}, () => {
});
it('should show "Playbooks" tooltip for Playbook App Bar icon', () => {
onlyOn(appBarEnabled);
// # Navigate directly to a non-playbook run channel
cy.visit(`/${testTeam.name}/channels/town-square`);

View File

@ -9,14 +9,11 @@
// Stage: @prod
// Group: @playbooks
import {onlyOn} from '@cypress/skip-test';
describe('channels > channel header', {testIsolation: true}, () => {
let testTeam;
let testUser;
let testPlaybook;
let testPlaybookRun;
let appBarEnabled;
before(() => {
cy.apiInitSetup().then(({team, user}) => {
@ -44,24 +41,16 @@ describe('channels > channel header', {testIsolation: true}, () => {
testPlaybookRun = run;
});
});
cy.apiGetConfig(true).then(({config}) => {
appBarEnabled = config.EnableAppBar === 'true';
});
});
});
beforeEach(() => {
// # Size the viewport to show the RHS without covering posts.
cy.viewport('macbook-13');
// # Login as testUser
cy.apiLogin(testUser);
});
describe('App Bar enabled', () => {
it('webapp should hide the Playbook channel header button', () => {
onlyOn(appBarEnabled);
cy.apiAdminLogin();
cy.apiUpdateConfig({ExperimentalSettings: {EnableAppBar: true}});
// # Login as testUser
cy.apiLogin(testUser);
// # Navigate directly to a non-playbook run channel
cy.visit(`/${testTeam.name}/channels/town-square`);
@ -74,9 +63,15 @@ describe('channels > channel header', {testIsolation: true}, () => {
});
describe('App Bar disabled', () => {
it('webapp should show the Playbook channel header button', () => {
onlyOn(!appBarEnabled);
beforeEach(() => {
cy.apiAdminLogin();
cy.apiUpdateConfig({ExperimentalSettings: {EnableAppBar: false}});
// # Login as testUser
cy.apiLogin(testUser);
});
it('webapp should show the Playbook channel header button', () => {
// # Navigate directly to a non-playbook run channel
cy.visit(`/${testTeam.name}/channels/town-square`);
@ -87,8 +82,6 @@ describe('channels > channel header', {testIsolation: true}, () => {
});
it('tooltip text should show "Playbooks" for Playbook channel header button', () => {
onlyOn(!appBarEnabled);
// # Navigate directly to a non-playbook run channel
cy.visit(`/${testTeam.name}/channels/town-square`);
@ -103,6 +96,11 @@ describe('channels > channel header', {testIsolation: true}, () => {
});
describe('description text', () => {
beforeEach(() => {
// # Login as testUser
cy.apiLogin(testUser);
});
it('should contain a link to the playbook', () => {
// # Navigate directly to a playbook run channel
cy.visit(`/${testTeam.name}/channels/playbook-run`);
@ -112,6 +110,7 @@ describe('channels > channel header', {testIsolation: true}, () => {
expect(href).to.equals(`/playbooks/playbooks/${testPlaybook.id}`);
});
});
it('should contain a link to the overview page', () => {
// # Navigate directly to a playbook run channel
cy.visit(`/${testTeam.name}/channels/playbook-run`);

View File

@ -82,7 +82,7 @@ describe('channels > rhs > status update', {testIsolation: true}, () => {
});
});
it.skip('description link navigates to run overview', () => {
it('description link navigates to run overview', () => {
// # Run the `/playbook update` slash command.
cy.uiPostMessageQuickly('/playbook update');

View File

@ -153,7 +153,7 @@ describe('lhs', {testIsolation: true}, () => {
cy.findByTestId('dropdownmenu').should('be.visible');
});
it.skip('can copy link', () => {
it('can copy link', () => {
// # Visit the playbook run
cy.visit(`/playbooks/runs/${playbookRun.id}`);
stubClipboard().as('clipboard');
@ -295,7 +295,7 @@ describe('lhs', {testIsolation: true}, () => {
});
});
it.skip('leave run, when on rdp of the same run', () => {
it('leave run, when on rdp of the same run', () => {
// # Click on leave menu item
getRunDropdownItemByText('Runs', playbookRun.name, 'Leave and unfollow run').click();

View File

@ -290,7 +290,7 @@ describe('playbooks > edit_metrics', {testIsolation: true}, () => {
});
describe('delete metric', () => {
it.skip('verifies when clicking delete button; saved metrics have different confirmation text; deleted metrics are deleted', () => {
it('verifies when clicking delete button; saved metrics have different confirmation text; deleted metrics are deleted', () => {
// # Visit the selected playbook
cy.visit(`/playbooks/playbooks/${testPlaybook.id}`);

View File

@ -160,9 +160,7 @@ describe('runs > permissions', {testIsolation: true}, () => {
});
describe('should be visible', () => {
// XXX: Skipping this test, since public playbooks currently have no members. This will
// likely change in the future, so keeping the skeleton.
it.skip('to playbook members', () => {
it('to playbook members', () => {
assertRunIsVisible(run, playbookMember);
});
@ -242,9 +240,7 @@ describe('runs > permissions', {testIsolation: true}, () => {
});
describe('should be visible', () => {
// XXX: Skipping this test, since public playbooks currently have no members. This will
// likely change in the future.
it.skip('to playbook members', () => {
it('to playbook members', () => {
assertRunIsVisible(run, playbookMember);
});
@ -332,10 +328,9 @@ describe('runs > permissions', {testIsolation: true}, () => {
assertRunIsVisible(run, runParticipant);
});
// Skipping this test, since followers cannot follow a run with a private channel from
// a private playbook. (But leaving it for clarity in the code.)
it.skip('to run followers', () => {
assertRunIsVisible(run, runFollower);
// Followers cannot follow a run with a private channel from a private playbook
it('to run followers', () => {
assertRunIsNotVisible(run, runFollower);
});
it('to admins in the team', () => {
@ -414,10 +409,9 @@ describe('runs > permissions', {testIsolation: true}, () => {
assertRunIsVisible(run, runParticipant);
});
// Skipping this test, since followers cannot follow a run with a private channel from
// a private playbook. (But leaving it for clarity in the code.)
it.skip('to run followers', () => {
assertRunIsVisible(run, runFollower);
// Followers cannot follow a run with a private channel from a private playbook
it('to run followers', () => {
assertRunIsNotVisible(run, runFollower);
});
it('to admins in the team', () => {

View File

@ -692,7 +692,7 @@ describe('runs > run details page > header', {testIsolation: true}, () => {
});
});
describe.skip('Join action disabled', () => {
describe('Join action disabled', () => {
beforeEach(() => {
cy.apiLogin(testUser);

View File

@ -267,7 +267,7 @@ describe('runs > run details page > status update', {testIsolation: true}, () =>
});
});
it.skip('requests an update and confirm', () => {
it('requests an update and confirm', () => {
// # Click on request update
cy.findByTestId('run-statusupdate-section').
should('be.visible').
@ -281,11 +281,11 @@ describe('runs > run details page > status update', {testIsolation: true}, () =>
cy.visit(`${testTeam.name}/channels/${playbookRunChannelName}`);
// * Assert that message has been sent
cy.getLastPost().contains(`${testUser.username} requested a status update for ${testPublicPlaybook.name}.`);
cy.getLastPost().contains(`${testViewerUser.username} requested a status update for ${testRun.name}.`);
});
});
it.skip('requests an update and cancel', () => {
it('requests an update and cancel', () => {
// # Click request update
cy.findByTestId('run-statusupdate-section').
should('be.visible').

File diff suppressed because one or more lines are too long

View File

@ -1,4 +1,4 @@
.PHONY: build package run stop run-client run-server run-haserver stop-haserver stop-client stop-server restart restart-server restart-client restart-haserver start-docker clean-dist clean nuke check-style check-client-style check-server-style check-unit-tests test dist run-client-tests setup-run-client-tests cleanup-run-client-tests test-client build-linux build-osx build-windows package-prep package-linux package-osx package-windows internal-test-web-client vet run-server-for-web-client-tests diff-config prepackaged-plugins prepackaged-binaries test-server test-server-ee test-server-quick test-server-race new-migration migrations-extract
.PHONY: build package run stop run-client run-server run-haserver stop-haserver stop-client stop-server restart restart-server restart-client restart-haserver start-docker update-docker clean-dist clean nuke check-style check-client-style check-server-style check-unit-tests test dist run-client-tests setup-run-client-tests cleanup-run-client-tests test-client build-linux build-osx build-windows package-prep package-linux package-osx package-windows internal-test-web-client vet run-server-for-web-client-tests diff-config prepackaged-plugins prepackaged-binaries test-server test-server-ee test-server-quick test-server-race new-migration migrations-extract
ROOT := $(dir $(abspath $(lastword $(MAKEFILE_LIST))))
@ -153,7 +153,7 @@ PLUGIN_PACKAGES += mattermost-plugin-nps-v1.3.1
PLUGIN_PACKAGES += mattermost-plugin-todo-v0.6.1
PLUGIN_PACKAGES += mattermost-plugin-welcomebot-v1.2.0
PLUGIN_PACKAGES += mattermost-plugin-zoom-v1.6.0
PLUGIN_PACKAGES += mattermost-plugin-apps-v1.2.0
PLUGIN_PACKAGES += mattermost-plugin-apps-v1.2.1
# Prepares the enterprise build if exists. The IGNORE stuff is a hack to get the Makefile to execute the commands outside a target
ifeq ($(BUILD_ENTERPRISE_READY),true)
@ -237,6 +237,11 @@ else
endif
endif
update-docker: stop-docker ## Updates the docker containers for local development.
@echo Updating docker containers
$(GO) run ./build/docker-compose-generator/main.go $(ENABLED_DOCKER_SERVICES) | docker-compose -f docker-compose.makefile.yml -f /dev/stdin $(DOCKER_COMPOSE_OVERRIDE) up --no-start
run-haserver:
ifeq ($(BUILD_ENTERPRISE_READY),true)
@echo Starting mattermost in an HA topology '(3 node cluster)'
@ -287,7 +292,7 @@ endif
golangci-lint: ## Run golangci-lint on codebase
@# Keep the version in sync with the command in .circleci/config.yml
$(GO) install github.com/golangci/golangci-lint/cmd/golangci-lint@v1.50.1
$(GO) install github.com/golangci/golangci-lint/cmd/golangci-lint@v1.52.2
@echo Running golangci-lint
$(GOBIN)/golangci-lint run ./...

View File

@ -304,7 +304,7 @@ func (a *API) handlePostBlocks(w http.ResponseWriter, r *http.Request) {
// this query param exists when creating template from board, or board from template
sourceBoardID := r.URL.Query().Get("sourceBoardID")
if sourceBoardID != "" {
if updateFileIDsErr := a.app.CopyCardFiles(sourceBoardID, blocks); updateFileIDsErr != nil {
if updateFileIDsErr := a.app.CopyAndUpdateCardFiles(sourceBoardID, userID, blocks, false); updateFileIDsErr != nil {
a.errorResponse(w, r, updateFileIDsErr)
return
}

View File

@ -312,7 +312,7 @@ func (a *API) handleUploadFile(w http.ResponseWriter, r *http.Request) {
auditRec.AddMeta("teamID", board.TeamID)
auditRec.AddMeta("filename", handle.Filename)
fileID, err := a.app.SaveFile(file, board.TeamID, boardID, handle.Filename)
fileID, err := a.app.SaveFile(file, board.TeamID, boardID, handle.Filename, board.IsTemplate)
if err != nil {
a.errorResponse(w, r, err)
return

View File

@ -7,11 +7,9 @@ import (
"errors"
"fmt"
"path/filepath"
"strings"
"github.com/mattermost/mattermost-server/server/v8/boards/model"
"github.com/mattermost/mattermost-server/server/v8/boards/services/notify"
"github.com/mattermost/mattermost-server/server/v8/boards/utils"
"github.com/mattermost/mattermost-server/server/v8/platform/shared/mlog"
)
@ -39,6 +37,11 @@ func (a *App) DuplicateBlock(boardID string, blockID string, userID string, asTe
return nil, err
}
err = a.CopyAndUpdateCardFiles(boardID, userID, blocks, asTemplate)
if err != nil {
return nil, err
}
a.blockChangeNotifier.Enqueue(func() error {
for _, block := range blocks {
a.wsAdapter.BroadcastBlockChange(board.TeamID, block)
@ -286,95 +289,6 @@ func (a *App) InsertBlocksAndNotify(blocks []*model.Block, modifiedByID string,
return blocks, nil
}
func (a *App) CopyCardFiles(sourceBoardID string, copiedBlocks []*model.Block) error {
// Images attached in cards have a path comprising the card's board ID.
// When we create a template from this board, we need to copy the files
// with the new board ID in path.
// Not doing so causing images in templates (and boards created from this
// template) to fail to load.
// look up ID of source sourceBoard, which may be different than the blocks.
sourceBoard, err := a.GetBoard(sourceBoardID)
if err != nil || sourceBoard == nil {
return fmt.Errorf("cannot fetch source board %s for CopyCardFiles: %w", sourceBoardID, err)
}
var destTeamID string
var destBoardID string
for i := range copiedBlocks {
block := copiedBlocks[i]
fileName := ""
isOk := false
switch block.Type {
case model.TypeImage:
fileName, isOk = block.Fields["fileId"].(string)
if !isOk || fileName == "" {
continue
}
case model.TypeAttachment:
fileName, isOk = block.Fields["attachmentId"].(string)
if !isOk || fileName == "" {
continue
}
default:
continue
}
// create unique filename in case we are copying cards within the same board.
ext := filepath.Ext(fileName)
destFilename := utils.NewID(utils.IDTypeNone) + ext
if destBoardID == "" || block.BoardID != destBoardID {
destBoardID = block.BoardID
destBoard, err := a.GetBoard(destBoardID)
if err != nil {
return fmt.Errorf("cannot fetch destination board %s for CopyCardFiles: %w", sourceBoardID, err)
}
destTeamID = destBoard.TeamID
}
sourceFilePath := filepath.Join(sourceBoard.TeamID, sourceBoard.ID, fileName)
destinationFilePath := filepath.Join(destTeamID, block.BoardID, destFilename)
a.logger.Debug(
"Copying card file",
mlog.String("sourceFilePath", sourceFilePath),
mlog.String("destinationFilePath", destinationFilePath),
)
if err := a.filesBackend.CopyFile(sourceFilePath, destinationFilePath); err != nil {
a.logger.Error(
"CopyCardFiles failed to copy file",
mlog.String("sourceFilePath", sourceFilePath),
mlog.String("destinationFilePath", destinationFilePath),
mlog.Err(err),
)
}
if block.Type == model.TypeAttachment {
block.Fields["attachmentId"] = destFilename
parts := strings.Split(fileName, ".")
fileInfoID := parts[0][1:]
fileInfo, err := a.store.GetFileInfo(fileInfoID)
if err != nil {
return fmt.Errorf("CopyCardFiles: cannot retrieve original fileinfo: %w", err)
}
newParts := strings.Split(destFilename, ".")
newFileID := newParts[0][1:]
fileInfo.Id = newFileID
err = a.store.SaveFileInfo(fileInfo)
if err != nil {
return fmt.Errorf("CopyCardFiles: cannot create fileinfo: %w", err)
}
} else {
block.Fields["fileId"] = destFilename
}
}
return nil
}
func (a *App) GetBlockByID(blockID string) (*model.Block, error) {
return a.store.GetBlock(blockID)
}

View File

@ -184,8 +184,13 @@ func (a *App) DuplicateBoard(boardID, userID, toTeam string, asTemplate bool) (*
}
// copy any file attachments from the duplicated blocks.
if err = a.CopyCardFiles(boardID, bab.Blocks); err != nil {
a.logger.Error("Could not copy files while duplicating board", mlog.String("BoardID", boardID), mlog.Err(err))
err = a.CopyAndUpdateCardFiles(boardID, userID, bab.Blocks, asTemplate)
if err != nil {
dbab := model.NewDeleteBoardsAndBlocksFromBabs(bab)
if err = a.store.DeleteBoardsAndBlocks(dbab, userID); err != nil {
a.logger.Error("Cannot delete board after duplication error when updating block's file info", mlog.String("boardID", bab.Boards[0].ID), mlog.Err(err))
}
return nil, nil, fmt.Errorf("could not patch file IDs while duplicating board %s: %w", boardID, err)
}
if !asTemplate {
@ -196,44 +201,6 @@ func (a *App) DuplicateBoard(boardID, userID, toTeam string, asTemplate bool) (*
}
}
// bab.Blocks now has updated file ids for any blocks containing files. We need to store them.
blockIDs := make([]string, 0)
blockPatches := make([]model.BlockPatch, 0)
for _, block := range bab.Blocks {
fieldName := ""
if block.Type == model.TypeImage {
fieldName = "fileId"
} else if block.Type == model.TypeAttachment {
fieldName = "attachmentId"
}
if fieldName != "" {
if fieldID, ok := block.Fields[fieldName]; ok {
blockIDs = append(blockIDs, block.ID)
blockPatches = append(blockPatches, model.BlockPatch{
UpdatedFields: map[string]interface{}{
fieldName: fieldID,
},
})
}
}
}
a.logger.Debug("Duplicate boards patching file IDs", mlog.Int("count", len(blockIDs)))
if len(blockIDs) != 0 {
patches := &model.BlockPatchBatch{
BlockIDs: blockIDs,
BlockPatches: blockPatches,
}
if err = a.store.PatchBlocks(patches, userID); err != nil {
dbab := model.NewDeleteBoardsAndBlocksFromBabs(bab)
if err = a.store.DeleteBoardsAndBlocks(dbab, userID); err != nil {
a.logger.Error("Cannot delete board after duplication error when updating block's file info", mlog.String("boardID", bab.Boards[0].ID), mlog.Err(err))
}
return nil, nil, fmt.Errorf("could not patch file IDs while duplicating board %s: %w", boardID, err)
}
}
a.blockChangeNotifier.Enqueue(func() error {
teamID := ""
for _, board := range bab.Boards {

View File

@ -95,10 +95,10 @@ func (a *App) writeArchiveBoard(zw *zip.Writer, board model.Board, opt model.Exp
if err = a.writeArchiveBlockLine(w, block); err != nil {
return err
}
if block.Type == model.TypeImage {
filename, err2 := extractImageFilename(block)
if block.Type == model.TypeImage || block.Type == model.TypeAttachment {
filename, err2 := extractFilename(block)
if err2 != nil {
return err
return err2
}
files = append(files, filename)
}
@ -208,7 +208,10 @@ func (a *App) writeArchiveFile(zw *zip.Writer, filename string, boardID string,
return err
}
src, err := a.GetFileReader(opt.TeamID, boardID, filename)
_, fileReader, err := a.GetFile(opt.TeamID, boardID, filename)
if err != nil && !model.IsErrNotFound(err) {
return err
}
if err != nil {
// just log this; image file is missing but we'll still export an equivalent board
a.logger.Error("image file missing for export",
@ -218,9 +221,9 @@ func (a *App) writeArchiveFile(zw *zip.Writer, filename string, boardID string,
)
return nil
}
defer src.Close()
defer fileReader.Close()
_, err = io.Copy(dest, src)
_, err = io.Copy(dest, fileReader)
return err
}
@ -239,10 +242,13 @@ func (a *App) getBoardsForArchive(boardIDs []string) ([]model.Board, error) {
return boards, nil
}
func extractImageFilename(imageBlock *model.Block) (string, error) {
f, ok := imageBlock.Fields["fileId"]
func extractFilename(block *model.Block) (string, error) {
f, ok := block.Fields["fileId"]
if !ok {
return "", model.ErrInvalidImageBlock
f, ok = block.Fields["attachmentId"]
if !ok {
return "", model.ErrInvalidImageBlock
}
}
filename, ok := f.(string)

View File

@ -18,12 +18,10 @@ import (
"github.com/mattermost/mattermost-server/server/v8/platform/shared/mlog"
)
const emptyString = "empty"
var errEmptyFilename = errors.New("IsFileArchived: empty filename not allowed")
var ErrFileNotFound = errors.New("file not found")
func (a *App) SaveFile(reader io.Reader, teamID, rootID, filename string) (string, error) {
func (a *App) SaveFile(reader io.Reader, teamID, boardID, filename string, asTemplate bool) (string, error) {
// NOTE: File extension includes the dot
fileExtension := strings.ToLower(filepath.Ext(filename))
if fileExtension == ".jpeg" {
@ -31,44 +29,26 @@ func (a *App) SaveFile(reader io.Reader, teamID, rootID, filename string) (strin
}
createdFilename := utils.NewID(utils.IDTypeNone)
fullFilename := fmt.Sprintf(`%s%s`, createdFilename, fileExtension)
filePath := filepath.Join(utils.GetBaseFilePath(), fullFilename)
newFileName := fmt.Sprintf(`%s%s`, createdFilename, fileExtension)
if asTemplate {
newFileName = filename
}
filePath := getDestinationFilePath(asTemplate, teamID, boardID, newFileName)
fileSize, appErr := a.filesBackend.WriteFile(reader, filePath)
if appErr != nil {
return "", fmt.Errorf("unable to store the file in the files storage: %w", appErr)
}
now := utils.GetMillis()
fileInfo := &mm_model.FileInfo{
Id: createdFilename[1:],
CreatorId: "boards",
PostId: emptyString,
ChannelId: emptyString,
CreateAt: now,
UpdateAt: now,
DeleteAt: 0,
Path: filePath,
ThumbnailPath: emptyString,
PreviewPath: emptyString,
Name: filename,
Extension: fileExtension,
Size: fileSize,
MimeType: emptyString,
Width: 0,
Height: 0,
HasPreviewImage: false,
MiniPreview: nil,
Content: "",
RemoteId: nil,
}
fileInfo := model.NewFileInfo(filename)
fileInfo.Id = getFileInfoID(createdFilename)
fileInfo.Path = filePath
fileInfo.Size = fileSize
err := a.store.SaveFileInfo(fileInfo)
if err != nil {
return "", err
}
return fullFilename, nil
return newFileName, nil
}
func (a *App) GetFileInfo(filename string) (*mm_model.FileInfo, error) {
@ -79,8 +59,7 @@ func (a *App) GetFileInfo(filename string) (*mm_model.FileInfo, error) {
// filename is in the format 7<some-alphanumeric-string>.<extension>
// we want to extract the <some-alphanumeric-string> part of this as this
// will be the fileinfo id.
parts := strings.Split(filename, ".")
fileInfoID := parts[0][1:]
fileInfoID := getFileInfoID(strings.Split(filename, ".")[0])
fileInfo, err := a.store.GetFileInfo(fileInfoID)
if err != nil {
return nil, err
@ -90,10 +69,33 @@ func (a *App) GetFileInfo(filename string) (*mm_model.FileInfo, error) {
}
func (a *App) GetFile(teamID, rootID, fileName string) (*mm_model.FileInfo, filestore.ReadCloseSeeker, error) {
fileInfo, filePath, err := a.GetFilePath(teamID, rootID, fileName)
if err != nil {
a.logger.Error("GetFile: Failed to GetFilePath.", mlog.String("Team", teamID), mlog.String("board", rootID), mlog.String("filename", fileName), mlog.Err(err))
return nil, nil, err
}
exists, err := a.filesBackend.FileExists(filePath)
if err != nil {
a.logger.Error("GetFile: Failed to check if file exists as path. ", mlog.String("Path", filePath), mlog.Err(err))
return nil, nil, err
}
if !exists {
return nil, nil, ErrFileNotFound
}
reader, err := a.filesBackend.Reader(filePath)
if err != nil {
a.logger.Error("GetFile: Failed to get file reader of existing file at path", mlog.String("Path", filePath), mlog.Err(err))
return nil, nil, err
}
return fileInfo, reader, nil
}
func (a *App) GetFilePath(teamID, rootID, fileName string) (*mm_model.FileInfo, string, error) {
fileInfo, err := a.GetFileInfo(fileName)
if err != nil && !model.IsErrNotFound(err) {
a.logger.Error("111")
return nil, nil, err
return nil, "", err
}
var filePath string
@ -104,22 +106,23 @@ func (a *App) GetFile(teamID, rootID, fileName string) (*mm_model.FileInfo, file
filePath = filepath.Join(teamID, rootID, fileName)
}
exists, err := a.filesBackend.FileExists(filePath)
if err != nil {
a.logger.Error(fmt.Sprintf("GetFile: Failed to check if file exists as path. Path: %s, error: %e", filePath, err))
return nil, nil, err
}
return fileInfo, filePath, nil
}
if !exists {
return nil, nil, ErrFileNotFound
func getDestinationFilePath(isTemplate bool, teamID, boardID, filename string) string {
// if saving a file for a template, save using the "old method" that is /teamID/boardID/fileName
// this will prevent template files from being deleted by DataRetention,
// which deletes all files inside the "date" subdirectory
if isTemplate {
return filepath.Join(teamID, boardID, filename)
}
return filepath.Join(utils.GetBaseFilePath(), filename)
}
reader, err := a.filesBackend.Reader(filePath)
if err != nil {
a.logger.Error(fmt.Sprintf("GetFile: Failed to get file reader of existing file at path: %s, error: %e", filePath, err))
return nil, nil, err
}
return fileInfo, reader, nil
func getFileInfoID(fileName string) string {
// Boards ids are 27 characters long with a prefix character.
// removing the prefix, returns the 26 character uuid
return fileName[1:]
}
func (a *App) GetFileReader(teamID, rootID, filename string) (filestore.ReadCloseSeeker, error) {
@ -175,3 +178,121 @@ func (a *App) MoveFile(channelID, teamID, boardID, filename string) error {
}
return nil
}
func (a *App) CopyAndUpdateCardFiles(boardID, userID string, blocks []*model.Block, asTemplate bool) error {
newFileNames, err := a.CopyCardFiles(boardID, blocks, asTemplate)
if err != nil {
a.logger.Error("Could not copy files while duplicating board", mlog.String("BoardID", boardID), mlog.Err(err))
}
// blocks now has updated file ids for any blocks containing files. We need to update the database for them.
blockIDs := make([]string, 0)
blockPatches := make([]model.BlockPatch, 0)
for _, block := range blocks {
if block.Type == model.TypeImage || block.Type == model.TypeAttachment {
if fileID, ok := block.Fields["fileId"].(string); ok {
blockIDs = append(blockIDs, block.ID)
blockPatches = append(blockPatches, model.BlockPatch{
UpdatedFields: map[string]interface{}{
"fileId": newFileNames[fileID],
},
DeletedFields: []string{"attachmentId"},
})
}
}
}
a.logger.Debug("Duplicate boards patching file IDs", mlog.Int("count", len(blockIDs)))
if len(blockIDs) != 0 {
patches := &model.BlockPatchBatch{
BlockIDs: blockIDs,
BlockPatches: blockPatches,
}
if err := a.store.PatchBlocks(patches, userID); err != nil {
return fmt.Errorf("could not patch file IDs while duplicating board %s: %w", boardID, err)
}
}
return nil
}
func (a *App) CopyCardFiles(sourceBoardID string, copiedBlocks []*model.Block, asTemplate bool) (map[string]string, error) {
// Images attached in cards have a path comprising the card's board ID.
// When we create a template from this board, we need to copy the files
// with the new board ID in path.
// Not doing so causing images in templates (and boards created from this
// template) to fail to load.
// look up ID of source sourceBoard, which may be different than the blocks.
sourceBoard, err := a.GetBoard(sourceBoardID)
if err != nil || sourceBoard == nil {
return nil, fmt.Errorf("cannot fetch source board %s for CopyCardFiles: %w", sourceBoardID, err)
}
var destBoard *model.Board
newFileNames := make(map[string]string)
for _, block := range copiedBlocks {
if block.Type != model.TypeImage && block.Type != model.TypeAttachment {
continue
}
fileId, isOk := block.Fields["fileId"].(string)
if !isOk {
fileId, isOk = block.Fields["attachmentId"].(string)
if !isOk {
continue
}
}
// create unique filename
ext := filepath.Ext(fileId)
fileInfoID := utils.NewID(utils.IDTypeNone)
destFilename := fileInfoID + ext
if destBoard == nil || block.BoardID != destBoard.ID {
destBoard = sourceBoard
if block.BoardID != destBoard.ID {
destBoard, err = a.GetBoard(block.BoardID)
if err != nil {
return nil, fmt.Errorf("cannot fetch destination board %s for CopyCardFiles: %w", sourceBoardID, err)
}
}
}
// GetFilePath will retrieve the correct path
// depending on whether FileInfo table is used for the file.
fileInfo, sourceFilePath, err := a.GetFilePath(sourceBoard.TeamID, sourceBoard.ID, fileId)
if err != nil {
return nil, fmt.Errorf("cannot fetch destination board %s for CopyCardFiles: %w", sourceBoardID, err)
}
destinationFilePath := getDestinationFilePath(asTemplate, destBoard.TeamID, destBoard.ID, destFilename)
if fileInfo == nil {
fileInfo = model.NewFileInfo(destFilename)
}
fileInfo.Id = getFileInfoID(fileInfoID)
fileInfo.Path = destinationFilePath
err = a.store.SaveFileInfo(fileInfo)
if err != nil {
return nil, fmt.Errorf("CopyCardFiles: cannot create fileinfo: %w", err)
}
a.logger.Debug(
"Copying card file",
mlog.String("sourceFilePath", sourceFilePath),
mlog.String("destinationFilePath", destinationFilePath),
)
if err := a.filesBackend.CopyFile(sourceFilePath, destinationFilePath); err != nil {
a.logger.Error(
"CopyCardFiles failed to copy file",
mlog.String("sourceFilePath", sourceFilePath),
mlog.String("destinationFilePath", destinationFilePath),
mlog.Err(err),
)
}
newFileNames[fileId] = destFilename
}
return newFileNames, nil
}

View File

@ -15,6 +15,7 @@ import (
"github.com/golang/mock/gomock"
"github.com/stretchr/testify/assert"
"github.com/mattermost/mattermost-server/server/v8/boards/model"
mm_model "github.com/mattermost/mattermost-server/server/v8/model"
"github.com/mattermost/mattermost-server/server/v8/platform/shared/filestore"
"github.com/mattermost/mattermost-server/server/v8/platform/shared/filestore/mocks"
@ -210,7 +211,7 @@ func TestSaveFile(t *testing.T) {
}
mockedFileBackend.On("WriteFile", mockedReadCloseSeek, mock.Anything).Return(writeFileFunc, writeFileErrorFunc)
actual, err := th.App.SaveFile(mockedReadCloseSeek, "1", testBoardID, fileName)
actual, err := th.App.SaveFile(mockedReadCloseSeek, "1", testBoardID, fileName, false)
assert.Equal(t, fileName, actual)
assert.NoError(t, err)
})
@ -234,7 +235,7 @@ func TestSaveFile(t *testing.T) {
}
mockedFileBackend.On("WriteFile", mockedReadCloseSeek, mock.Anything).Return(writeFileFunc, writeFileErrorFunc)
actual, err := th.App.SaveFile(mockedReadCloseSeek, "1", "test-board-id", fileName)
actual, err := th.App.SaveFile(mockedReadCloseSeek, "1", "test-board-id", fileName, false)
assert.NoError(t, err)
assert.NotNil(t, actual)
})
@ -258,7 +259,7 @@ func TestSaveFile(t *testing.T) {
}
mockedFileBackend.On("WriteFile", mockedReadCloseSeek, mock.Anything).Return(writeFileFunc, writeFileErrorFunc)
actual, err := th.App.SaveFile(mockedReadCloseSeek, "1", "test-board-id", fileName)
actual, err := th.App.SaveFile(mockedReadCloseSeek, "1", "test-board-id", fileName, false)
assert.Equal(t, "", actual)
assert.Equal(t, "unable to store the file in the files storage: Mocked File backend error", err.Error())
})
@ -312,7 +313,7 @@ func TestGetFileInfo(t *testing.T) {
func TestGetFile(t *testing.T) {
th, _ := SetupTestHelper(t)
t.Run("when FileInfo exists", func(t *testing.T) {
t.Run("happy path, no errors", func(t *testing.T) {
th.Store.EXPECT().GetFileInfo("fileInfoID").Return(&mm_model.FileInfo{
Id: "fileInfoID",
Path: "/path/to/file/fileName.txt",
@ -337,27 +338,72 @@ func TestGetFile(t *testing.T) {
assert.NotNil(t, seeker)
})
t.Run("when FileInfo doesn't exist", func(t *testing.T) {
th.Store.EXPECT().GetFileInfo("fileInfoID").Return(nil, nil)
t.Run("when GetFilePath() throws error", func(t *testing.T) {
th.Store.EXPECT().GetFileInfo("fileInfoID").Return(nil, errDummy)
fileInfo, seeker, err := th.App.GetFile("teamID", "boardID", "7fileInfoID.txt")
assert.Error(t, err)
assert.Nil(t, fileInfo)
assert.Nil(t, seeker)
})
t.Run("when FileExists returns false", func(t *testing.T) {
th.Store.EXPECT().GetFileInfo("fileInfoID").Return(&mm_model.FileInfo{
Id: "fileInfoID",
Path: "/path/to/file/fileName.txt",
}, nil)
mockedFileBackend := &mocks.FileBackend{}
th.App.filesBackend = mockedFileBackend
mockedReadCloseSeek := &mocks.ReadCloseSeeker{}
readerFunc := func(path string) filestore.ReadCloseSeeker {
return mockedReadCloseSeek
}
readerErrorFunc := func(path string) error {
return nil
}
mockedFileBackend.On("Reader", "teamID/boardID/7fileInfoID.txt").Return(readerFunc, readerErrorFunc)
mockedFileBackend.On("FileExists", "teamID/boardID/7fileInfoID.txt").Return(true, nil)
mockedFileBackend.On("FileExists", "/path/to/file/fileName.txt").Return(false, nil)
fileInfo, seeker, err := th.App.GetFile("teamID", "boardID", "7fileInfoID.txt")
assert.Error(t, err)
assert.Nil(t, fileInfo)
assert.Nil(t, seeker)
})
t.Run("when FileReader throws error", func(t *testing.T) {
th.Store.EXPECT().GetFileInfo("fileInfoID").Return(&mm_model.FileInfo{
Id: "fileInfoID",
Path: "/path/to/file/fileName.txt",
}, nil)
mockedFileBackend := &mocks.FileBackend{}
th.App.filesBackend = mockedFileBackend
mockedFileBackend.On("Reader", "/path/to/file/fileName.txt").Return(nil, errDummy)
mockedFileBackend.On("FileExists", "/path/to/file/fileName.txt").Return(true, nil)
fileInfo, seeker, err := th.App.GetFile("teamID", "boardID", "7fileInfoID.txt")
assert.Error(t, err)
assert.Nil(t, fileInfo)
assert.Nil(t, seeker)
})
}
func TestGetFilePath(t *testing.T) {
th, _ := SetupTestHelper(t)
t.Run("when FileInfo exists", func(t *testing.T) {
path := "/path/to/file/fileName.txt"
th.Store.EXPECT().GetFileInfo("fileInfoID").Return(&mm_model.FileInfo{
Id: "fileInfoID",
Path: path,
}, nil)
fileInfo, filePath, err := th.App.GetFilePath("teamID", "boardID", "7fileInfoID.txt")
assert.NoError(t, err)
assert.NotNil(t, fileInfo)
assert.Equal(t, path, filePath)
})
t.Run("when FileInfo doesn't exist", func(t *testing.T) {
th.Store.EXPECT().GetFileInfo("fileInfoID").Return(nil, nil)
fileInfo, filePath, err := th.App.GetFilePath("teamID", "boardID", "7fileInfoID.txt")
assert.NoError(t, err)
assert.Nil(t, fileInfo)
assert.NotNil(t, seeker)
assert.Equal(t, "teamID/boardID/7fileInfoID.txt", filePath)
})
t.Run("when FileInfo exists but FileInfo.Path is not set", func(t *testing.T) {
@ -366,22 +412,158 @@ func TestGetFile(t *testing.T) {
Path: "",
}, nil)
mockedFileBackend := &mocks.FileBackend{}
th.App.filesBackend = mockedFileBackend
mockedReadCloseSeek := &mocks.ReadCloseSeeker{}
readerFunc := func(path string) filestore.ReadCloseSeeker {
return mockedReadCloseSeek
}
readerErrorFunc := func(path string) error {
return nil
}
mockedFileBackend.On("Reader", "teamID/boardID/7fileInfoID.txt").Return(readerFunc, readerErrorFunc)
mockedFileBackend.On("FileExists", "teamID/boardID/7fileInfoID.txt").Return(true, nil)
fileInfo, seeker, err := th.App.GetFile("teamID", "boardID", "7fileInfoID.txt")
fileInfo, filePath, err := th.App.GetFilePath("teamID", "boardID", "7fileInfoID.txt")
assert.NoError(t, err)
assert.NotNil(t, fileInfo)
assert.NotNil(t, seeker)
assert.Equal(t, "teamID/boardID/7fileInfoID.txt", filePath)
})
}
// TestCopyCard verifies CopyCardFiles for image and attachment blocks,
// both when stored FileInfo exists for the referenced file and when it
// does not (in which case a new FileInfo is created and saved).
func TestCopyCard(t *testing.T) {
	th, _ := SetupTestHelper(t)

	imageBlock := &model.Block{
		ID:         "imageBlock",
		ParentID:   "c3zqnh6fsu3f4mr6hzq9hizwske",
		CreatedBy:  "6k6ynxdp47dujjhhojw9nqhmyh",
		ModifiedBy: "6k6ynxdp47dujjhhojw9nqhmyh",
		Schema:     1,
		Type:       "image",
		Title:      "",
		Fields:     map[string]interface{}{"fileId": "7fileName.jpg"},
		CreateAt:   1680725585250,
		UpdateAt:   1680725585250,
		DeleteAt:   0,
		BoardID:    "boardID",
	}

	t.Run("Board doesn't exist", func(t *testing.T) {
		th.Store.EXPECT().GetBoard("boardID").Return(nil, errDummy)
		_, err := th.App.CopyCardFiles("boardID", []*model.Block{}, false)
		assert.Error(t, err)
	})

	t.Run("Board exists, image block, with FileInfo", func(t *testing.T) {
		path := "/path/to/file/fileName.txt"
		fileInfo := &mm_model.FileInfo{
			Id:   "imageBlock",
			Path: path,
		}
		th.Store.EXPECT().GetBoard("boardID").Return(&model.Board{
			ID:         "boardID",
			IsTemplate: false,
		}, nil)
		th.Store.EXPECT().GetFileInfo("fileName").Return(fileInfo, nil)
		th.Store.EXPECT().SaveFileInfo(fileInfo).Return(nil)

		mockedFileBackend := &mocks.FileBackend{}
		th.App.filesBackend = mockedFileBackend
		mockedFileBackend.On("CopyFile", mock.Anything, mock.Anything).Return(nil)

		updatedFileNames, err := th.App.CopyCardFiles("boardID", []*model.Block{imageBlock}, false)
		assert.NoError(t, err)
		// The source block keeps its original fileId; the returned map is
		// keyed by the old file name.
		assert.Equal(t, "7fileName.jpg", imageBlock.Fields["fileId"])
		assert.NotNil(t, updatedFileNames[imageBlock.Fields["fileId"].(string)])
	})

	t.Run("Board exists, attachment block, with FileInfo", func(t *testing.T) {
		attachmentBlock := &model.Block{
			ID:         "attachmentBlock",
			ParentID:   "c3zqnh6fsu3f4mr6hzq9hizwske",
			CreatedBy:  "6k6ynxdp47dujjhhojw9nqhmyh",
			ModifiedBy: "6k6ynxdp47dujjhhojw9nqhmyh",
			Schema:     1,
			Type:       "attachment",
			Title:      "",
			Fields:     map[string]interface{}{"fileId": "7fileName.jpg"},
			CreateAt:   1680725585250,
			UpdateAt:   1680725585250,
			DeleteAt:   0,
			BoardID:    "boardID",
		}
		path := "/path/to/file/fileName.txt"
		fileInfo := &mm_model.FileInfo{
			Id:   "attachmentBlock",
			Path: path,
		}
		th.Store.EXPECT().GetBoard("boardID").Return(&model.Board{
			ID:         "boardID",
			IsTemplate: false,
		}, nil)
		th.Store.EXPECT().GetFileInfo("fileName").Return(fileInfo, nil)
		th.Store.EXPECT().SaveFileInfo(fileInfo).Return(nil)

		mockedFileBackend := &mocks.FileBackend{}
		th.App.filesBackend = mockedFileBackend
		mockedFileBackend.On("CopyFile", mock.Anything, mock.Anything).Return(nil)

		updatedFileNames, err := th.App.CopyCardFiles("boardID", []*model.Block{attachmentBlock}, false)
		assert.NoError(t, err)
		// Fix: assert against the block actually copied in this subtest
		// (previously referenced imageBlock from the sibling subtest).
		assert.NotNil(t, updatedFileNames[attachmentBlock.Fields["fileId"].(string)])
	})

	t.Run("Board exists, image block, without FileInfo", func(t *testing.T) {
		th.Store.EXPECT().GetBoard("boardID").Return(&model.Board{
			ID:         "boardID",
			IsTemplate: false,
		}, nil)
		// No stored FileInfo for the file: CopyCardFiles must create and
		// persist a fresh one.
		th.Store.EXPECT().GetFileInfo(gomock.Any()).Return(nil, nil)
		th.Store.EXPECT().SaveFileInfo(gomock.Any()).Return(nil)

		mockedFileBackend := &mocks.FileBackend{}
		th.App.filesBackend = mockedFileBackend
		mockedFileBackend.On("CopyFile", mock.Anything, mock.Anything).Return(nil)

		updatedFileNames, err := th.App.CopyCardFiles("boardID", []*model.Block{imageBlock}, false)
		assert.NoError(t, err)
		assert.NotNil(t, imageBlock.Fields["fileId"].(string))
		assert.NotNil(t, updatedFileNames[imageBlock.Fields["fileId"].(string)])
	})
}
// TestCopyAndUpdateCardFiles verifies that copying card files also patches
// the block so its fileId no longer points at the original stored path.
func TestCopyAndUpdateCardFiles(t *testing.T) {
	th, _ := SetupTestHelper(t)

	block := &model.Block{
		ID:         "imageBlock",
		ParentID:   "c3zqnh6fsu3f4mr6hzq9hizwske",
		CreatedBy:  "6k6ynxdp47dujjhhojw9nqhmyh",
		ModifiedBy: "6k6ynxdp47dujjhhojw9nqhmyh",
		Schema:     1,
		Type:       "image",
		Title:      "",
		Fields:     map[string]interface{}{"fileId": "7fileName.jpg"},
		CreateAt:   1680725585250,
		UpdateAt:   1680725585250,
		DeleteAt:   0,
		BoardID:    "boardID",
	}

	t.Run("Board exists, image block, with FileInfo", func(t *testing.T) {
		storedPath := "/path/to/file/fileName.txt"
		info := &mm_model.FileInfo{
			Id:   "imageBlock",
			Path: storedPath,
		}

		// Store expectations: board lookup, file-info round trip, and the
		// block patch issued on behalf of the acting user.
		th.Store.EXPECT().GetBoard("boardID").Return(&model.Board{
			ID:         "boardID",
			IsTemplate: false,
		}, nil)
		th.Store.EXPECT().GetFileInfo("fileName").Return(info, nil)
		th.Store.EXPECT().SaveFileInfo(info).Return(nil)
		th.Store.EXPECT().PatchBlocks(gomock.Any(), "userID").Return(nil)

		backend := &mocks.FileBackend{}
		th.App.filesBackend = backend
		backend.On("CopyFile", mock.Anything, mock.Anything).Return(nil)

		err := th.App.CopyAndUpdateCardFiles("boardID", "userID", []*model.Block{block}, false)
		assert.NoError(t, err)
		// The fileId field must have been rewritten away from the stored path.
		assert.NotEqual(t, storedPath, block.Fields["fileId"])
	})
}

View File

@ -44,27 +44,19 @@ func (a *App) ImportArchive(r io.Reader, opt model.ImportArchiveOptions) error {
a.logger.Debug("importing legacy archive")
_, errImport := a.ImportBoardJSONL(br, opt)
go func() {
if err := a.UpdateCardLimitTimestamp(); err != nil {
a.logger.Error(
"UpdateCardLimitTimestamp failed after importing a legacy file",
mlog.Err(err),
)
}
}()
return errImport
}
a.logger.Debug("importing archive")
zr := zipstream.NewReader(br)
boardMap := make(map[string]string) // maps old board ids to new
boardMap := make(map[string]*model.Board) // maps old board ids to new
fileMap := make(map[string]string) // maps old fileIds to new
for {
hdr, err := zr.Next()
if err != nil {
if errors.Is(err, io.EOF) {
a.fixImagesAttachments(boardMap, fileMap, opt.TeamID, opt.ModifiedBy)
a.logger.Debug("import archive - done", mlog.Int("boards_imported", len(boardMap)))
return nil
}
@ -84,14 +76,14 @@ func (a *App) ImportArchive(r io.Reader, opt model.ImportArchiveOptions) error {
return model.NewErrUnsupportedArchiveVersion(ver, archiveVersion)
}
case "board.jsonl":
boardID, err := a.ImportBoardJSONL(zr, opt)
board, err := a.ImportBoardJSONL(zr, opt)
if err != nil {
return fmt.Errorf("cannot import board %s: %w", dir, err)
}
boardMap[dir] = boardID
boardMap[dir] = board
default:
// import file/image; dir is the old board id
boardID, ok := boardMap[dir]
board, ok := boardMap[dir]
if !ok {
a.logger.Warn("skipping orphan image in archive",
mlog.String("dir", dir),
@ -99,33 +91,63 @@ func (a *App) ImportArchive(r io.Reader, opt model.ImportArchiveOptions) error {
)
continue
}
// save file with original filename so it matches name in image block.
filePath := filepath.Join(opt.TeamID, boardID, filename)
_, err := a.filesBackend.WriteFile(zr, filePath)
newFileName, err := a.SaveFile(zr, opt.TeamID, board.ID, filename, board.IsTemplate)
if err != nil {
return fmt.Errorf("cannot import file %s for board %s: %w", filename, dir, err)
}
fileMap[filename] = newFileName
a.logger.Debug("import archive file",
mlog.String("TeamID", opt.TeamID),
mlog.String("boardID", board.ID),
mlog.String("filename", filename),
mlog.String("newFileName", newFileName),
)
}
}
}
// Update image and attachment blocks
func (a *App) fixImagesAttachments(boardMap map[string]*model.Board, fileMap map[string]string, teamID string, userId string) {
blockIDs := make([]string, 0)
blockPatches := make([]model.BlockPatch, 0)
for _, board := range boardMap {
if board.IsTemplate {
continue
}
a.logger.Trace("import archive file",
mlog.String("dir", dir),
mlog.String("filename", filename),
)
opts := model.QueryBlocksOptions{
BoardID: board.ID,
}
newBlocks, err := a.GetBlocks(opts)
if err != nil {
a.logger.Info("cannot retrieve imported blocks for board", mlog.String("BoardID", board.ID), mlog.Err(err))
return
}
go func() {
if err := a.UpdateCardLimitTimestamp(); err != nil {
a.logger.Error(
"UpdateCardLimitTimestamp failed after importing an archive",
mlog.Err(err),
)
for _, block := range newBlocks {
if block.Type == "image" || block.Type == "attachment" {
fieldName := "fileId"
oldId := block.Fields[fieldName]
blockIDs = append(blockIDs, block.ID)
blockPatches = append(blockPatches, model.BlockPatch{
UpdatedFields: map[string]interface{}{
fieldName: fileMap[oldId.(string)],
},
})
}
}()
}
blockPatchBatch := model.BlockPatchBatch{BlockIDs: blockIDs, BlockPatches: blockPatches}
a.PatchBlocks(teamID, &blockPatchBatch, userId)
}
}
// ImportBoardJSONL imports a JSONL file containing blocks for one board. The resulting
// board id is returned.
func (a *App) ImportBoardJSONL(r io.Reader, opt model.ImportArchiveOptions) (string, error) {
func (a *App) ImportBoardJSONL(r io.Reader, opt model.ImportArchiveOptions) (*model.Board, error) {
// TODO: Stream this once `model.GenerateBlockIDs` can take a stream of blocks.
// We don't want to load the whole file in memory, even though it's a single board.
boardsAndBlocks := &model.BoardsAndBlocks{
@ -158,7 +180,7 @@ func (a *App) ImportBoardJSONL(r io.Reader, opt model.ImportArchiveOptions) (str
if !skip {
var archiveLine model.ArchiveLine
if err := json.Unmarshal(line, &archiveLine); err != nil {
return "", fmt.Errorf("error parsing archive line %d: %w", lineNum, err)
return nil, fmt.Errorf("error parsing archive line %d: %w", lineNum, err)
}
// first line must be a board
@ -170,7 +192,7 @@ func (a *App) ImportBoardJSONL(r io.Reader, opt model.ImportArchiveOptions) (str
case "board":
var board model.Board
if err2 := json.Unmarshal(archiveLine.Data, &board); err2 != nil {
return "", fmt.Errorf("invalid board in archive line %d: %w", lineNum, err2)
return nil, fmt.Errorf("invalid board in archive line %d: %w", lineNum, err2)
}
board.ModifiedBy = userID
board.UpdateAt = now
@ -181,20 +203,20 @@ func (a *App) ImportBoardJSONL(r io.Reader, opt model.ImportArchiveOptions) (str
// legacy archives encoded boards as blocks; we need to convert them to real boards.
var block *model.Block
if err2 := json.Unmarshal(archiveLine.Data, &block); err2 != nil {
return "", fmt.Errorf("invalid board block in archive line %d: %w", lineNum, err2)
return nil, fmt.Errorf("invalid board block in archive line %d: %w", lineNum, err2)
}
block.ModifiedBy = userID
block.UpdateAt = now
board, err := a.blockToBoard(block, opt)
if err != nil {
return "", fmt.Errorf("cannot convert archive line %d to block: %w", lineNum, err)
return nil, fmt.Errorf("cannot convert archive line %d to block: %w", lineNum, err)
}
boardsAndBlocks.Boards = append(boardsAndBlocks.Boards, board)
boardID = board.ID
case "block":
var block *model.Block
if err2 := json.Unmarshal(archiveLine.Data, &block); err2 != nil {
return "", fmt.Errorf("invalid block in archive line %d: %w", lineNum, err2)
return nil, fmt.Errorf("invalid block in archive line %d: %w", lineNum, err2)
}
block.ModifiedBy = userID
block.UpdateAt = now
@ -203,11 +225,11 @@ func (a *App) ImportBoardJSONL(r io.Reader, opt model.ImportArchiveOptions) (str
case "boardMember":
var boardMember *model.BoardMember
if err2 := json.Unmarshal(archiveLine.Data, &boardMember); err2 != nil {
return "", fmt.Errorf("invalid board Member in archive line %d: %w", lineNum, err2)
return nil, fmt.Errorf("invalid board Member in archive line %d: %w", lineNum, err2)
}
boardMembers = append(boardMembers, boardMember)
default:
return "", model.NewErrUnsupportedArchiveLineType(lineNum, archiveLine.Type)
return nil, model.NewErrUnsupportedArchiveLineType(lineNum, archiveLine.Type)
}
firstLine = false
}
@ -217,7 +239,7 @@ func (a *App) ImportBoardJSONL(r io.Reader, opt model.ImportArchiveOptions) (str
if errors.Is(errRead, io.EOF) {
break
}
return "", fmt.Errorf("error reading archive line %d: %w", lineNum, errRead)
return nil, fmt.Errorf("error reading archive line %d: %w", lineNum, errRead)
}
lineNum++
}
@ -234,12 +256,12 @@ func (a *App) ImportBoardJSONL(r io.Reader, opt model.ImportArchiveOptions) (str
var err error
boardsAndBlocks, err = model.GenerateBoardsAndBlocksIDs(boardsAndBlocks, a.logger)
if err != nil {
return "", fmt.Errorf("error generating archive block IDs: %w", err)
return nil, fmt.Errorf("error generating archive block IDs: %w", err)
}
boardsAndBlocks, err = a.CreateBoardsAndBlocks(boardsAndBlocks, opt.ModifiedBy, false)
if err != nil {
return "", fmt.Errorf("error inserting archive blocks: %w", err)
return nil, fmt.Errorf("error inserting archive blocks: %w", err)
}
// add users to all the new boards (if not the fake system user).
@ -251,7 +273,7 @@ func (a *App) ImportBoardJSONL(r io.Reader, opt model.ImportArchiveOptions) (str
SchemeAdmin: true,
}
if _, err2 := a.AddMemberToBoard(adminMember); err2 != nil {
return "", fmt.Errorf("cannot add adminMember to board: %w", err2)
return nil, fmt.Errorf("cannot add adminMember to board: %w", err2)
}
for _, boardMember := range boardMembers {
bm := &model.BoardMember{
@ -266,16 +288,16 @@ func (a *App) ImportBoardJSONL(r io.Reader, opt model.ImportArchiveOptions) (str
Synthetic: boardMember.Synthetic,
}
if _, err2 := a.AddMemberToBoard(bm); err2 != nil {
return "", fmt.Errorf("cannot add member to board: %w", err2)
return nil, fmt.Errorf("cannot add member to board: %w", err2)
}
}
}
// find new board id
for _, board := range boardsAndBlocks.Boards {
return board.ID, nil
return board, nil
}
return "", fmt.Errorf("missing board in archive: %w", model.ErrInvalidBoardBlock)
return nil, fmt.Errorf("missing board in archive: %w", model.ErrInvalidBoardBlock)
}
// fixBoardsandBlocks allows the caller of `ImportArchive` to modify or filters boards and blocks being

View File

@ -138,9 +138,76 @@ func TestApp_ImportArchive(t *testing.T) {
th.Store.EXPECT().GetUserByID("hxxzooc3ff8cubsgtcmpn8733e").AnyTimes().Return(user2, nil)
th.Store.EXPECT().GetUserByID("nto73edn5ir6ifimo5a53y1dwa").AnyTimes().Return(user3, nil)
boardID, err := th.App.ImportBoardJSONL(r, opts)
require.Equal(t, board.ID, boardID, "Board ID should be same")
newBoard, err := th.App.ImportBoardJSONL(r, opts)
require.NoError(t, err, "import archive should not fail")
require.Equal(t, board.ID, newBoard.ID, "Board ID should be same")
})
t.Run("fix image and attachment", func(t *testing.T) {
boardMap := map[string]*model.Board{
"test": board,
}
fileMap := map[string]string{
"oldFileName1.jpg": "newFileName1.jpg",
"oldFileName2.jpg": "newFileName2.jpg",
}
imageBlock := &model.Block{
ID: "blockID-1",
ParentID: "c3zqnh6fsu3f4mr6hzq9hizwske",
CreatedBy: "6k6ynxdp47dujjhhojw9nqhmyh",
ModifiedBy: "6k6ynxdp47dujjhhojw9nqhmyh",
Schema: 1,
Type: "image",
Title: "",
Fields: map[string]interface{}{"fileId": "oldFileName1.jpg"},
CreateAt: 1680725585250,
UpdateAt: 1680725585250,
DeleteAt: 0,
BoardID: "board-id",
}
attachmentBlock := &model.Block{
ID: "blockID-2",
ParentID: "c3zqnh6fsu3f4mr6hzq9hizwske",
CreatedBy: "6k6ynxdp47dujjhhojw9nqhmyh",
ModifiedBy: "6k6ynxdp47dujjhhojw9nqhmyh",
Schema: 1,
Type: "attachment",
Title: "",
Fields: map[string]interface{}{"fileId": "oldFileName2.jpg"},
CreateAt: 1680725585250,
UpdateAt: 1680725585250,
DeleteAt: 0,
BoardID: "board-id",
}
blockIDs := []string{"blockID-1", "blockID-2"}
blockPatch := model.BlockPatch{
UpdatedFields: map[string]interface{}{"fileId": "newFileName1.jpg"},
}
blockPatch2 := model.BlockPatch{
UpdatedFields: map[string]interface{}{"fileId": "newFileName2.jpg"},
}
blockPatches := []model.BlockPatch{blockPatch, blockPatch2}
blockPatchesBatch := model.BlockPatchBatch{BlockIDs: blockIDs, BlockPatches: blockPatches}
opts := model.QueryBlocksOptions{
BoardID: board.ID,
}
th.Store.EXPECT().GetBlocks(opts).Return([]*model.Block{imageBlock, attachmentBlock}, nil)
th.Store.EXPECT().GetBlocksByIDs(blockIDs).Return([]*model.Block{imageBlock, attachmentBlock}, nil)
th.Store.EXPECT().GetBlock(blockIDs[0]).Return(imageBlock, nil)
th.Store.EXPECT().GetBlock(blockIDs[1]).Return(attachmentBlock, nil)
th.Store.EXPECT().GetMembersForBoard("board-id").AnyTimes().Return([]*model.BoardMember{}, nil)
th.Store.EXPECT().PatchBlocks(&blockPatchesBatch, "my-userid")
th.App.fixImagesAttachments(boardMap, fileMap, "test-team", "my-userid")
})
}

View File

@ -53,6 +53,7 @@ func TestApp_initializeTemplates(t *testing.T) {
th.Store.EXPECT().GetMembersForBoard(board.ID).AnyTimes().Return([]*model.BoardMember{}, nil)
th.Store.EXPECT().GetBoard(board.ID).AnyTimes().Return(board, nil)
th.Store.EXPECT().GetMemberForBoard(gomock.Any(), gomock.Any()).AnyTimes().Return(boardMember, nil)
th.Store.EXPECT().SaveFileInfo(gomock.Any()).Return(nil).AnyTimes()
th.FilesBackend.On("WriteFile", mock.Anything, mock.Anything).Return(int64(1), nil)

View File

@ -3379,7 +3379,7 @@ func TestPermissionsGetFile(t *testing.T) {
clients := setupClients(th)
testData := setupData(t, th)
newFileID, err := th.Server.App().SaveFile(bytes.NewBuffer([]byte("test")), "test-team", testData.privateBoard.ID, "test.png")
newFileID, err := th.Server.App().SaveFile(bytes.NewBuffer([]byte("test")), "test-team", testData.privateBoard.ID, "test.png", false)
require.NoError(t, err)
ttCases := ttCasesF()
@ -3394,7 +3394,7 @@ func TestPermissionsGetFile(t *testing.T) {
clients := setupLocalClients(th)
testData := setupData(t, th)
newFileID, err := th.Server.App().SaveFile(bytes.NewBuffer([]byte("test")), "test-team", testData.privateBoard.ID, "test.png")
newFileID, err := th.Server.App().SaveFile(bytes.NewBuffer([]byte("test")), "test-team", testData.privateBoard.ID, "test.png", false)
require.NoError(t, err)
ttCases := ttCasesF()

View File

@ -0,0 +1,27 @@
// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved.
// See LICENSE.txt for license information.
package model
import (
"mime"
"path/filepath"
"strings"
"github.com/mattermost/mattermost-server/server/v8/boards/utils"
mm_model "github.com/mattermost/mattermost-server/server/v8/model"
)
// NewFileInfo builds a FileInfo for the given file name. The extension is
// lower-cased, the MIME type is derived from it, and both creation and
// update timestamps are stamped with the current time in milliseconds.
func NewFileInfo(name string) *mm_model.FileInfo {
	ext := strings.ToLower(filepath.Ext(name))
	millis := utils.GetMillis()

	info := &mm_model.FileInfo{
		CreatorId: "boards",
		CreateAt:  millis,
		UpdateAt:  millis,
		Name:      name,
		Extension: ext,
		MimeType:  mime.TypeByExtension(ext),
	}
	return info
}

View File

@ -126,7 +126,7 @@ func (s *SQLStore) isSchemaMigrationNeeded() (bool, error) {
case model.MysqlDBType:
query = query.Where(sq.Eq{"TABLE_SCHEMA": s.schemaName})
case model.PostgresDBType:
query = query.Where(sq.Eq{"TABLE_SCHEMA": "current_schema()"})
query = query.Where("table_schema = current_schema()")
}
rows, err := query.Query()

View File

@ -1,4 +1,4 @@
FROM debian:buster-slim@sha256:5b0b1a9a54651bbe9d4d3ee96bbda2b2a1da3d2fa198ddebbced46dfdca7f216
FROM ubuntu:jammy-20230308@sha256:7a57c69fe1e9d5b97c5fe649849e79f2cfc3bf11d10bbd5218b4eb61716aebe6
# Setting bash as our shell, and enabling pipefail option
@ -13,68 +13,13 @@ ARG MM_PACKAGE="https://releases.mattermost.com/7.10.0/mattermost-7.10.0-linux-a
# # Install needed packages and indirect dependencies
RUN apt-get update \
&& apt-get install --no-install-recommends -y \
ca-certificates=20200601~deb10u2 \
curl=7.64.0-4+deb10u2 \
mime-support=3.62 \
unrtf=0.21.10-clean-1 \
wv=1.2.9-4.2+b2 \
poppler-utils=0.71.0-5 \
tidy=2:5.6.0-10 \
libssl1.1=1.1.1n-0+deb10u3 \
sensible-utils=0.0.12 \
libsasl2-modules-db=2.1.27+dfsg-1+deb10u2 \
libsasl2-2=2.1.27+dfsg-1+deb10u2 \
libldap-common=2.4.47+dfsg-3+deb10u7 \
libldap-2.4-2=2.4.47+dfsg-3+deb10u7 \
libicu63=63.1-6+deb10u3 \
libxml2=2.9.4+dfsg1-7+deb10u4 \
ucf=3.0038+nmu1 \
openssl=1.1.1n-0+deb10u3 \
libkeyutils1=1.6-6 \
libkrb5support0=1.17-3+deb10u4 \
libk5crypto3=1.17-3+deb10u4 \
libkrb5-3=1.17-3+deb10u4 \
libgssapi-krb5-2=1.17-3+deb10u4 \
libnghttp2-14=1.36.0-2+deb10u1 \
libpsl5=0.20.2-2 \
librtmp1=2.4+20151223.gitfa8646d.1-2 \
libssh2-1=1.8.0-2.1 \
libcurl4=7.64.0-4+deb10u2 \
fonts-dejavu-core=2.37-1 \
fontconfig-config=2.13.1-2 \
libbsd0=0.9.1-2+deb10u1 \
libexpat1=2.2.6-2+deb10u4 \
libpng16-16=1.6.36-6 \
libfreetype6=2.9.1-3+deb10u2 \
libfontconfig1=2.13.1-2 \
libpixman-1-0=0.36.0-1 \
libxau6=1:1.0.8-1+b2 \
libxdmcp6=1:1.1.2-3 \
libxcb1=1.13.1-2 \
libx11-data=2:1.6.7-1+deb10u2 \
libx11-6=2:1.6.7-1+deb10u2 \
libxcb-render0=1.13.1-2 \
libxcb-shm0=1.13.1-2 \
libxext6=2:1.3.3-1+b2 \
libxrender1=1:0.9.10-1 \
libcairo2=1.16.0-4+deb10u1 \
libcurl3-gnutls=7.64.0-4+deb10u5 \
libglib2.0-0=2.58.3-2+deb10u3 \
libgsf-1-common=1.14.45-1 \
libgsf-1-114=1.14.45-1 \
libjbig0=2.1-3.1+b2 \
libjpeg62-turbo=1:1.5.2-2+deb10u1 \
liblcms2-2=2.9-3 \
libnspr4=2:4.20-1 \
libsqlite3-0=3.27.2-3+deb10u1 \
libnss3=2:3.42.1-1+deb10u5 \
libopenjp2-7=2.3.0-2+deb10u2 \
libwebp6=0.6.1-2+deb10u1 \
libtiff5=4.1.0+git191117-2~deb10u4 \
libpoppler82=0.71.0-5 \
libtidy5deb1=2:5.6.0-10 \
libwmf0.2-7=0.2.8.4-14 \
libwv-1.2-4=1.2.9-4.2+b2 \
ca-certificates \
curl \
mime-support \
unrtf \
wv \
poppler-utils \
tidy \
&& rm -rf /var/lib/apt/lists/*
# Set mattermost group/user and download Mattermost

View File

@ -329,13 +329,6 @@ func executeCommand(c *Context, w http.ResponseWriter, r *http.Request) {
return
}
// For compatibility reasons, PermissionCreatePost is also checked.
// TODO: Remove in 8.0: https://mattermost.atlassian.net/browse/MM-51274
if !c.App.SessionHasPermissionToChannel(c.AppContext, *c.AppContext.Session(), commandArgs.ChannelId, model.PermissionUseSlashCommands) {
c.SetPermissionError(model.PermissionUseSlashCommands)
return
}
channel, err := c.App.GetChannel(c.AppContext, commandArgs.ChannelId)
if err != nil {
c.Err = err
@ -354,13 +347,6 @@ func executeCommand(c *Context, w http.ResponseWriter, r *http.Request) {
c.SetPermissionError(model.PermissionCreatePost)
return
}
// For compatibility reasons, PermissionCreatePost is also checked.
// TODO: Remove in 8.0: https://mattermost.atlassian.net/browse/MM-51274
if !c.App.SessionHasPermissionTo(*c.AppContext.Session(), model.PermissionUseSlashCommands) {
c.SetPermissionError(model.PermissionUseSlashCommands)
return
}
}
}

View File

@ -1722,14 +1722,26 @@ func TestInstallMarketplacePluginPrepackagedDisabled(t *testing.T) {
appErr := th.App.AddPublicKey("pub_key", key)
require.Nil(t, appErr)
t.Cleanup(func() {
appErr = th.App.DeletePublicKey("pub_key")
require.Nil(t, appErr)
})
testServer := httptest.NewServer(http.HandlerFunc(func(res http.ResponseWriter, req *http.Request) {
serverVersion := req.URL.Query().Get("server_version")
require.NotEmpty(t, serverVersion)
require.Equal(t, model.CurrentVersion, serverVersion)
res.WriteHeader(http.StatusOK)
var out []byte
out, err = json.Marshal([]*model.MarketplacePlugin{samplePlugins[1]})
require.NoError(t, err)
// Return something if testplugin2 or no specific plugin is requested
pluginID := req.URL.Query().Get("plugin_id")
if pluginID == "" || pluginID == samplePlugins[1].Manifest.Id {
out, err = json.Marshal([]*model.MarketplacePlugin{samplePlugins[1]})
require.NoError(t, err)
}
res.Write(out)
}))
defer testServer.Close()
@ -1748,43 +1760,52 @@ func TestInstallMarketplacePluginPrepackagedDisabled(t *testing.T) {
require.Len(t, pluginsResp.Active, 0)
require.Len(t, pluginsResp.Inactive, 0)
// Should fail to install unknown prepackaged plugin
pRequest := &model.InstallMarketplacePluginRequest{Id: "testpluginXX"}
manifest, resp, err := client.InstallMarketplacePlugin(pRequest)
require.Error(t, err)
CheckInternalErrorStatus(t, resp)
require.Nil(t, manifest)
t.Run("Should fail to install unknown prepackaged plugin", func(t *testing.T) {
pRequest := &model.InstallMarketplacePluginRequest{Id: "testpluginXX"}
manifest, resp, err := client.InstallMarketplacePlugin(pRequest)
require.Error(t, err)
CheckInternalErrorStatus(t, resp)
require.Nil(t, manifest)
plugins := env.PrepackagedPlugins()
require.Len(t, plugins, 1)
require.Equal(t, "testplugin", plugins[0].Manifest.Id)
require.Equal(t, pluginSignatureData, plugins[0].Signature)
plugins := env.PrepackagedPlugins()
require.Len(t, plugins, 1)
require.Equal(t, "testplugin", plugins[0].Manifest.Id)
require.Equal(t, pluginSignatureData, plugins[0].Signature)
pluginsResp, _, err = client.GetPlugins()
require.NoError(t, err)
require.Len(t, pluginsResp.Active, 0)
require.Len(t, pluginsResp.Inactive, 0)
pluginsResp, _, err = client.GetPlugins()
require.NoError(t, err)
require.Len(t, pluginsResp.Active, 0)
require.Len(t, pluginsResp.Inactive, 0)
})
pRequest = &model.InstallMarketplacePluginRequest{Id: "testplugin"}
manifest1, _, err := client.InstallMarketplacePlugin(pRequest)
require.NoError(t, err)
require.NotNil(t, manifest1)
require.Equal(t, "testplugin", manifest1.Id)
require.Equal(t, "0.0.1", manifest1.Version)
t.Run("Install prepackaged plugin with Marketplace disabled", func(t *testing.T) {
pRequest := &model.InstallMarketplacePluginRequest{Id: "testplugin"}
manifest, _, err := client.InstallMarketplacePlugin(pRequest)
require.NoError(t, err)
require.NotNil(t, manifest)
require.Equal(t, "testplugin", manifest.Id)
require.Equal(t, "0.0.1", manifest.Version)
pluginsResp, _, err = client.GetPlugins()
require.NoError(t, err)
require.Len(t, pluginsResp.Active, 0)
require.Equal(t, pluginsResp.Inactive, []*model.PluginInfo{{
Manifest: *manifest1,
}})
t.Cleanup(func() {
_, err = client.RemovePlugin(manifest.Id)
require.NoError(t, err)
})
// Try to install remote marketplace plugin
pRequest = &model.InstallMarketplacePluginRequest{Id: "testplugin2"}
manifest, resp, err = client.InstallMarketplacePlugin(pRequest)
require.Error(t, err)
CheckInternalErrorStatus(t, resp)
require.Nil(t, manifest)
pluginsResp, _, err = client.GetPlugins()
require.NoError(t, err)
require.Len(t, pluginsResp.Active, 0)
require.Equal(t, pluginsResp.Inactive, []*model.PluginInfo{{
Manifest: *manifest,
}})
})
t.Run("Try to install remote marketplace plugin while Marketplace is disabled", func(t *testing.T) {
pRequest := &model.InstallMarketplacePluginRequest{Id: "testplugin2"}
manifest, resp, err := client.InstallMarketplacePlugin(pRequest)
require.Error(t, err)
CheckInternalErrorStatus(t, resp)
require.Nil(t, manifest)
})
// Enable remote marketplace
th.App.UpdateConfig(func(cfg *model.Config) {
@ -1794,31 +1815,58 @@ func TestInstallMarketplacePluginPrepackagedDisabled(t *testing.T) {
*cfg.PluginSettings.AllowInsecureDownloadURL = true
})
pRequest = &model.InstallMarketplacePluginRequest{Id: "testplugin2"}
manifest2, _, err := client.InstallMarketplacePlugin(pRequest)
require.NoError(t, err)
require.NotNil(t, manifest2)
require.Equal(t, "testplugin2", manifest2.Id)
require.Equal(t, "1.2.3", manifest2.Version)
t.Run("Install prepackaged, not listed plugin with Marketplace enabled", func(t *testing.T) {
pRequest := &model.InstallMarketplacePluginRequest{Id: "testplugin"}
manifest, _, err := client.InstallMarketplacePlugin(pRequest)
require.NoError(t, err)
pluginsResp, _, err = client.GetPlugins()
require.NoError(t, err)
require.Len(t, pluginsResp.Active, 0)
require.ElementsMatch(t, pluginsResp.Inactive, []*model.PluginInfo{
{
Manifest: *manifest1,
},
{
Manifest: *manifest2,
},
t.Cleanup(func() {
_, err = client.RemovePlugin(manifest.Id)
require.NoError(t, err)
})
require.NotNil(t, manifest)
assert.Equal(t, "testplugin", manifest.Id)
assert.Equal(t, "0.0.1", manifest.Version)
})
// Clean up
_, err = client.RemovePlugin(manifest1.Id)
require.NoError(t, err)
t.Run("Install both a prepacked and a Marketplace plugin", func(t *testing.T) {
pRequest := &model.InstallMarketplacePluginRequest{Id: "testplugin"}
manifest1, _, err := client.InstallMarketplacePlugin(pRequest)
require.NoError(t, err)
require.NotNil(t, manifest1)
assert.Equal(t, "testplugin", manifest1.Id)
assert.Equal(t, "0.0.1", manifest1.Version)
_, err = client.RemovePlugin(manifest2.Id)
require.NoError(t, err)
t.Cleanup(func() {
_, err = client.RemovePlugin(manifest1.Id)
require.NoError(t, err)
})
pRequest = &model.InstallMarketplacePluginRequest{Id: "testplugin2"}
manifest2, _, err := client.InstallMarketplacePlugin(pRequest)
require.NoError(t, err)
require.NotNil(t, manifest2)
require.Equal(t, "testplugin2", manifest2.Id)
require.Equal(t, "1.2.3", manifest2.Version)
t.Cleanup(func() {
_, err = client.RemovePlugin(manifest2.Id)
require.NoError(t, err)
})
pluginsResp, _, err = client.GetPlugins()
require.NoError(t, err)
require.Len(t, pluginsResp.Active, 0)
require.ElementsMatch(t, pluginsResp.Inactive, []*model.PluginInfo{
{
Manifest: *manifest1,
},
{
Manifest: *manifest2,
},
})
})
appErr = th.App.DeletePublicKey("pub_key")
require.Nil(t, appErr)

View File

@ -119,7 +119,6 @@ func TestDoAdvancedPermissionsMigration(t *testing.T) {
model.PermissionGetPublicLink.Id,
model.PermissionCreatePost.Id,
model.PermissionUseChannelMentions.Id,
model.PermissionUseSlashCommands.Id,
model.PermissionManagePublicChannelProperties.Id,
model.PermissionDeletePublicChannel.Id,
model.PermissionManagePrivateChannelProperties.Id,

View File

@ -459,7 +459,7 @@ func TestImportImportRole(t *testing.T) {
// Try changing all the params and reimporting.
data.DisplayName = ptrStr("new display name")
data.Description = ptrStr("description")
data.Permissions = &[]string{"use_slash_commands"}
data.Permissions = &[]string{"manage_slash_commands"}
err = th.App.importRole(th.Context, &data, false, true)
require.Nil(t, err, "Should have succeeded. %v", err)

View File

@ -114,7 +114,7 @@ func TestImportPermissions(t *testing.T) {
}
beforeCount = len(results)
json := fmt.Sprintf(`{"display_name":"%v","name":"%v","description":"%v","scope":"%v","default_team_admin_role":"","default_team_user_role":"","default_channel_admin_role":"%v","default_channel_user_role":"%v","roles":[{"id":"yzfx3g9xjjfw8cqo6bpn33xr7o","name":"%v","display_name":"Channel Admin Role for Scheme my_scheme_1526475590","description":"","create_at":1526475589687,"update_at":1526475589687,"delete_at":0,"permissions":["manage_channel_roles"],"scheme_managed":true,"built_in":false},{"id":"a7s3cp4n33dfxbsrmyh9djao3a","name":"%v","display_name":"Channel User Role for Scheme my_scheme_1526475590","description":"","create_at":1526475589688,"update_at":1526475589688,"delete_at":0,"permissions":["read_channel","add_reaction","remove_reaction","manage_public_channel_members","upload_file","get_public_link","create_post","use_slash_commands","manage_private_channel_members","delete_post","edit_post"],"scheme_managed":true,"built_in":false}]}`, displayName, name, description, scope, roleName1, roleName2, roleName1, roleName2)
json := fmt.Sprintf(`{"display_name":"%v","name":"%v","description":"%v","scope":"%v","default_team_admin_role":"","default_team_user_role":"","default_channel_admin_role":"%v","default_channel_user_role":"%v","roles":[{"id":"yzfx3g9xjjfw8cqo6bpn33xr7o","name":"%v","display_name":"Channel Admin Role for Scheme my_scheme_1526475590","description":"","create_at":1526475589687,"update_at":1526475589687,"delete_at":0,"permissions":["manage_channel_roles"],"scheme_managed":true,"built_in":false},{"id":"a7s3cp4n33dfxbsrmyh9djao3a","name":"%v","display_name":"Channel User Role for Scheme my_scheme_1526475590","description":"","create_at":1526475589688,"update_at":1526475589688,"delete_at":0,"permissions":["read_channel","add_reaction","remove_reaction","manage_public_channel_members","upload_file","get_public_link","create_post","manage_private_channel_members","delete_post","edit_post"],"scheme_managed":true,"built_in":false}]}`, displayName, name, description, scope, roleName1, roleName2, roleName1, roleName2)
r := strings.NewReader(json)
err := th.App.ImportPermissions(r)
@ -183,7 +183,7 @@ func TestImportPermissions_idempotentScheme(t *testing.T) {
roleName1 := model.NewId()
roleName2 := model.NewId()
json := fmt.Sprintf(`{"display_name":"%v","name":"%v","description":"%v","scope":"%v","default_team_admin_role":"","default_team_user_role":"","default_channel_admin_role":"%v","default_channel_user_role":"%v","roles":[{"id":"yzfx3g9xjjfw8cqo6bpn33xr7o","name":"%v","display_name":"Channel Admin Role for Scheme my_scheme_1526475590","description":"","create_at":1526475589687,"update_at":1526475589687,"delete_at":0,"permissions":["manage_channel_roles"],"scheme_managed":true,"built_in":false},{"id":"a7s3cp4n33dfxbsrmyh9djao3a","name":"%v","display_name":"Channel User Role for Scheme my_scheme_1526475590","description":"","create_at":1526475589688,"update_at":1526475589688,"delete_at":0,"permissions":["read_channel","add_reaction","remove_reaction","manage_public_channel_members","upload_file","get_public_link","create_post","use_slash_commands","manage_private_channel_members","delete_post","edit_post"],"scheme_managed":true,"built_in":false}]}`, displayName, name, description, scope, roleName1, roleName2, roleName1, roleName2)
json := fmt.Sprintf(`{"display_name":"%v","name":"%v","description":"%v","scope":"%v","default_team_admin_role":"","default_team_user_role":"","default_channel_admin_role":"%v","default_channel_user_role":"%v","roles":[{"id":"yzfx3g9xjjfw8cqo6bpn33xr7o","name":"%v","display_name":"Channel Admin Role for Scheme my_scheme_1526475590","description":"","create_at":1526475589687,"update_at":1526475589687,"delete_at":0,"permissions":["manage_channel_roles"],"scheme_managed":true,"built_in":false},{"id":"a7s3cp4n33dfxbsrmyh9djao3a","name":"%v","display_name":"Channel User Role for Scheme my_scheme_1526475590","description":"","create_at":1526475589688,"update_at":1526475589688,"delete_at":0,"permissions":["read_channel","add_reaction","remove_reaction","manage_public_channel_members","upload_file","get_public_link","create_post","manage_private_channel_members","delete_post","edit_post"],"scheme_managed":true,"built_in":false}]}`, displayName, name, description, scope, roleName1, roleName2, roleName1, roleName2)
jsonl := strings.Repeat(json+"\n", 4)
r := strings.NewReader(jsonl)
@ -226,7 +226,7 @@ func TestImportPermissions_schemeDeletedOnRoleFailure(t *testing.T) {
roleName1 := model.NewId()
roleName2 := model.NewId()
jsonl := fmt.Sprintf(`{"display_name":"%v","name":"%v","description":"%v","scope":"%v","default_team_admin_role":"","default_team_user_role":"","default_channel_admin_role":"%v","default_channel_user_role":"%v","roles":[{"id":"yzfx3g9xjjfw8cqo6bpn33xr7o","name":"%v","display_name":"Channel Admin Role for Scheme my_scheme_1526475590","description":"","create_at":1526475589687,"update_at":1526475589687,"delete_at":0,"permissions":["manage_channel_roles"],"scheme_managed":true,"built_in":false},{"id":"a7s3cp4n33dfxbsrmyh9djao3a","name":"%v","display_name":"Channel User Role for Scheme my_scheme_1526475590","description":"","create_at":1526475589688,"update_at":1526475589688,"delete_at":0,"permissions":["read_channel","add_reaction","remove_reaction","manage_public_channel_members","upload_file","get_public_link","create_post","use_slash_commands","manage_private_channel_members","delete_post","edit_post"],"scheme_managed":true,"built_in":false}]}`, displayName, name, description, scope, roleName1, roleName2, roleName1, roleName2)
jsonl := fmt.Sprintf(`{"display_name":"%v","name":"%v","description":"%v","scope":"%v","default_team_admin_role":"","default_team_user_role":"","default_channel_admin_role":"%v","default_channel_user_role":"%v","roles":[{"id":"yzfx3g9xjjfw8cqo6bpn33xr7o","name":"%v","display_name":"Channel Admin Role for Scheme my_scheme_1526475590","description":"","create_at":1526475589687,"update_at":1526475589687,"delete_at":0,"permissions":["manage_channel_roles"],"scheme_managed":true,"built_in":false},{"id":"a7s3cp4n33dfxbsrmyh9djao3a","name":"%v","display_name":"Channel User Role for Scheme my_scheme_1526475590","description":"","create_at":1526475589688,"update_at":1526475589688,"delete_at":0,"permissions":["read_channel","add_reaction","remove_reaction","manage_public_channel_members","upload_file","get_public_link","create_post","manage_private_channel_members","delete_post","edit_post"],"scheme_managed":true,"built_in":false}]}`, displayName, name, description, scope, roleName1, roleName2, roleName1, roleName2)
r := strings.NewReader(jsonl)
var results []*model.Scheme

View File

@ -203,35 +203,38 @@ func (ch *Channels) InstallMarketplacePlugin(request *model.InstallMarketplacePl
if *ch.cfgSvc.Config().PluginSettings.EnableRemoteMarketplace {
var plugin *model.BaseMarketplacePlugin
plugin, appErr = ch.getRemoteMarketplacePlugin(request.Id, request.Version)
if appErr != nil {
return nil, appErr
// The plugin might only be prepackaged and not on the Marketplace.
if appErr != nil && appErr.Id != "app.plugin.marketplace_plugins.not_found.app_error" {
mlog.Warn("Failed to reach Marketplace to install plugin", mlog.String("plugin_id", request.Id), mlog.Err(appErr))
}
var prepackagedVersion semver.Version
if prepackagedPlugin != nil {
var err error
prepackagedVersion, err = semver.Parse(prepackagedPlugin.Manifest.Version)
if err != nil {
return nil, model.NewAppError("InstallMarketplacePlugin", "app.plugin.invalid_version.app_error", nil, "", http.StatusBadRequest).Wrap(err)
if plugin != nil {
var prepackagedVersion semver.Version
if prepackagedPlugin != nil {
var err error
prepackagedVersion, err = semver.Parse(prepackagedPlugin.Manifest.Version)
if err != nil {
return nil, model.NewAppError("InstallMarketplacePlugin", "app.plugin.invalid_version.app_error", nil, "", http.StatusBadRequest).Wrap(err)
}
}
}
marketplaceVersion, err := semver.Parse(plugin.Manifest.Version)
if err != nil {
return nil, model.NewAppError("InstallMarketplacePlugin", "app.prepackged-plugin.invalid_version.app_error", nil, "", http.StatusBadRequest).Wrap(err)
}
marketplaceVersion, err := semver.Parse(plugin.Manifest.Version)
if err != nil {
return nil, model.NewAppError("InstallMarketplacePlugin", "app.prepackged-plugin.invalid_version.app_error", nil, "", http.StatusBadRequest).Wrap(err)
}
if prepackagedVersion.LT(marketplaceVersion) { // Always true if no prepackaged plugin was found
downloadedPluginBytes, err := ch.srv.downloadFromURL(plugin.DownloadURL)
if err != nil {
return nil, model.NewAppError("InstallMarketplacePlugin", "app.plugin.install_marketplace_plugin.app_error", nil, "", http.StatusInternalServerError).Wrap(err)
if prepackagedVersion.LT(marketplaceVersion) { // Always true if no prepackaged plugin was found
downloadedPluginBytes, err := ch.srv.downloadFromURL(plugin.DownloadURL)
if err != nil {
return nil, model.NewAppError("InstallMarketplacePlugin", "app.plugin.install_marketplace_plugin.app_error", nil, "", http.StatusInternalServerError).Wrap(err)
}
signature, err := plugin.DecodeSignature()
if err != nil {
return nil, model.NewAppError("InstallMarketplacePlugin", "app.plugin.signature_decode.app_error", nil, "", http.StatusNotImplemented).Wrap(err)
}
pluginFile = bytes.NewReader(downloadedPluginBytes)
signatureFile = signature
}
signature, err := plugin.DecodeSignature()
if err != nil {
return nil, model.NewAppError("InstallMarketplacePlugin", "app.plugin.signature_decode.app_error", nil, "", http.StatusNotImplemented).Wrap(err)
}
pluginFile = bytes.NewReader(downloadedPluginBytes)
signatureFile = signature
}
}

View File

@ -93,6 +93,7 @@ var wt{{.MD5}} = &WorkTemplate{
Illustration: "{{.Playbook.Illustration}}",
},{{end}}{{if .Integration}}Integration: &Integration{
ID: "{{.Integration.ID}}",
Recommended: {{.Integration.Recommended}},
},{{end}}
},
{{end}}

View File

@ -45,8 +45,10 @@ content:
illustration: "/static/worktemplates/playbooks/product_release.png"
- integration:
id: jira
recommended: true
- integration:
id: github
recommended: true
---
id: 'product_teams/goals_and_okrs:v1'
category: product_teams
@ -86,7 +88,7 @@ content:
channel: channel-1674845108569
- integration:
id: zoom
recommended: true
---
id: 'product_teams/bug_bash:v1'
category: product_teams
@ -120,6 +122,7 @@ content:
playbook: playbook-1674844017943
- integration:
id: jira
recommended: true
---
id: 'product_teams/sprint_planning:v1'
category: product_teams
@ -153,6 +156,7 @@ content:
channel: channel-1674850783500
- integration:
id: zoom
recommended: true
---
id: 'product_teams/product_roadmap:v1'
category: product_teams
@ -282,6 +286,7 @@ content:
channel: channel-1674845108569
- integration:
id: zoom
recommended: true
---
id: 'companywide/create_project:v1'
category: companywide
@ -316,10 +321,13 @@ content:
channel: channel-1674851940114
- integration:
id: jira
recommended: true
- integration:
id: github
recommended: true
- integration:
id: zoom
recommended: true
---
######################
# Leadership
@ -356,4 +364,4 @@ content:
channel: channel-1674845108569
- integration:
id: zoom
recommended: true

View File

@ -108,7 +108,8 @@ func (wt WorkTemplate) ToModelWorkTemplate(t i18n.TranslateFunc) *model.WorkTemp
if content.Integration != nil {
mwt.Content = append(mwt.Content, model.WorkTemplateContent{
Integration: &model.WorkTemplateIntegration{
ID: content.Integration.ID,
ID: content.Integration.ID,
Recommended: content.Integration.Recommended,
},
})
}
@ -320,7 +321,8 @@ func (p *Playbook) Validate() error {
}
type Integration struct {
ID string `yaml:"id"`
ID string `yaml:"id"`
Recommended bool `yaml:"recommended"`
}
func (i *Integration) Validate() error {

View File

@ -148,12 +148,14 @@ var wt00a1b44a5831c0a3acb14787b3fdd352 = &WorkTemplate{
},
{
Integration: &Integration{
ID: "jira",
ID: "jira",
Recommended: true,
},
},
{
Integration: &Integration{
ID: "github",
ID: "github",
Recommended: true,
},
},
},
@ -214,7 +216,8 @@ var wt5baa68055bf9ea423273662e01ccc575 = &WorkTemplate{
},
{
Integration: &Integration{
ID: "zoom",
ID: "zoom",
Recommended: true,
},
},
},
@ -265,7 +268,8 @@ var wtfeb56bc6a8f277c47b503bd1c92d830e = &WorkTemplate{
},
{
Integration: &Integration{
ID: "jira",
ID: "jira",
Recommended: true,
},
},
},
@ -317,7 +321,8 @@ var wt8d2ef53deac5517eb349dc5de6150196 = &WorkTemplate{
},
{
Integration: &Integration{
ID: "zoom",
ID: "zoom",
Recommended: true,
},
},
},
@ -518,7 +523,8 @@ var wtf7b846d35810f8272eeb9a1a562025b5 = &WorkTemplate{
},
{
Integration: &Integration{
ID: "zoom",
ID: "zoom",
Recommended: true,
},
},
},
@ -570,17 +576,20 @@ var wtb9ab412890c2410c7b49eec8f12e7edc = &WorkTemplate{
},
{
Integration: &Integration{
ID: "jira",
ID: "jira",
Recommended: true,
},
},
{
Integration: &Integration{
ID: "github",
ID: "github",
Recommended: true,
},
},
{
Integration: &Integration{
ID: "zoom",
ID: "zoom",
Recommended: true,
},
},
},
@ -632,7 +641,8 @@ var wt32ab773bfe021e3d4913931041552559 = &WorkTemplate{
},
{
Integration: &Integration{
ID: "zoom",
ID: "zoom",
Recommended: true,
},
},
},

View File

@ -3035,7 +3035,8 @@ func (s SqlChannelStore) Autocomplete(userID, term string, includeDeleted, isGue
sq.Expr("t.id = tm.TeamId"),
sq.Eq{"tm.UserId": userID},
}).
OrderBy("c.DisplayName")
OrderBy("c.DisplayName").
Limit(model.ChannelSearchDefaultLimit)
if !includeDeleted {
query = query.Where(sq.And{
@ -3073,7 +3074,7 @@ func (s SqlChannelStore) Autocomplete(userID, term string, includeDeleted, isGue
channels := model.ChannelListWithTeamData{}
err = s.GetReplicaX().Select(&channels, sql, args...)
if err != nil {
return nil, errors.Wrapf(err, "could not find channel with term=%s", term)
return nil, errors.Wrapf(err, "could not find channel with term=%s", trimInput(term))
}
return channels, nil
}
@ -3186,7 +3187,7 @@ func (s SqlChannelStore) AutocompleteInTeamForSearch(teamID string, userID strin
// query the database
err = s.GetReplicaX().Select(&channels, sql, args...)
if err != nil {
return nil, errors.Wrapf(err, "failed to find Channels with term='%s'", term)
return nil, errors.Wrapf(err, "failed to find Channels with term='%s'", trimInput(term))
}
directChannels, err := s.autocompleteInTeamForSearchDirectMessages(userID, term)
@ -3242,7 +3243,7 @@ func (s SqlChannelStore) autocompleteInTeamForSearchDirectMessages(userID string
// query the channel list from the database using SQLX
channels := model.ChannelList{}
if err := s.GetReplicaX().Select(&channels, sql, args...); err != nil {
return nil, errors.Wrapf(err, "failed to find Channels with term='%s' (%s %% %v)", term, sql, args)
return nil, errors.Wrapf(err, "failed to find Channels with term='%s'", trimInput(term))
}
return channels, nil
@ -3461,7 +3462,7 @@ func (s SqlChannelStore) SearchAllChannels(term string, opts store.ChannelSearch
}
channels := model.ChannelListWithTeamData{}
if err2 := s.GetReplicaX().Select(&channels, queryString, args...); err2 != nil {
return nil, 0, errors.Wrapf(err2, "failed to find Channels with term='%s'", term)
return nil, 0, errors.Wrapf(err2, "failed to find Channels with term='%s'", trimInput(term))
}
var totalCount int64
@ -3474,7 +3475,7 @@ func (s SqlChannelStore) SearchAllChannels(term string, opts store.ChannelSearch
return nil, 0, errors.Wrap(err, "channel_tosql")
}
if err2 := s.GetReplicaX().Get(&totalCount, queryString, args...); err2 != nil {
return nil, 0, errors.Wrapf(err2, "failed to find Channels with term='%s'", term)
return nil, 0, errors.Wrapf(err2, "failed to find Channels with term='%s'", trimInput(term))
}
} else {
totalCount = int64(len(channels))
@ -3651,7 +3652,7 @@ func (s SqlChannelStore) performSearch(searchQuery sq.SelectBuilder, term string
channels := model.ChannelList{}
err = s.GetReplicaX().Select(&channels, sql, args...)
if err != nil {
return channels, errors.Wrapf(err, "failed to find Channels with term='%s'", term)
return channels, errors.Wrapf(err, "failed to find Channels with term='%s'", trimInput(term))
}
return channels, nil
@ -3744,7 +3745,7 @@ func (s SqlChannelStore) SearchGroupChannels(userId, term string) (model.Channel
groupChannels := model.ChannelList{}
if err := s.GetReplicaX().Select(&groupChannels, sql, params...); err != nil {
return nil, errors.Wrapf(err, "failed to find Channels with term='%s' and userId=%s", term, userId)
return nil, errors.Wrapf(err, "failed to find Channels with term='%s' and userId=%s", trimInput(term), userId)
}
return groupChannels, nil
}

View File

@ -681,7 +681,7 @@ func (fs SqlFileInfoStore) Search(paramsList []*model.SearchParams, userId, team
items := []fileInfoWithChannelID{}
err = fs.GetSearchReplicaX().Select(&items, queryString, args...)
if err != nil {
mlog.Warn("Query error searching files.", mlog.Err(err))
mlog.Warn("Query error searching files.", mlog.String("error", trimInput(err.Error())))
// Don't return the error to the caller as it is of no use to the user. Instead return an empty set of search results.
} else {
for _, item := range items {

View File

@ -2075,7 +2075,7 @@ func (s *SqlPostStore) search(teamId string, userId string, params *model.Search
var posts []*model.Post
if err := s.GetSearchReplicaX().Select(&posts, searchQuery, searchQueryArgs...); err != nil {
mlog.Warn("Query error searching posts.", mlog.Err(err))
mlog.Warn("Query error searching posts.", mlog.String("error", trimInput(err.Error())))
// Don't return the error to the caller as it is of no use to the user. Instead return an empty set of search results.
} else {
for _, p := range posts {

View File

@ -233,3 +233,14 @@ func SanitizeDataSource(driverName, dataSource string) (string, error) {
return "", errors.New("invalid drivername. Not postgres or mysql.")
}
}
const maxTokenSize = 50
// trimInput limits the string to a max size to prevent clogging up disk space
// while logging
func trimInput(input string) string {
if len(input) > maxTokenSize {
input = input[:maxTokenSize] + "..."
}
return input
}

View File

@ -115,7 +115,7 @@ func TestChannelStore(t *testing.T, ss store.Store, s SqlStore) {
t.Run("GetGuestCount", func(t *testing.T) { testGetGuestCount(t, ss) })
t.Run("SearchMore", func(t *testing.T) { testChannelStoreSearchMore(t, ss) })
t.Run("SearchInTeam", func(t *testing.T) { testChannelStoreSearchInTeam(t, ss) })
t.Run("Autocomplete", func(t *testing.T) { testAutocomplete(t, ss) })
t.Run("Autocomplete", func(t *testing.T) { testAutocomplete(t, ss, s) })
t.Run("SearchArchivedInTeam", func(t *testing.T) { testChannelStoreSearchArchivedInTeam(t, ss, s) })
t.Run("SearchForUserInTeam", func(t *testing.T) { testChannelStoreSearchForUserInTeam(t, ss) })
t.Run("SearchAllChannels", func(t *testing.T) { testChannelStoreSearchAllChannels(t, ss) })
@ -5986,7 +5986,7 @@ func testChannelStoreSearchInTeam(t *testing.T, ss store.Store) {
}
}
func testAutocomplete(t *testing.T, ss store.Store) {
func testAutocomplete(t *testing.T, ss store.Store, s SqlStore) {
t1 := &model.Team{
DisplayName: "t1",
Name: NewTestId(),
@ -6165,9 +6165,9 @@ func testAutocomplete(t *testing.T, ss store.Store) {
}
for _, testCase := range testCases {
t.Run("Autocomplete/"+testCase.Description, func(t *testing.T) {
channels, err := ss.Channel().Autocomplete(testCase.UserID, testCase.Term, testCase.IncludeDeleted, testCase.IsGuest)
require.NoError(t, err)
t.Run(testCase.Description, func(t *testing.T) {
channels, err2 := ss.Channel().Autocomplete(testCase.UserID, testCase.Term, testCase.IncludeDeleted, testCase.IsGuest)
require.NoError(t, err2)
var gotChannelIds []string
var gotTeamNames []string
for _, ch := range channels {
@ -6178,6 +6178,24 @@ func testAutocomplete(t *testing.T, ss store.Store) {
require.ElementsMatch(t, testCase.ExpectedTeamNames, gotTeamNames, "team names are not as expected")
})
}
t.Run("Limit", func(t *testing.T) {
for i := 0; i < model.ChannelSearchDefaultLimit+10; i++ {
_, err = ss.Channel().Save(&model.Channel{
TeamId: teamID,
DisplayName: "Channel " + strconv.Itoa(i),
Name: NewTestId(),
Type: model.ChannelTypeOpen,
}, -1)
require.NoError(t, err)
}
channels, err := ss.Channel().Autocomplete(m1.UserId, "Chann", false, false)
require.NoError(t, err)
assert.Len(t, channels, model.ChannelSearchDefaultLimit)
})
// Manually truncate Channels table until testlib can handle cleanups
s.GetMasterX().Exec("TRUNCATE Channels")
}
func testChannelStoreSearchForUserInTeam(t *testing.T, ss store.Store) {

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

View File

@ -21,10 +21,6 @@ type Permission struct {
var PermissionInviteUser *Permission
var PermissionAddUserToTeam *Permission
// Deprecated: PermissionCreatePost should be used to determine if a slash command can be executed.
// TODO: Remove in 8.0: https://mattermost.atlassian.net/browse/MM-51274
var PermissionUseSlashCommands *Permission
var PermissionManageSlashCommands *Permission
var PermissionManageOthersSlashCommands *Permission
var PermissionCreatePublicChannel *Permission
@ -393,12 +389,6 @@ func initializePermissions() {
"authentication.permissions.add_user_to_team.description",
PermissionScopeTeam,
}
PermissionUseSlashCommands = &Permission{
"use_slash_commands",
"authentication.permissions.team_use_slash_commands.name",
"authentication.permissions.team_use_slash_commands.description",
PermissionScopeChannel,
}
PermissionManageSlashCommands = &Permission{
"manage_slash_commands",
"authentication.permissions.manage_slash_commands.name",
@ -2318,7 +2308,6 @@ func initializePermissions() {
}
ChannelScopedPermissions := []*Permission{
PermissionUseSlashCommands,
PermissionManagePublicChannelMembers,
PermissionManagePrivateChannelMembers,
PermissionManageChannelRoles,

View File

@ -755,7 +755,6 @@ func MakeDefaultRoles() map[string]*Role {
PermissionEditPost.Id,
PermissionCreatePost.Id,
PermissionUseChannelMentions.Id,
PermissionUseSlashCommands.Id,
},
SchemeManaged: true,
BuiltIn: true,
@ -774,7 +773,6 @@ func MakeDefaultRoles() map[string]*Role {
PermissionGetPublicLink.Id,
PermissionCreatePost.Id,
PermissionUseChannelMentions.Id,
PermissionUseSlashCommands.Id,
PermissionManagePublicChannelProperties.Id,
PermissionDeletePublicChannel.Id,
PermissionManagePrivateChannelProperties.Id,

View File

@ -71,7 +71,6 @@ func TestRolePatchFromChannelModerationsPatch(t *testing.T) {
PermissionManagePublicChannelMembers.Id,
PermissionUploadFile.Id,
PermissionGetPublicLink.Id,
PermissionUseSlashCommands.Id,
}
baseModeratedPermissions := []string{

View File

@ -251,6 +251,8 @@ type AppError struct {
wrapped error
}
const maxErrorLength = 1024
func (er *AppError) Error() string {
var sb strings.Builder
@ -276,7 +278,11 @@ func (er *AppError) Error() string {
sb.WriteString(err.Error())
}
return sb.String()
res := sb.String()
if len(res) > maxErrorLength {
res = res[:maxErrorLength] + "..."
}
return res
}
func (er *AppError) Translate(T i18n.TranslateFunc) {

View File

@ -116,6 +116,13 @@ func TestAppErrorRender(t *testing.T) {
aerr := NewAppError("here", "message", nil, "details", http.StatusTeapot).Wrap(fmt.Errorf("my error (%w)", fmt.Errorf("inner error")))
assert.EqualError(t, aerr, "here: message, details, my error (inner error)")
})
t.Run("MaxLength", func(t *testing.T) {
str := strings.Repeat("error", 65536)
msg := "msg"
aerr := NewAppError("id", msg, nil, str, http.StatusTeapot).Wrap(errors.New(str))
assert.Len(t, aerr.Error(), maxErrorLength+len(msg))
})
}
func TestAppErrorSerialize(t *testing.T) {

View File

@ -69,7 +69,8 @@ type WorkTemplatePlaybook struct {
}
type WorkTemplateIntegration struct {
ID string `json:"id"`
ID string `json:"id"`
Recommended bool `json:"recommended"`
}
type WorkTemplateContent struct {

View File

@ -628,7 +628,7 @@ func MergeInlineText(inlines []Inline) []Inline {
}
func Unescape(markdown string) string {
ret := ""
var ret strings.Builder
position := 0
for position < len(markdown) {
@ -637,27 +637,27 @@ func Unescape(markdown string) string {
switch c {
case '\\':
if position+1 < len(markdown) && isEscapableByte(markdown[position+1]) {
ret += string(markdown[position+1])
ret.WriteByte(markdown[position+1])
position += 2
} else {
ret += `\`
ret.WriteString(`\`)
position++
}
case '&':
position++
if semicolon := strings.IndexByte(markdown[position:], ';'); semicolon == -1 {
ret += "&"
ret.WriteString("&")
} else if s := CharacterReference(markdown[position : position+semicolon]); s != "" {
position += semicolon + 1
ret += s
ret.WriteString(s)
} else {
ret += "&"
ret.WriteString("&")
}
default:
ret += string(c)
ret.WriteRune(c)
position += cSize
}
}
return ret
return ret.String()
}

View File

@ -10,7 +10,7 @@ info:
servers:
- url: http://localhost:8065/plugins/playbooks/api/v0
paths:
/runs:
/plugins/playbooks/api/v0/runs:
get:
summary: List all playbook runs
description: Retrieve a paged list of playbook runs, filtered by team, status, owner, name and/or members, and sorted by ID, name, status, creation date, end date, team or owner ID.
@ -198,7 +198,7 @@ paths:
500:
$ref: "#/components/responses/500"
/runs/dialog:
/plugins/playbooks/api/v0/runs/dialog:
post:
summary: Create a new playbook run from dialog
description: This is an internal endpoint to create a playbook run from the submission of an interactive dialog, filled by a user in the webapp. See [Interactive Dialogs](https://docs.mattermost.com/developer/interactive-dialogs.html) for more information.
@ -276,7 +276,7 @@ paths:
500:
$ref: "#/components/responses/500"
/runs/owners:
/plugins/playbooks/api/v0/runs/owners:
get:
summary: Get all owners
description: Get the owners of all playbook runs, filtered by team.
@ -314,7 +314,7 @@ paths:
500:
$ref: "#/components/responses/500"
/runs/channels:
/plugins/playbooks/api/v0/runs/channels:
get:
summary: Get playbook run channels
description: Get all channels associated with a playbook run, filtered by team, status, owner, name and/or members, and sorted by ID, name, status, creation date, end date, team, or owner ID.
@ -413,7 +413,7 @@ paths:
500:
$ref: "#/components/responses/500"
/runs/checklist-autocomplete:
/plugins/playbooks/api/v0/runs/checklist-autocomplete:
get:
summary: Get autocomplete data for /playbook check
description: This is an internal endpoint used by the autocomplete system to retrieve the data needed to show the list of items that the user can check.
@ -459,7 +459,7 @@ paths:
500:
$ref: "#/components/responses/500"
/runs/channel/{channel_id}:
/plugins/playbooks/api/v0/runs/channel/{channel_id}:
get:
summary: Find playbook run by channel ID
operationId: getPlaybookRunByChannelId
@ -492,7 +492,7 @@ paths:
500:
$ref: "#/components/responses/500"
/runs/{id}:
/plugins/playbooks/api/v0/runs/{id}:
get:
summary: Get a playbook run
operationId: getPlaybookRun
@ -565,7 +565,7 @@ paths:
500:
$ref: "#/components/responses/500"
/runs/{id}/metadata:
/plugins/playbooks/api/v0/runs/{id}/metadata:
get:
summary: Get playbook run metadata
operationId: getPlaybookRunMetadata
@ -598,7 +598,7 @@ paths:
500:
$ref: "#/components/responses/500"
/runs/{id}/end:
/plugins/playbooks/api/v0/runs/{id}/end:
put:
summary: End a playbook run
operationId: endPlaybookRun
@ -651,7 +651,7 @@ paths:
500:
$ref: "#/components/responses/500"
/runs/{id}/restart:
/plugins/playbooks/api/v0/runs/{id}/restart:
put:
summary: Restart a playbook run
operationId: restartPlaybookRun
@ -678,7 +678,7 @@ paths:
500:
$ref: "#/components/responses/500"
/runs/{id}/status:
/plugins/playbooks/api/v0/runs/{id}/status:
post:
summary: Update a playbook run's status
operationId: status
@ -728,7 +728,7 @@ paths:
500:
$ref: "#/components/responses/500"
/runs/{id}/finish:
/plugins/playbooks/api/v0/runs/{id}/finish:
put:
summary: Finish a playbook
operationId: finish
@ -755,7 +755,7 @@ paths:
500:
$ref: "#/components/responses/500"
/runs/{id}/owner:
/plugins/playbooks/api/v0/runs/{id}/owner:
post:
summary: Update playbook run owner
operationId: changeOwner
@ -800,7 +800,7 @@ paths:
500:
$ref: "#/components/responses/500"
/runs/{id}/next-stage-dialog:
/plugins/playbooks/api/v0/runs/{id}/next-stage-dialog:
post:
summary: Go to next stage from dialog
description: This is an internal endpoint to go to the next stage via a confirmation dialog, submitted by a user in the webapp.
@ -835,7 +835,7 @@ paths:
500:
$ref: "#/components/responses/500"
/runs/{id}/checklists/{checklist}/add:
/plugins/playbooks/api/v0/runs/{id}/checklists/{checklist}/add:
put:
summary: Add an item to a playbook run's checklist
description: The most common pattern to add a new item is to only send its title as the request payload. By default, it is an open item, with no assignee and no slash command.
@ -923,7 +923,7 @@ paths:
schema:
$ref: "#/components/schemas/Error"
/runs/{id}/checklists/{checklist}/reorder:
/plugins/playbooks/api/v0/runs/{id}/checklists/{checklist}/reorder:
put:
summary: Reorder an item in a playbook run's checklist
operationId: reoderChecklistItem
@ -978,7 +978,7 @@ paths:
500:
$ref: "#/components/responses/500"
/runs/{id}/checklists/{checklist}/item/{item}:
/plugins/playbooks/api/v0/runs/{id}/checklists/{checklist}/item/{item}:
put:
summary: Update an item of a playbook run's checklist
description: Update the title and the slash command of an item in one of the playbook run's checklists.
@ -1083,7 +1083,7 @@ paths:
500:
$ref: "#/components/responses/500"
/runs/{id}/checklists/{checklist}/item/{item}/state:
/plugins/playbooks/api/v0/runs/{id}/checklists/{checklist}/item/{item}/state:
put:
summary: Update the state of an item
operationId: itemSetState
@ -1145,7 +1145,7 @@ paths:
500:
$ref: "#/components/responses/500"
/runs/{id}/checklists/{checklist}/item/{item}/assignee:
/plugins/playbooks/api/v0/runs/{id}/checklists/{checklist}/item/{item}/assignee:
put:
summary: Update the assignee of an item
operationId: itemSetAssignee
@ -1202,7 +1202,7 @@ paths:
500:
$ref: "#/components/responses/500"
/runs/{id}/checklists/{checklist}/item/{item}/run:
/plugins/playbooks/api/v0/runs/{id}/checklists/{checklist}/item/{item}/run:
put:
summary: Run an item's slash command
operationId: itemRun
@ -1249,7 +1249,7 @@ paths:
500:
$ref: "#/components/responses/500"
/runs/{id}/timeline/{event_id}/:
/plugins/playbooks/api/v0/runs/{id}/timeline/{event_id}/:
delete:
summary: Remove a timeline event from the playbook run
operationId: removeTimelineEvent
@ -1285,7 +1285,7 @@ paths:
500:
$ref: "#/components/responses/500"
/playbooks:
/plugins/playbooks/api/v0/playbooks:
get:
summary: List all playbooks
description: Retrieve a paged list of playbooks, filtered by team, and sorted by title, number of stages or number of steps.
@ -1562,7 +1562,7 @@ paths:
500:
$ref: "#/components/responses/500"
/playbooks/{id}:
/plugins/playbooks/api/v0/playbooks/{id}:
get:
summary: Get a playbook
operationId: getPlaybook
@ -1658,7 +1658,7 @@ paths:
500:
$ref: "#/components/responses/500"
/playbooks/{id}/autofollows:
/plugins/playbooks/api/v0/playbooks/{id}/autofollows:
get:
summary: Get the list of followers' user IDs of a playbook
operationId: getAutoFollows

View File

@ -10,14 +10,12 @@ import (
"net/url"
"strconv"
"strings"
"time"
"github.com/gorilla/mux"
"github.com/mattermost/mattermost-server/server/v8/model"
"github.com/mattermost/mattermost-server/server/v8/playbooks/server/app"
"github.com/mattermost/mattermost-server/server/v8/playbooks/server/config"
"github.com/mattermost/mattermost-server/server/v8/playbooks/server/playbooks"
"github.com/mattermost/mattermost-server/server/v8/playbooks/server/timeutils"
"github.com/pkg/errors"
"github.com/sirupsen/logrus"
)
@ -693,14 +691,8 @@ func (h *PlaybookHandler) getTopPlaybooksForUser(c *Context, w http.ResponseWrit
h.HandleErrorWithCode(w, c.logger, http.StatusBadRequest, "unable to get user", err)
return
}
timezone, err := timeutils.GetUserTimezone(user)
if err != nil {
h.HandleErrorWithCode(w, c.logger, http.StatusBadRequest, "unable to get user timezone", err)
return
}
if timezone == nil {
timezone = time.Now().UTC().Location()
}
timezone := user.GetTimezoneLocation()
// get unix time for duration
startTime, appErr := model.GetStartOfDayForTimeRange(timeRange, timezone)
if appErr != nil {
@ -750,14 +742,8 @@ func (h *PlaybookHandler) getTopPlaybooksForTeam(c *Context, w http.ResponseWrit
h.HandleErrorWithCode(w, c.logger, http.StatusBadRequest, "unable to get user", err)
return
}
timezone, err := timeutils.GetUserTimezone(user)
if err != nil {
h.HandleErrorWithCode(w, c.logger, http.StatusBadRequest, "unable to get user timezone", err)
return
}
if timezone == nil {
timezone = time.Now().UTC().Location()
}
timezone := user.GetTimezoneLocation()
// get unix time for duration
startTime, appErr := model.GetStartOfDayForTimeRange(timeRange, timezone)
if appErr != nil {

View File

@ -0,0 +1 @@
A collection of ad-hoc scripts to upgrade between ESRs.

View File

@ -0,0 +1,160 @@
/* Product notices are controlled externally, via the mattermost/notices repository.
When there is a new notice specified there, the server may have time, right after
the migration and before it is shut down, to download it and modify the
ProductNoticeViewState table, adding a row for all users that have not seen it or
removing old notices that no longer need to be shown. This can happen in the
UpdateProductNotices function that is executed periodically to update the notices
cache. The script will never do this, so we need to remove all rows in that table
to avoid any unwanted diff. */
DELETE FROM ProductNoticeViewState;
/* The script does not update the Systems row that tracks the version, so it is manually updated
here so that it does not show in the diff. */
UPDATE Systems SET Value = '6.3.0' WHERE Name = 'Version';
/* The script does not update the schema_migrations table, which is automatically used by the
migrate library to track the version, so we drop it altogether to avoid spurious errors in
the diff */
DROP TABLE IF EXISTS schema_migrations;
/* Migration 000054_create_crt_channelmembership_count.up sets
ChannelMembers.LastUpdateAt to the results of SELECT ROUND(UNIX_TIMESTAMP(NOW(3))*1000)
which will be different each time the migration is run. Thus, the column will always be
different when comparing the server and script migrations. To bypass this, we update all
rows in ChannelMembers so that they contain the same value for such column. */
UPDATE ChannelMembers SET LastUpdateAt = 1;
/* Migration 000055_create_crt_thread_count_and_unreads.up sets
ThreadMemberships.LastUpdated to the results of SELECT ROUND(UNIX_TIMESTAMP(NOW(3))*1000)
which will be different each time the migration is run. Thus, the column will always be
different when comparing the server and script migrations. To bypass this, we update all
rows in ThreadMemberships so that they contain the same value for such column. */
UPDATE ThreadMemberships SET LastUpdated = 1;
/* The security update check in the server may update the LastSecurityTime system value. To
avoid any spurious difference in the migrations, we update it to a fixed value.
NOTE(review): Systems.Value holds string data elsewhere in this script ('6.3.0' above);
the integer 1 here relies on MySQL's implicit int-to-string coercion — confirm intended. */
UPDATE Systems SET Value = 1 WHERE Name = 'LastSecurityTime';
/* The server migration contains an in-app migration that adds new roles for Playbooks:
doPlaybooksRolesCreationMigration, defined in https://github.com/mattermost/mattermost-server/blob/282bd351e3767dcfd8c8340da2e0915197c0dbcb/app/migrations.go#L345-L469
The roles are the ones defined in https://github.com/mattermost/mattermost-server/blob/282bd351e3767dcfd8c8340da2e0915197c0dbcb/model/role.go#L874-L929
When this migration finishes, it also adds a new row to the Systems table with the key of the migration.
This in-app migration does not happen in the script, so we remove those rows here. */
DELETE FROM Roles WHERE Name = 'playbook_member';
DELETE FROM Roles WHERE Name = 'playbook_admin';
DELETE FROM Roles WHERE Name = 'run_member';
DELETE FROM Roles WHERE Name = 'run_admin';
DELETE FROM Systems WHERE Name = 'PlaybookRolesCreationMigrationComplete';
/* The server migration contains an in-app migration that adds playbooks permissions to certain roles:
getAddPlaybooksPermissions, defined in https://github.com/mattermost/mattermost-server/blob/f9b996934cabf9a8fad5901835e7e9b418917402/app/permissions_migrations.go#L918-L951
The specific roles ('%playbook%') are removed in the procedure below, but the migrations also add a new row to the Systems table marking the migration as complete.
This in-app migration does not happen in the script, so we remove those rows here. */
DELETE FROM Systems WHERE Name = 'playbooks_permissions';
/* The rest of this script defines and executes a procedure to update the Roles table. It performs several changes:
1. Set the UpdateAt column of all rows to a fixed value, so that the server migration changes to this column
do not appear in the diff.
2. Remove the set of specific permissions added in the server migration that is not covered by the script, as
this logic happens all in-app after the normal DB migrations.
3. Set a consistent order in the Permissions column, which is modelled as a space-separated string containing each of
the different permissions each role has. This change is the reason why we need a complex procedure, which creates
a temporary table that pairs each single permission to its corresponding ID. So if the Roles table contains two
rows like:
Id: 'abcd'
Permissions: 'view_team read_public_channel invite_user'
Id: 'efgh'
Permissions: 'view_team create_emojis'
then the new temporary table will contain five rows like:
Id: 'abcd'
Permissions: 'view_team'
Id: 'abcd'
Permissions: 'read_public_channel'
Id: 'abcd'
Permissions: 'invite_user'
Id: 'efgh'
Permissions: 'view_team'
Id: 'efgh'
Permissions: 'create_emojis'
*/
/* Drop leftovers from a previous (possibly aborted) run before recreating them. */
DROP PROCEDURE IF EXISTS splitPermissions;
DROP PROCEDURE IF EXISTS sortAndFilterPermissionsInRoles;
DROP TEMPORARY TABLE IF EXISTS temp_roles;
CREATE TEMPORARY TABLE temp_roles(id varchar(26), permission longtext);
DELIMITER //
/* Auxiliary procedure that splits the space-separated permissions string into single rows that are inserted
in the temporary temp_roles table along with their corresponding ID.
id: the Roles.Id the permissions belong to.
permissionsString: the space-separated Permissions value for that role. */
CREATE PROCEDURE splitPermissions(
IN id varchar(26),
IN permissionsString longtext
)
BEGIN
DECLARE idx INT DEFAULT 0;
SELECT TRIM(permissionsString) INTO permissionsString;
SELECT LOCATE(' ', permissionsString) INTO idx;
/* Peel one permission off the front of the string per iteration. */
WHILE idx > 0 DO
/* Explicit column list, consistent with the final INSERT below, so the
statement keeps working if temp_roles ever gains extra columns. */
INSERT INTO temp_roles(id, permission) SELECT id, TRIM(LEFT(permissionsString, idx));
SELECT SUBSTR(permissionsString, idx+1) INTO permissionsString;
SELECT LOCATE(' ', permissionsString) INTO idx;
END WHILE;
/* Insert the last (or only) permission remaining in the string. */
INSERT INTO temp_roles(id, permission) VALUES(id, TRIM(permissionsString));
END; //
/* Main procedure that does update the Roles table */
CREATE PROCEDURE sortAndFilterPermissionsInRoles()
BEGIN
DECLARE done INT DEFAULT FALSE;
DECLARE rolesId varchar(26) DEFAULT '';
DECLARE rolesPermissions longtext DEFAULT '';
DECLARE cur1 CURSOR FOR SELECT Id, Permissions FROM Roles;
/* Ends the cursor loop below when the last Roles row has been fetched. */
DECLARE CONTINUE HANDLER FOR NOT FOUND SET done = TRUE;
/* 1. Set a fixed value in the UpdateAt column for all rows in Roles table */
UPDATE Roles SET UpdateAt = 1;
/* Call splitPermissions for every row in the Roles table, thus populating the
temp_roles table. */
OPEN cur1;
read_loop: LOOP
FETCH cur1 INTO rolesId, rolesPermissions;
IF done THEN
LEAVE read_loop;
END IF;
CALL splitPermissions(rolesId, rolesPermissions);
END LOOP;
CLOSE cur1;
/* 2. Filter out the new permissions added by the in-app migrations.
Only the first pattern needs LIKE; the others are exact names, so plain
equality states the intent directly (the values were TRIMmed on insert). */
DELETE FROM temp_roles WHERE permission LIKE '%playbook%';
DELETE FROM temp_roles WHERE permission = 'run_create';
DELETE FROM temp_roles WHERE permission = 'run_manage_members';
DELETE FROM temp_roles WHERE permission = 'run_manage_properties';
DELETE FROM temp_roles WHERE permission = 'run_view';
/* Temporarily set to the maximum permitted value, since the call to group_concat
below needs a value bigger than the default */
SET group_concat_max_len = 18446744073709551615;
/* 3. Update the Permissions column in the Roles table with the filtered, sorted permissions,
concatenated again as a space-separated string */
UPDATE
Roles INNER JOIN (
SELECT temp_roles.id as Id, TRIM(group_concat(temp_roles.permission ORDER BY temp_roles.permission SEPARATOR ' ')) as Permissions
FROM Roles JOIN temp_roles ON Roles.Id = temp_roles.id
GROUP BY temp_roles.id
) AS Sorted
ON Roles.Id = Sorted.Id
SET Roles.Permissions = Sorted.Permissions;
/* Reset group_concat_max_len to its default value (1024) */
SET group_concat_max_len = 1024;
END; //
DELIMITER ;
CALL sortAndFilterPermissionsInRoles();
DROP TEMPORARY TABLE IF EXISTS temp_roles;

View File

@ -0,0 +1,695 @@
/* ==> mysql/000054_create_crt_channelmembership_count.up.sql <== */
/* fixCRTChannelMembershipCounts fixes the channel counts, i.e. the total message count,
total root message count, mention count, and mention count in root messages for users
who have viewed the channel after the last post in the channel */
DELIMITER //
CREATE PROCEDURE MigrateCRTChannelMembershipCounts ()
BEGIN
/* Guard: run at most once; a marker row in Systems records completion. */
IF(
SELECT
EXISTS (
SELECT
* FROM Systems
WHERE
Name = 'CRTChannelMembershipCountsMigrationComplete') = 0) THEN
UPDATE
ChannelMembers
INNER JOIN Channels ON Channels.Id = ChannelMembers.ChannelId SET
MentionCount = 0, MentionCountRoot = 0, MsgCount = Channels.TotalMsgCount, MsgCountRoot = Channels.TotalMsgCountRoot, LastUpdateAt = (
SELECT
(SELECT ROUND(UNIX_TIMESTAMP(NOW(3))*1000)))
WHERE
ChannelMembers.LastViewedAt >= Channels.LastPostAt;
/* Record completion so a rerun is a no-op. */
INSERT INTO Systems
VALUES('CRTChannelMembershipCountsMigrationComplete', 'true');
END IF;
END//
DELIMITER ;
CALL MigrateCRTChannelMembershipCounts ();
DROP PROCEDURE IF EXISTS MigrateCRTChannelMembershipCounts;
/* ==> mysql/000055_create_crt_thread_count_and_unreads.up.sql <== */
/* fixCRTThreadCountsAndUnreads Marks threads as read for users where the last
reply time of the thread is earlier than the time the user viewed the channel.
Marking a thread means setting the mention count to zero and setting the
last viewed at time of the thread as the last viewed at time
of the channel */
DELIMITER //
CREATE PROCEDURE MigrateCRTThreadCountsAndUnreads ()
BEGIN
/* Guard: run at most once; a marker row in Systems records completion. */
IF(SELECT EXISTS(SELECT * FROM Systems WHERE Name = 'CRTThreadCountsAndUnreadsMigrationComplete') = 0) THEN
UPDATE
ThreadMemberships
INNER JOIN (
SELECT
PostId,
UserId,
ChannelMembers.LastViewedAt AS CM_LastViewedAt,
Threads.LastReplyAt
FROM
Threads
INNER JOIN ChannelMembers ON ChannelMembers.ChannelId = Threads.ChannelId
WHERE
Threads.LastReplyAt <= ChannelMembers.LastViewedAt) AS q ON ThreadMemberships.Postid = q.PostId
AND ThreadMemberships.UserId = q.UserId SET LastViewed = q.CM_LastViewedAt + 1, UnreadMentions = 0, LastUpdated = (
SELECT
(SELECT ROUND(UNIX_TIMESTAMP(NOW(3))*1000)));
/* Record completion so a rerun is a no-op. */
INSERT INTO Systems
VALUES('CRTThreadCountsAndUnreadsMigrationComplete', 'true');
END IF;
END//
DELIMITER ;
CALL MigrateCRTThreadCountsAndUnreads ();
DROP PROCEDURE IF EXISTS MigrateCRTThreadCountsAndUnreads;
/* ==> mysql/000056_upgrade_channels_v6.0.up.sql <== */
/* Add composite index (TeamId, DisplayName) on Channels if missing. */
SET @preparedStatement = (SELECT IF(
(
SELECT COUNT(*) FROM INFORMATION_SCHEMA.STATISTICS
WHERE table_name = 'Channels'
AND table_schema = DATABASE()
AND index_name = 'idx_channels_team_id_display_name'
) > 0,
'SELECT 1',
'CREATE INDEX idx_channels_team_id_display_name ON Channels(TeamId, DisplayName);'
));
PREPARE createIndexIfNotExists FROM @preparedStatement;
EXECUTE createIndexIfNotExists;
DEALLOCATE PREPARE createIndexIfNotExists;
/* Add composite index (TeamId, Type) on Channels if missing. */
SET @preparedStatement = (SELECT IF(
(
SELECT COUNT(*) FROM INFORMATION_SCHEMA.STATISTICS
WHERE table_name = 'Channels'
AND table_schema = DATABASE()
AND index_name = 'idx_channels_team_id_type'
) > 0,
'SELECT 1',
'CREATE INDEX idx_channels_team_id_type ON Channels(TeamId, Type);'
));
PREPARE createIndexIfNotExists FROM @preparedStatement;
EXECUTE createIndexIfNotExists;
DEALLOCATE PREPARE createIndexIfNotExists;
/* Drop the superseded single-column index idx_channels_team_id if present. */
SET @preparedStatement = (SELECT IF(
(
SELECT COUNT(*) FROM INFORMATION_SCHEMA.STATISTICS
WHERE table_name = 'Channels'
AND table_schema = DATABASE()
AND index_name = 'idx_channels_team_id'
) > 0,
'DROP INDEX idx_channels_team_id ON Channels;',
'SELECT 1'
));
PREPARE removeIndexIfExists FROM @preparedStatement;
EXECUTE removeIndexIfExists;
DEALLOCATE PREPARE removeIndexIfExists;
/* ==> mysql/000057_upgrade_command_webhooks_v6.0.up.sql <== */
DELIMITER //
/* Copies the legacy ParentId into RootId on CommandWebhooks, but only while the
legacy column still exists. */
CREATE PROCEDURE MigrateRootId_CommandWebhooks () BEGIN DECLARE ParentId_EXIST INT;
SELECT COUNT(*)
FROM INFORMATION_SCHEMA.COLUMNS
WHERE TABLE_NAME = 'CommandWebhooks'
AND table_schema = DATABASE()
AND COLUMN_NAME = 'ParentId' INTO ParentId_EXIST;
IF(ParentId_EXIST > 0) THEN
UPDATE CommandWebhooks SET RootId = ParentId WHERE RootId = '' AND RootId != ParentId;
END IF;
END//
DELIMITER ;
CALL MigrateRootId_CommandWebhooks ();
DROP PROCEDURE IF EXISTS MigrateRootId_CommandWebhooks;
/* Drop the legacy ParentId column if it still exists. */
SET @preparedStatement = (SELECT IF(
(
SELECT COUNT(*) FROM INFORMATION_SCHEMA.COLUMNS
WHERE table_name = 'CommandWebhooks'
AND table_schema = DATABASE()
AND column_name = 'ParentId'
) > 0,
'ALTER TABLE CommandWebhooks DROP COLUMN ParentId;',
'SELECT 1'
));
PREPARE alterIfExists FROM @preparedStatement;
EXECUTE alterIfExists;
DEALLOCATE PREPARE alterIfExists;
/* ==> mysql/000058_upgrade_channelmembers_v6.0.up.sql <== */
/* Convert ChannelMembers.NotifyProps to JSON if it is not already. */
SET @preparedStatement = (SELECT IF(
(
SELECT COUNT(*) FROM INFORMATION_SCHEMA.COLUMNS
WHERE table_name = 'ChannelMembers'
AND table_schema = DATABASE()
AND column_name = 'NotifyProps'
AND column_type != 'JSON'
) > 0,
'ALTER TABLE ChannelMembers MODIFY COLUMN NotifyProps JSON;',
'SELECT 1'
));
PREPARE alterIfExists FROM @preparedStatement;
EXECUTE alterIfExists;
DEALLOCATE PREPARE alterIfExists;
/* Drop the superseded index idx_channelmembers_user_id if present. */
SET @preparedStatement = (SELECT IF(
(
SELECT COUNT(*) FROM INFORMATION_SCHEMA.STATISTICS
WHERE table_name = 'ChannelMembers'
AND table_schema = DATABASE()
AND index_name = 'idx_channelmembers_user_id'
) > 0,
'DROP INDEX idx_channelmembers_user_id ON ChannelMembers;',
'SELECT 1'
));
PREPARE removeIndexIfExists FROM @preparedStatement;
EXECUTE removeIndexIfExists;
DEALLOCATE PREPARE removeIndexIfExists;
/* Add composite index (UserId, ChannelId, LastViewedAt) if missing. */
SET @preparedStatement = (SELECT IF(
(
SELECT COUNT(*) FROM INFORMATION_SCHEMA.STATISTICS
WHERE table_name = 'ChannelMembers'
AND table_schema = DATABASE()
AND index_name = 'idx_channelmembers_user_id_channel_id_last_viewed_at'
) > 0,
'SELECT 1',
'CREATE INDEX idx_channelmembers_user_id_channel_id_last_viewed_at ON ChannelMembers(UserId, ChannelId, LastViewedAt);'
));
PREPARE createIndexIfNotExists FROM @preparedStatement;
EXECUTE createIndexIfNotExists;
DEALLOCATE PREPARE createIndexIfNotExists;
/* Add composite index (ChannelId, SchemeGuest, UserId) if missing. */
SET @preparedStatement = (SELECT IF(
(
SELECT COUNT(*) FROM INFORMATION_SCHEMA.STATISTICS
WHERE table_name = 'ChannelMembers'
AND table_schema = DATABASE()
AND index_name = 'idx_channelmembers_channel_id_scheme_guest_user_id'
) > 0,
'SELECT 1',
'CREATE INDEX idx_channelmembers_channel_id_scheme_guest_user_id ON ChannelMembers(ChannelId, SchemeGuest, UserId);'
));
PREPARE createIndexIfNotExists FROM @preparedStatement;
EXECUTE createIndexIfNotExists;
DEALLOCATE PREPARE createIndexIfNotExists;
/* ==> mysql/000059_upgrade_users_v6.0.up.sql <== */
/* Convert Users.Props to JSON if it is not already. */
SET @preparedStatement = (SELECT IF(
(
SELECT COUNT(*) FROM INFORMATION_SCHEMA.COLUMNS
WHERE table_name = 'Users'
AND table_schema = DATABASE()
AND column_name = 'Props'
AND column_type != 'JSON'
) > 0,
'ALTER TABLE Users MODIFY COLUMN Props JSON;',
'SELECT 1'
));
PREPARE alterIfExists FROM @preparedStatement;
EXECUTE alterIfExists;
DEALLOCATE PREPARE alterIfExists;
/* Convert Users.NotifyProps to JSON if it is not already. */
SET @preparedStatement = (SELECT IF(
(
SELECT COUNT(*) FROM INFORMATION_SCHEMA.COLUMNS
WHERE table_name = 'Users'
AND table_schema = DATABASE()
AND column_name = 'NotifyProps'
AND column_type != 'JSON'
) > 0,
'ALTER TABLE Users MODIFY COLUMN NotifyProps JSON;',
'SELECT 1'
));
PREPARE alterIfExists FROM @preparedStatement;
EXECUTE alterIfExists;
DEALLOCATE PREPARE alterIfExists;
/* Drop any column default on Users.Timezone, required before the JSON
conversion below (JSON columns cannot carry a literal default). */
SET @preparedStatement = (SELECT IF(
(
SELECT COUNT(*) FROM INFORMATION_SCHEMA.COLUMNS
WHERE table_name = 'Users'
AND table_schema = DATABASE()
AND column_name = 'Timezone'
AND column_default IS NOT NULL
) > 0,
'ALTER TABLE Users ALTER Timezone DROP DEFAULT;',
'SELECT 1'
));
PREPARE alterIfExists FROM @preparedStatement;
EXECUTE alterIfExists;
DEALLOCATE PREPARE alterIfExists;
/* Convert Users.Timezone to JSON if it is not already. */
SET @preparedStatement = (SELECT IF(
(
SELECT COUNT(*) FROM INFORMATION_SCHEMA.COLUMNS
WHERE table_name = 'Users'
AND table_schema = DATABASE()
AND column_name = 'Timezone'
AND column_type != 'JSON'
) > 0,
'ALTER TABLE Users MODIFY COLUMN Timezone JSON;',
'SELECT 1'
));
PREPARE alterIfExists FROM @preparedStatement;
EXECUTE alterIfExists;
DEALLOCATE PREPARE alterIfExists;
/* Convert Users.Roles to text if it is not already. */
SET @preparedStatement = (SELECT IF(
(
SELECT COUNT(*) FROM INFORMATION_SCHEMA.COLUMNS
WHERE table_name = 'Users'
AND table_schema = DATABASE()
AND column_name = 'Roles'
AND column_type != 'text'
) > 0,
'ALTER TABLE Users MODIFY COLUMN Roles text;',
'SELECT 1'
));
PREPARE alterIfExists FROM @preparedStatement;
EXECUTE alterIfExists;
DEALLOCATE PREPARE alterIfExists;
/* ==> mysql/000060_upgrade_jobs_v6.0.up.sql <== */
/* Convert Jobs.Data to JSON if it is not already. */
SET @preparedStatement = (SELECT IF(
(
SELECT COUNT(*) FROM INFORMATION_SCHEMA.COLUMNS
WHERE table_name = 'Jobs'
AND table_schema = DATABASE()
AND column_name = 'Data'
AND column_type != 'JSON'
) > 0,
'ALTER TABLE Jobs MODIFY COLUMN Data JSON;',
'SELECT 1'
));
PREPARE alterIfExists FROM @preparedStatement;
EXECUTE alterIfExists;
DEALLOCATE PREPARE alterIfExists;
/* ==> mysql/000061_upgrade_link_metadata_v6.0.up.sql <== */
/* Convert LinkMetadata.Data to JSON if it is not already. */
SET @preparedStatement = (SELECT IF(
(
SELECT COUNT(*) FROM INFORMATION_SCHEMA.COLUMNS
WHERE table_name = 'LinkMetadata'
AND table_schema = DATABASE()
AND column_name = 'Data'
AND column_type != 'JSON'
) > 0,
'ALTER TABLE LinkMetadata MODIFY COLUMN Data JSON;',
'SELECT 1'
));
PREPARE alterIfExists FROM @preparedStatement;
EXECUTE alterIfExists;
DEALLOCATE PREPARE alterIfExists;
/* ==> mysql/000062_upgrade_sessions_v6.0.up.sql <== */
/* Convert Sessions.Props to JSON if it is not already. */
SET @preparedStatement = (SELECT IF(
(
SELECT COUNT(*) FROM INFORMATION_SCHEMA.COLUMNS
WHERE table_name = 'Sessions'
AND table_schema = DATABASE()
AND column_name = 'Props'
AND column_type != 'JSON'
) > 0,
'ALTER TABLE Sessions MODIFY COLUMN Props JSON;',
'SELECT 1'
));
PREPARE alterIfExists FROM @preparedStatement;
EXECUTE alterIfExists;
DEALLOCATE PREPARE alterIfExists;
/* ==> mysql/000063_upgrade_threads_v6.0.up.sql <== */
/* Convert Threads.Participants to JSON if it is not already. */
SET @preparedStatement = (SELECT IF(
(
SELECT COUNT(*) FROM INFORMATION_SCHEMA.COLUMNS
WHERE table_name = 'Threads'
AND table_schema = DATABASE()
AND column_name = 'Participants'
AND column_type != 'JSON'
) > 0,
'ALTER TABLE Threads MODIFY COLUMN Participants JSON;',
'SELECT 1'
));
PREPARE alterIfExists FROM @preparedStatement;
EXECUTE alterIfExists;
DEALLOCATE PREPARE alterIfExists;
/* Add composite index (ChannelId, LastReplyAt) on Threads if missing. */
SET @preparedStatement = (SELECT IF(
(
SELECT COUNT(*) FROM INFORMATION_SCHEMA.STATISTICS
WHERE table_name = 'Threads'
AND table_schema = DATABASE()
AND index_name = 'idx_threads_channel_id_last_reply_at'
) > 0,
'SELECT 1',
'CREATE INDEX idx_threads_channel_id_last_reply_at ON Threads(ChannelId, LastReplyAt);'
));
PREPARE createIndexIfNotExists FROM @preparedStatement;
EXECUTE createIndexIfNotExists;
DEALLOCATE PREPARE createIndexIfNotExists;
/* Drop the superseded single-column index idx_threads_channel_id if present. */
SET @preparedStatement = (SELECT IF(
(
SELECT COUNT(*) FROM INFORMATION_SCHEMA.STATISTICS
WHERE table_name = 'Threads'
AND table_schema = DATABASE()
AND index_name = 'idx_threads_channel_id'
) > 0,
'DROP INDEX idx_threads_channel_id ON Threads;',
'SELECT 1'
));
PREPARE removeIndexIfExists FROM @preparedStatement;
EXECUTE removeIndexIfExists;
DEALLOCATE PREPARE removeIndexIfExists;
/* ==> mysql/000064_upgrade_status_v6.0.up.sql <== */
/* Add composite index (Status, DNDEndTime) on Status if missing. */
SET @preparedStatement = (SELECT IF(
(
SELECT COUNT(*) FROM INFORMATION_SCHEMA.STATISTICS
WHERE table_name = 'Status'
AND table_schema = DATABASE()
AND index_name = 'idx_status_status_dndendtime'
) > 0,
'SELECT 1',
'CREATE INDEX idx_status_status_dndendtime ON Status(Status, DNDEndTime);'
));
PREPARE createIndexIfNotExists FROM @preparedStatement;
EXECUTE createIndexIfNotExists;
DEALLOCATE PREPARE createIndexIfNotExists;
/* Drop the superseded single-column index idx_status_status if present. */
SET @preparedStatement = (SELECT IF(
(
SELECT COUNT(*) FROM INFORMATION_SCHEMA.STATISTICS
WHERE table_name = 'Status'
AND table_schema = DATABASE()
AND index_name = 'idx_status_status'
) > 0,
'DROP INDEX idx_status_status ON Status;',
'SELECT 1'
));
PREPARE removeIndexIfExists FROM @preparedStatement;
EXECUTE removeIndexIfExists;
DEALLOCATE PREPARE removeIndexIfExists;
/* ==> mysql/000065_upgrade_groupchannels_v6.0.up.sql <== */
/* Add index on GroupChannels.SchemeAdmin if missing. */
SET @preparedStatement = (SELECT IF(
(
SELECT COUNT(*) FROM INFORMATION_SCHEMA.STATISTICS
WHERE table_name = 'GroupChannels'
AND table_schema = DATABASE()
AND index_name = 'idx_groupchannels_schemeadmin'
) > 0,
'SELECT 1',
'CREATE INDEX idx_groupchannels_schemeadmin ON GroupChannels(SchemeAdmin);'
));
PREPARE createIndexIfNotExists FROM @preparedStatement;
EXECUTE createIndexIfNotExists;
DEALLOCATE PREPARE createIndexIfNotExists;
/* ==> mysql/000066_upgrade_posts_v6.0.up.sql <== */
DELIMITER //
/* Copies the legacy ParentId into RootId, converts FileIds/Props column types,
and drops ParentId — all only while a conversion is still pending, so a rerun
is a no-op. */
CREATE PROCEDURE MigrateRootId_Posts ()
BEGIN
DECLARE ParentId_EXIST INT;
DECLARE Alter_FileIds INT;
DECLARE Alter_Props INT;
/* Does the legacy ParentId column still exist? */
SELECT COUNT(*) FROM INFORMATION_SCHEMA.COLUMNS
WHERE TABLE_NAME = 'Posts'
AND table_schema = DATABASE()
AND COLUMN_NAME = 'ParentId' INTO ParentId_EXIST;
/* Does FileIds still need converting to text? */
SELECT COUNT(*) FROM INFORMATION_SCHEMA.COLUMNS
WHERE table_name = 'Posts'
AND table_schema = DATABASE()
AND column_name = 'FileIds'
AND column_type != 'text' INTO Alter_FileIds;
/* Does Props still need converting to JSON? */
SELECT COUNT(*) FROM INFORMATION_SCHEMA.COLUMNS
WHERE table_name = 'Posts'
AND table_schema = DATABASE()
AND column_name = 'Props'
AND column_type != 'JSON' INTO Alter_Props;
IF (Alter_Props OR Alter_FileIds) THEN
IF(ParentId_EXIST > 0) THEN
UPDATE Posts SET RootId = ParentId WHERE RootId = '' AND RootId != ParentId;
ALTER TABLE Posts MODIFY COLUMN FileIds text, MODIFY COLUMN Props JSON, DROP COLUMN ParentId;
ELSE
ALTER TABLE Posts MODIFY COLUMN FileIds text, MODIFY COLUMN Props JSON;
END IF;
END IF;
END//
DELIMITER ;
CALL MigrateRootId_Posts ();
DROP PROCEDURE IF EXISTS MigrateRootId_Posts;
/* Add composite index (RootId, DeleteAt) on Posts if missing. */
SET @preparedStatement = (SELECT IF(
(
SELECT COUNT(*) FROM INFORMATION_SCHEMA.STATISTICS
WHERE table_name = 'Posts'
AND table_schema = DATABASE()
AND index_name = 'idx_posts_root_id_delete_at'
) > 0,
'SELECT 1',
'CREATE INDEX idx_posts_root_id_delete_at ON Posts(RootId, DeleteAt);'
));
PREPARE createIndexIfNotExists FROM @preparedStatement;
EXECUTE createIndexIfNotExists;
DEALLOCATE PREPARE createIndexIfNotExists;
/* Drop the superseded single-column index idx_posts_root_id if present. */
SET @preparedStatement = (SELECT IF(
(
SELECT COUNT(*) FROM INFORMATION_SCHEMA.STATISTICS
WHERE table_name = 'Posts'
AND table_schema = DATABASE()
AND index_name = 'idx_posts_root_id'
) > 0,
'DROP INDEX idx_posts_root_id ON Posts;',
'SELECT 1'
));
PREPARE removeIndexIfExists FROM @preparedStatement;
EXECUTE removeIndexIfExists;
DEALLOCATE PREPARE removeIndexIfExists;
/* ==> mysql/000067_upgrade_channelmembers_v6.1.up.sql <== */
/* Convert ChannelMembers.Roles to text if it is not already. */
SET @preparedStatement = (SELECT IF(
(
SELECT COUNT(*) FROM INFORMATION_SCHEMA.COLUMNS
WHERE table_name = 'ChannelMembers'
AND table_schema = DATABASE()
AND column_name = 'Roles'
AND column_type != 'text'
) > 0,
'ALTER TABLE ChannelMembers MODIFY COLUMN Roles text;',
'SELECT 1'
));
PREPARE alterIfExists FROM @preparedStatement;
EXECUTE alterIfExists;
DEALLOCATE PREPARE alterIfExists;
/* ==> mysql/000068_upgrade_teammembers_v6.1.up.sql <== */
/* Convert TeamMembers.Roles to text if it is not already. */
SET @preparedStatement = (SELECT IF(
(
SELECT COUNT(*) FROM INFORMATION_SCHEMA.COLUMNS
WHERE table_name = 'TeamMembers'
AND table_schema = DATABASE()
AND column_name = 'Roles'
AND column_type != 'text'
) > 0,
'ALTER TABLE TeamMembers MODIFY COLUMN Roles text;',
'SELECT 1'
));
PREPARE alterIfExists FROM @preparedStatement;
EXECUTE alterIfExists;
DEALLOCATE PREPARE alterIfExists;
/* ==> mysql/000069_upgrade_jobs_v6.1.up.sql <== */
/* Add composite index (Status, Type) on Jobs if missing. */
SET @preparedStatement = (SELECT IF(
(
SELECT COUNT(*) FROM INFORMATION_SCHEMA.STATISTICS
WHERE table_name = 'Jobs'
AND table_schema = DATABASE()
AND index_name = 'idx_jobs_status_type'
) > 0,
'SELECT 1',
'CREATE INDEX idx_jobs_status_type ON Jobs(Status, Type);'
));
PREPARE createIndexIfNotExists FROM @preparedStatement;
EXECUTE createIndexIfNotExists;
DEALLOCATE PREPARE createIndexIfNotExists;
/* ==> mysql/000070_upgrade_cte_v6.1.up.sql <== */
DELIMITER //
/* Adds Channels.LastRootPostAt (default 0) and backfills it with the latest
root-post CreateAt per channel, only when the column does not exist yet. */
CREATE PROCEDURE Migrate_LastRootPostAt ()
BEGIN
DECLARE
LastRootPostAt_EXIST INT;
SELECT
COUNT(*)
FROM
INFORMATION_SCHEMA.COLUMNS
WHERE
TABLE_NAME = 'Channels'
AND table_schema = DATABASE()
AND COLUMN_NAME = 'LastRootPostAt' INTO LastRootPostAt_EXIST;
IF(LastRootPostAt_EXIST = 0) THEN
ALTER TABLE Channels ADD COLUMN LastRootPostAt bigint DEFAULT 0;
UPDATE
Channels
INNER JOIN (
SELECT
Channels.Id channelid,
COALESCE(MAX(Posts.CreateAt), 0) AS lastrootpost
FROM
Channels
LEFT JOIN Posts FORCE INDEX (idx_posts_channel_id_update_at) ON Channels.Id = Posts.ChannelId
WHERE
Posts.RootId = ''
GROUP BY
Channels.Id) AS q ON q.channelid = Channels.Id SET LastRootPostAt = lastrootpost;
END IF;
END//
DELIMITER ;
CALL Migrate_LastRootPostAt ();
DROP PROCEDURE IF EXISTS Migrate_LastRootPostAt;
/* ==> mysql/000071_upgrade_sessions_v6.1.up.sql <== */
/* Convert Sessions.Roles to text if it is not already. */
SET @preparedStatement = (SELECT IF(
(
SELECT COUNT(*) FROM INFORMATION_SCHEMA.COLUMNS
WHERE table_name = 'Sessions'
AND table_schema = DATABASE()
AND column_name = 'Roles'
AND column_type != 'text'
) > 0,
'ALTER TABLE Sessions MODIFY COLUMN Roles text;',
'SELECT 1'
));
PREPARE alterIfExists FROM @preparedStatement;
EXECUTE alterIfExists;
DEALLOCATE PREPARE alterIfExists;
/* ==> mysql/000072_upgrade_schemes_v6.3.up.sql <== */
/* Add Schemes.DefaultPlaybookAdminRole if missing. */
SET @preparedStatement = (SELECT IF(
(
SELECT COUNT(*) FROM INFORMATION_SCHEMA.COLUMNS
WHERE table_name = 'Schemes'
AND table_schema = DATABASE()
AND column_name = 'DefaultPlaybookAdminRole'
) > 0,
'SELECT 1',
'ALTER TABLE Schemes ADD COLUMN DefaultPlaybookAdminRole VARCHAR(64) DEFAULT "";'
));
PREPARE alterIfNotExists FROM @preparedStatement;
EXECUTE alterIfNotExists;
DEALLOCATE PREPARE alterIfNotExists;
/* Add Schemes.DefaultPlaybookMemberRole if missing. */
SET @preparedStatement = (SELECT IF(
(
SELECT COUNT(*) FROM INFORMATION_SCHEMA.COLUMNS
WHERE table_name = 'Schemes'
AND table_schema = DATABASE()
AND column_name = 'DefaultPlaybookMemberRole'
) > 0,
'SELECT 1',
'ALTER TABLE Schemes ADD COLUMN DefaultPlaybookMemberRole VARCHAR(64) DEFAULT "";'
));
PREPARE alterIfNotExists FROM @preparedStatement;
EXECUTE alterIfNotExists;
DEALLOCATE PREPARE alterIfNotExists;
/* Add Schemes.DefaultRunAdminRole if missing. */
SET @preparedStatement = (SELECT IF(
(
SELECT COUNT(*) FROM INFORMATION_SCHEMA.COLUMNS
WHERE table_name = 'Schemes'
AND table_schema = DATABASE()
AND column_name = 'DefaultRunAdminRole'
) > 0,
'SELECT 1',
'ALTER TABLE Schemes ADD COLUMN DefaultRunAdminRole VARCHAR(64) DEFAULT "";'
));
PREPARE alterIfNotExists FROM @preparedStatement;
EXECUTE alterIfNotExists;
DEALLOCATE PREPARE alterIfNotExists;
/* Add Schemes.DefaultRunMemberRole if missing. */
SET @preparedStatement = (SELECT IF(
(
SELECT COUNT(*) FROM INFORMATION_SCHEMA.COLUMNS
WHERE table_name = 'Schemes'
AND table_schema = DATABASE()
AND column_name = 'DefaultRunMemberRole'
) > 0,
'SELECT 1',
'ALTER TABLE Schemes ADD COLUMN DefaultRunMemberRole VARCHAR(64) DEFAULT "";'
));
PREPARE alterIfNotExists FROM @preparedStatement;
EXECUTE alterIfNotExists;
DEALLOCATE PREPARE alterIfNotExists;
/* ==> mysql/000073_upgrade_plugin_key_value_store_v6.3.up.sql <== */
/* Widen PluginKeyValueStore.PKey to varchar(150) if it has any other type. */
SET @preparedStatement = (SELECT IF(
(
SELECT Count(*) FROM Information_Schema.Columns
WHERE table_name = 'PluginKeyValueStore'
AND table_schema = DATABASE()
AND column_name = 'PKey'
AND column_type != 'varchar(150)'
) > 0,
'ALTER TABLE PluginKeyValueStore MODIFY COLUMN PKey varchar(150);',
'SELECT 1'
));
PREPARE alterTypeIfExists FROM @preparedStatement;
EXECUTE alterTypeIfExists;
DEALLOCATE PREPARE alterTypeIfExists;
/* ==> mysql/000074_upgrade_users_v6.3.up.sql <== */
/* Drop Users.AcceptedTermsOfServiceId if it still exists. */
SET @preparedStatement = (SELECT IF(
(
SELECT COUNT(*) FROM INFORMATION_SCHEMA.COLUMNS
WHERE table_name = 'Users'
AND table_schema = DATABASE()
AND column_name = 'AcceptedTermsOfServiceId'
) > 0,
'ALTER TABLE Users DROP COLUMN AcceptedTermsOfServiceId;',
'SELECT 1'
));
PREPARE alterIfExists FROM @preparedStatement;
EXECUTE alterIfExists;
DEALLOCATE PREPARE alterIfExists;

View File

@ -0,0 +1,199 @@
/* Product notices are controlled externally, via the mattermost/notices repository.
When there is a new notice specified there, the server may have time, right after
the migration and before it is shut down, to download it and modify the
ProductNoticeViewState table, adding a row for all users that have not seen it or
removing old notices that no longer need to be shown. This can happen in the
UpdateProductNotices function that is executed periodically to update the notices
cache. The script will never do this, so we need to remove all rows in that table
to avoid any unwanted diff. */
DELETE FROM ProductNoticeViewState;
/* Remove migration-related tables that are only updated through the server to track which
migrations have been applied */
DROP TABLE IF EXISTS db_lock;
DROP TABLE IF EXISTS db_migrations;
/* Migration 000054_create_crt_channelmembership_count.up sets
ChannelMembers.LastUpdateAt to the results of SELECT ROUND(UNIX_TIMESTAMP(NOW(3))*1000)
which will be different each time the migration is run. Thus, the column will always be
different when comparing the server and script migrations. To bypass this, we update all
rows in ChannelMembers so that they contain the same value for such column. */
UPDATE ChannelMembers SET LastUpdateAt = 1;
/* Migration 000055_create_crt_thread_count_and_unreads.up sets
ThreadMemberships.LastUpdated to the results of SELECT ROUND(UNIX_TIMESTAMP(NOW(3))*1000)
which will be different each time the migration is run. Thus, the column will always be
different when comparing the server and script migrations. To bypass this, we update all
rows in ThreadMemberships so that they contain the same value for such column. */
UPDATE ThreadMemberships SET LastUpdated = 1;
/* The security update check in the server may update the LastSecurityTime system value. To
avoid any spurious difference in the migrations, we update it to a fixed value. */
UPDATE Systems SET Value = 1 WHERE Name = 'LastSecurityTime';
/* The server migration may contain a row in the Systems table marking the onboarding as complete.
There are no migrations related to this, so we can simply drop it here. */
DELETE FROM Systems WHERE Name = 'FirstAdminSetupComplete';
/* The server migration contains an in-app migration that adds new roles for Playbooks:
doPlaybooksRolesCreationMigration, defined in https://github.com/mattermost/mattermost-server/blob/282bd351e3767dcfd8c8340da2e0915197c0dbcb/app/migrations.go#L345-L469
The roles are the ones defined in https://github.com/mattermost/mattermost-server/blob/282bd351e3767dcfd8c8340da2e0915197c0dbcb/model/role.go#L874-L929
When this migration finishes, it also adds a new row to the Systems table with the key of the migration.
This in-app migration does not happen in the script, so we remove those rows here. */
DELETE FROM Roles WHERE Name = 'playbook_member';
DELETE FROM Roles WHERE Name = 'playbook_admin';
DELETE FROM Roles WHERE Name = 'run_member';
DELETE FROM Roles WHERE Name = 'run_admin';
DELETE FROM Systems WHERE Name = 'PlaybookRolesCreationMigrationComplete';
/* The server migration contains two in-app migrations that add playbooks permissions to certain roles:
getAddPlaybooksPermissions and getPlaybooksPermissionsAddManageRoles, defined in https://github.com/mattermost/mattermost-server/blob/282bd351e3767dcfd8c8340da2e0915197c0dbcb/app/permissions_migrations.go#L1021-L1072
The specific roles ('%playbook%') are removed in the procedure below, but the migrations also add new rows to the Systems table marking the migrations as complete.
These in-app migrations do not happen in the script, so we remove those rows here. */
DELETE FROM Systems WHERE Name = 'playbooks_manage_roles';
DELETE FROM Systems WHERE Name = 'playbooks_permissions';
/* The server migration contains an in-app migration that adds boards permissions to certain roles:
getProductsBoardsPermissions, defined in https://github.com/mattermost/mattermost-server/blob/282bd351e3767dcfd8c8340da2e0915197c0dbcb/app/permissions_migrations.go#L1074-L1093
The specific roles (sysconsole_read_product_boards and sysconsole_write_product_boards) are removed in the procedure below,
but the migrations also add a new row to the Systems table marking the migrations as complete.
This in-app migration does not happen in the script, so we remove that row here. */
DELETE FROM Systems WHERE Name = 'products_boards';
/* TODO: REVIEW STARTING HERE */
/* The server migration contains an in-app migration that adds Ids to the Teams whose InviteId is an empty string:
doRemainingSchemaMigrations, defined in https://github.com/mattermost/mattermost-server/blob/282bd351e3767dcfd8c8340da2e0915197c0dbcb/app/migrations.go#L515-L540
The migration is not replicated in the script, since it happens in-app, but the server adds a new row to the
Systems table marking the table as complete, which the script doesn't do, so we remove that row here. */
DELETE FROM Systems WHERE Name = 'RemainingSchemaMigrations';
/* The server migration contains three in-app migrations that add a new role and new permissions
related to custom groups. The migrations are:
- doCustomGroupAdminRoleCreationMigration https://github.com/mattermost/mattermost-server/blob/282bd351e3767dcfd8c8340da2e0915197c0dbcb/app/migrations.go#L345-L469
- getAddCustomUserGroupsPermissions https://github.com/mattermost/mattermost-server/blob/282bd351e3767dcfd8c8340da2e0915197c0dbcb/app/permissions_migrations.go#L974-L995
- getAddCustomUserGroupsPermissionRestore https://github.com/mattermost/mattermost-server/blob/282bd351e3767dcfd8c8340da2e0915197c0dbcb/app/permissions_migrations.go#L997-L1019
The specific roles and permissions are removed in the procedure below, but the migrations also
add a new row to the Roles table for the new role and new rows to the Systems table marking the
migrations as complete.
These in-app migrations do not happen in the script, so we remove those rows here. */
DELETE FROM Roles WHERE Name = 'system_custom_group_admin';
DELETE FROM Systems WHERE Name = 'CustomGroupAdminRoleCreationMigrationComplete';
DELETE FROM Systems WHERE Name = 'custom_groups_permissions';
DELETE FROM Systems WHERE Name = 'custom_groups_permission_restore';
/* The server migration contains an in-app migration that updates the config, setting ServiceSettings.PostPriority
to true, doPostPriorityConfigDefaultTrueMigration, defined in https://github.com/mattermost/mattermost-server/blob/282bd351e3767dcfd8c8340da2e0915197c0dbcb/app/migrations.go#L542-L560
The migration is not replicated in the script, since it happens in-app, but the server adds a new row to the
Systems table marking the table as complete, which the script doesn't do, so we remove that row here. */
DELETE FROM Systems WHERE Name = 'PostPriorityConfigDefaultTrueMigrationComplete';
/* The rest of this script defines and executes a procedure to update the Roles table. It performs several changes:
1. Set the UpdateAt column of all rows to a fixed value, so that the server migration changes to this column
do not appear in the diff.
2. Remove the set of specific permissions added in the server migration that is not covered by the script, as
this logic happens all in-app after the normal DB migrations.
3. Set a consistent order in the Permissions column, which is modelled as a space-separated string containing each of
the different permissions each role has. This change is the reason why we need a complex procedure, which creates
a temporary table that pairs each single permission to its corresponding ID. So if the Roles table contains two
rows like:
Id: 'abcd'
Permissions: 'view_team read_public_channel invite_user'
Id: 'efgh'
Permissions: 'view_team create_emojis'
then the new temporary table will contain five rows like:
Id: 'abcd'
Permissions: 'view_team'
Id: 'abcd'
Permissions: 'read_public_channel'
Id: 'abcd'
Permissions: 'invite_user'
Id: 'efgh'
Permissions: 'view_team'
Id: 'efgh'
Permissions: 'create_emojis'
*/
DROP PROCEDURE IF EXISTS splitPermissions;
DROP PROCEDURE IF EXISTS sortAndFilterPermissionsInRoles;
DROP TEMPORARY TABLE IF EXISTS temp_roles;
CREATE TEMPORARY TABLE temp_roles(id varchar(26), permission longtext);
DELIMITER //
/* Auxiliary procedure that splits the space-separated permissions string into single rows that are inserted
in the temporary temp_roles table along with their corresponding ID. */
CREATE PROCEDURE splitPermissions(
IN id varchar(26),
IN permissionsString longtext
)
BEGIN
DECLARE idx INT DEFAULT 0;
SELECT TRIM(permissionsString) INTO permissionsString;
SELECT LOCATE(' ', permissionsString) INTO idx;
/* Peel off one space-delimited permission per iteration. */
WHILE idx > 0 DO
INSERT INTO temp_roles SELECT id, TRIM(LEFT(permissionsString, idx));
SELECT SUBSTR(permissionsString, idx+1) INTO permissionsString;
SELECT LOCATE(' ', permissionsString) INTO idx;
END WHILE;
/* Insert the final permission remaining after the last separator. */
INSERT INTO temp_roles(id, permission) VALUES(id, TRIM(permissionsString));
END; //
/* Main procedure that does update the Roles table */
CREATE PROCEDURE sortAndFilterPermissionsInRoles()
BEGIN
DECLARE done INT DEFAULT FALSE;
DECLARE rolesId varchar(26) DEFAULT '';
DECLARE rolesPermissions longtext DEFAULT '';
DECLARE cur1 CURSOR FOR SELECT Id, Permissions FROM Roles;
DECLARE CONTINUE HANDLER FOR NOT FOUND SET done = TRUE;
/* 1. Set a fixed value in the UpdateAt column for all rows in Roles table */
UPDATE Roles SET UpdateAt = 1;
/* Call splitPermissions for every row in the Roles table, thus populating the
temp_roles table. */
OPEN cur1;
read_loop: LOOP
FETCH cur1 INTO rolesId, rolesPermissions;
IF done THEN
LEAVE read_loop;
END IF;
CALL splitPermissions(rolesId, rolesPermissions);
END LOOP;
CLOSE cur1;
/* 2. Filter out the new permissions added by the in-app migrations */
DELETE FROM temp_roles WHERE permission LIKE 'sysconsole_read_products_boards';
DELETE FROM temp_roles WHERE permission LIKE 'sysconsole_write_products_boards';
DELETE FROM temp_roles WHERE permission LIKE '%playbook%';
DELETE FROM temp_roles WHERE permission LIKE 'run_create';
DELETE FROM temp_roles WHERE permission LIKE 'run_manage_members';
DELETE FROM temp_roles WHERE permission LIKE 'run_manage_properties';
DELETE FROM temp_roles WHERE permission LIKE 'run_view';
DELETE FROM temp_roles WHERE permission LIKE '%custom_group%';
/* Temporarily set to the maximum permitted value, since the call to group_concat
below needs a value bigger than the default */
SET group_concat_max_len = 18446744073709551615;
/* 3. Update the Permissions column in the Roles table with the filtered, sorted permissions,
concatenated again as a space-separated string */
UPDATE
Roles INNER JOIN (
SELECT temp_roles.id as Id, TRIM(group_concat(temp_roles.permission ORDER BY temp_roles.permission SEPARATOR ' ')) as Permissions
FROM Roles JOIN temp_roles ON Roles.Id = temp_roles.id
GROUP BY temp_roles.id
) AS Sorted
ON Roles.Id = Sorted.Id
SET Roles.Permissions = Sorted.Permissions;
/* Reset group_concat_max_len to its default value */
SET group_concat_max_len = 1024;
END; //
DELIMITER ;
/* Run the rewrite and drop the helper table. */
CALL sortAndFilterPermissionsInRoles();
DROP TEMPORARY TABLE IF EXISTS temp_roles;

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,168 @@
/* Product notices are controlled externally, via the mattermost/notices repository.
When there is a new notice specified there, the server may have time, right after
the migration and before it is shut down, to download it and modify the
ProductNoticeViewState table, adding a row for all users that have not seen it or
removing old notices that no longer need to be shown. This can happen in the
UpdateProductNotices function that is executed periodically to update the notices
cache. The script will never do this, so we need to remove all rows in that table
to avoid any unwanted diff. */
DELETE FROM ProductNoticeViewState;
/* Remove migration-related tables that are only updated through the server to track which
migrations have been applied */
DROP TABLE IF EXISTS db_lock;
DROP TABLE IF EXISTS db_migrations;
/* The security update check in the server may update the LastSecurityTime system value. To
avoid any spurious difference in the migrations, we update it to a fixed value. */
UPDATE Systems SET Value = 1 WHERE Name = 'LastSecurityTime';
/* The server migration may contain a row in the Systems table marking the onboarding as complete.
There are no migrations related to this, so we can simply drop it here. */
DELETE FROM Systems WHERE Name = 'FirstAdminSetupComplete';
/* The server migration contains an in-app migration that adds playbooks permissions to certain roles:
getPlaybooksPermissionsAddManageRoles, defined in https://github.com/mattermost/mattermost-server/blob/56a093ceaee6389a01a35b6d4626ef5a9fea4759/app/permissions_migrations.go#L1056-L1072
The specific roles ('%playbook%') are removed in the procedure below, but the migrations also add new rows to the Systems table marking the migrations as complete.
This in-app migration does not happen in the script, so we remove those rows here. */
DELETE FROM Systems WHERE Name = 'playbooks_manage_roles';
/* The server migration contains an in-app migration that adds boards permissions to certain roles:
getProductsBoardsPermissions, defined in https://github.com/mattermost/mattermost-server/blob/282bd351e3767dcfd8c8340da2e0915197c0dbcb/app/permissions_migrations.go#L1074-L1093
The specific roles (sysconsole_read_product_boards and sysconsole_write_product_boards) are removed in the procedure below,
but the migrations also add a new row to the Systems table marking the migrations as complete.
This in-app migration does not happen in the script, so we remove that row here. */
DELETE FROM Systems WHERE Name = 'products_boards';
/* The server migration contains an in-app migration that adds Ids to the Teams whose InviteId is an empty string:
doRemainingSchemaMigrations, defined in https://github.com/mattermost/mattermost-server/blob/282bd351e3767dcfd8c8340da2e0915197c0dbcb/app/migrations.go#L515-L540
The migration is not replicated in the script, since it happens in-app, but the server adds a new row to the
Systems table marking the table as complete, which the script doesn't do, so we remove that row here. */
DELETE FROM Systems WHERE Name = 'RemainingSchemaMigrations';
/* The server migration contains three in-app migrations that add a new role and new permissions
related to custom groups. The migrations are:
- doCustomGroupAdminRoleCreationMigration https://github.com/mattermost/mattermost-server/blob/282bd351e3767dcfd8c8340da2e0915197c0dbcb/app/migrations.go#L345-L469
- getAddCustomUserGroupsPermissions https://github.com/mattermost/mattermost-server/blob/282bd351e3767dcfd8c8340da2e0915197c0dbcb/app/permissions_migrations.go#L974-L995
- getAddCustomUserGroupsPermissionRestore https://github.com/mattermost/mattermost-server/blob/282bd351e3767dcfd8c8340da2e0915197c0dbcb/app/permissions_migrations.go#L997-L1019
The specific roles and permissions are removed in the procedure below, but the migrations also
add a new row to the Roles table for the new role and new rows to the Systems table marking the
migrations as complete.
These in-app migrations do not happen in the script, so we remove those rows here. */
DELETE FROM Roles WHERE Name = 'system_custom_group_admin';
DELETE FROM Systems WHERE Name = 'CustomGroupAdminRoleCreationMigrationComplete';
DELETE FROM Systems WHERE Name = 'custom_groups_permissions';
DELETE FROM Systems WHERE Name = 'custom_groups_permission_restore';
/* The server migration contains an in-app migration that updates the config, setting ServiceSettings.PostPriority
to true, doPostPriorityConfigDefaultTrueMigration, defined in https://github.com/mattermost/mattermost-server/blob/282bd351e3767dcfd8c8340da2e0915197c0dbcb/app/migrations.go#L542-L560
The migration is not replicated in the script, since it happens in-app, but the server adds a new row to the
Systems table marking the table as complete, which the script doesn't do, so we remove that row here. */
DELETE FROM Systems WHERE Name = 'PostPriorityConfigDefaultTrueMigrationComplete';
/* The rest of this script defines and executes a procedure to update the Roles table. It performs several changes:
1. Set the UpdateAt column of all rows to a fixed value, so that the server migration changes to this column
do not appear in the diff.
2. Remove the set of specific permissions added in the server migration that is not covered by the script, as
this logic happens all in-app after the normal DB migrations.
3. Set a consistent order in the Permissions column, which is modelled as a space-separated string containing each of
the different permissions each role has. This change is the reason why we need a complex procedure, which creates
a temporary table that pairs each single permission to its corresponding ID. So if the Roles table contains two
rows like:
Id: 'abcd'
Permissions: 'view_team read_public_channel invite_user'
Id: 'efgh'
Permissions: 'view_team create_emojis'
then the new temporary table will contain five rows like:
Id: 'abcd'
Permissions: 'view_team'
Id: 'abcd'
Permissions: 'read_public_channel'
Id: 'abcd'
Permissions: 'invite_user'
Id: 'efgh'
Permissions: 'view_team'
Id: 'efgh'
Permissions: 'create_emojis'
*/
DROP PROCEDURE IF EXISTS splitPermissions;
DROP PROCEDURE IF EXISTS sortAndFilterPermissionsInRoles;
DROP TEMPORARY TABLE IF EXISTS temp_roles;
CREATE TEMPORARY TABLE temp_roles(id varchar(26), permission longtext);
DELIMITER //
/* Auxiliary procedure that splits the space-separated permissions string into single rows that are inserted
in the temporary temp_roles table along with their corresponding ID. */
CREATE PROCEDURE splitPermissions(
IN id varchar(26),
IN permissionsString longtext
)
BEGIN
DECLARE idx INT DEFAULT 0;
SELECT TRIM(permissionsString) INTO permissionsString;
SELECT LOCATE(' ', permissionsString) INTO idx;
/* Peel off one space-delimited permission per iteration. */
WHILE idx > 0 DO
INSERT INTO temp_roles SELECT id, TRIM(LEFT(permissionsString, idx));
SELECT SUBSTR(permissionsString, idx+1) INTO permissionsString;
SELECT LOCATE(' ', permissionsString) INTO idx;
END WHILE;
/* Insert the final permission remaining after the last separator. */
INSERT INTO temp_roles(id, permission) VALUES(id, TRIM(permissionsString));
END; //
/* Main procedure that does update the Roles table */
CREATE PROCEDURE sortAndFilterPermissionsInRoles()
BEGIN
DECLARE done INT DEFAULT FALSE;
DECLARE rolesId varchar(26) DEFAULT '';
DECLARE rolesPermissions longtext DEFAULT '';
DECLARE cur1 CURSOR FOR SELECT Id, Permissions FROM Roles;
DECLARE CONTINUE HANDLER FOR NOT FOUND SET done = TRUE;
/* 1. Set a fixed value in the UpdateAt column for all rows in Roles table */
UPDATE Roles SET UpdateAt = 1;
/* Call splitPermissions for every row in the Roles table, thus populating the
temp_roles table. */
OPEN cur1;
read_loop: LOOP
FETCH cur1 INTO rolesId, rolesPermissions;
IF done THEN
LEAVE read_loop;
END IF;
CALL splitPermissions(rolesId, rolesPermissions);
END LOOP;
CLOSE cur1;
/* 2. Filter out the new permissions added by the in-app migrations */
DELETE FROM temp_roles WHERE permission LIKE 'sysconsole_read_products_boards';
DELETE FROM temp_roles WHERE permission LIKE 'sysconsole_write_products_boards';
DELETE FROM temp_roles WHERE permission LIKE 'playbook_public_manage_roles';
DELETE FROM temp_roles WHERE permission LIKE 'playbook_private_manage_roles';
DELETE FROM temp_roles WHERE permission LIKE '%custom_group%';
/* Temporarily set to the maximum permitted value, since the call to group_concat
below needs a value bigger than the default */
SET group_concat_max_len = 18446744073709551615;
/* 3. Update the Permissions column in the Roles table with the filtered, sorted permissions,
concatenated again as a space-separated string */
UPDATE
Roles INNER JOIN (
SELECT temp_roles.id as Id, TRIM(group_concat(temp_roles.permission ORDER BY temp_roles.permission SEPARATOR ' ')) as Permissions
FROM Roles JOIN temp_roles ON Roles.Id = temp_roles.id
GROUP BY temp_roles.id
) AS Sorted
ON Roles.Id = Sorted.Id
SET Roles.Permissions = Sorted.Permissions;
/* Reset group_concat_max_len to its default value */
SET group_concat_max_len = 1024;
END; //
DELIMITER ;
/* Run the rewrite and drop the helper table. */
CALL sortAndFilterPermissionsInRoles();
DROP TEMPORARY TABLE IF EXISTS temp_roles;

View File

@ -0,0 +1,599 @@
/* ==> mysql/000041_create_upload_sessions.up.sql <== */
/* Release 5.37 was meant to contain the index idx_uploadsessions_type, but a bug prevented that.
This part of the migration #41 adds such index */
SET @preparedStatement = (SELECT IF(
(
SELECT COUNT(*) FROM INFORMATION_SCHEMA.STATISTICS
WHERE table_name = 'UploadSessions'
AND table_schema = DATABASE()
AND index_name = 'idx_uploadsessions_type'
) > 0,
'SELECT 1',
'CREATE INDEX idx_uploadsessions_type ON UploadSessions(Type);'
));
PREPARE createIndexIfNotExists FROM @preparedStatement;
EXECUTE createIndexIfNotExists;
DEALLOCATE PREPARE createIndexIfNotExists;
/* ==> mysql/000075_alter_upload_sessions_index.up.sql <== */
DELIMITER //
/* If idx_uploadsessions_user_id was (incorrectly) created over the Type
   column, rebuild it over UserId; otherwise leave it untouched. */
CREATE PROCEDURE AlterIndex()
BEGIN
DECLARE columnName varchar(26) default '';
/* Collect the indexed column list for idx_uploadsessions_user_id;
   empty string when the index does not exist. */
SELECT IFNULL(GROUP_CONCAT(column_name ORDER BY seq_in_index), '') INTO columnName
FROM information_schema.statistics
WHERE table_schema = DATABASE()
AND table_name = 'UploadSessions'
AND index_name = 'idx_uploadsessions_user_id'
GROUP BY index_name;
IF columnName = 'Type' THEN
DROP INDEX idx_uploadsessions_user_id ON UploadSessions;
CREATE INDEX idx_uploadsessions_user_id ON UploadSessions(UserId);
END IF;
END//
DELIMITER ;
CALL AlterIndex();
DROP PROCEDURE IF EXISTS AlterIndex;
/* ==> mysql/000076_upgrade_lastrootpostat.up.sql <== */
DELIMITER //
/* Ensure Channels.LastRootPostAt has a DEFAULT of 0 (only altered when the
   current default is missing or different). */
CREATE PROCEDURE Migrate_LastRootPostAt_Default ()
BEGIN
IF (
SELECT COUNT(*) FROM INFORMATION_SCHEMA.COLUMNS
WHERE TABLE_NAME = 'Channels'
AND TABLE_SCHEMA = DATABASE()
AND COLUMN_NAME = 'LastRootPostAt'
AND (COLUMN_DEFAULT IS NULL OR COLUMN_DEFAULT != 0)
) = 1 THEN
ALTER TABLE Channels ALTER COLUMN LastRootPostAt SET DEFAULT 0;
END IF;
END//
DELIMITER ;
CALL Migrate_LastRootPostAt_Default ();
DROP PROCEDURE IF EXISTS Migrate_LastRootPostAt_Default;
DELIMITER //
/* Backfill NULL Channels.LastRootPostAt values from the newest root post
   (Posts.RootId = '') in each channel; channels with no posts get 0. */
CREATE PROCEDURE Migrate_LastRootPostAt_Fix ()
BEGIN
IF (
SELECT COUNT(*)
FROM Channels
WHERE LastRootPostAt IS NULL
) > 0 THEN
-- fixes migrate cte and sets the LastRootPostAt for channels that don't have it set
UPDATE
Channels
INNER JOIN (
SELECT
Channels.Id channelid,
COALESCE(MAX(Posts.CreateAt), 0) AS lastrootpost
FROM
Channels
LEFT JOIN Posts FORCE INDEX (idx_posts_channel_id_update_at) ON Channels.Id = Posts.ChannelId
WHERE
Posts.RootId = ''
GROUP BY
Channels.Id) AS q ON q.channelid = Channels.Id
SET
LastRootPostAt = lastrootpost
WHERE
LastRootPostAt IS NULL;
-- sets LastRootPostAt to 0, for channels with no posts
UPDATE Channels SET LastRootPostAt=0 WHERE LastRootPostAt IS NULL;
END IF;
END//
DELIMITER ;
CALL Migrate_LastRootPostAt_Fix ();
DROP PROCEDURE IF EXISTS Migrate_LastRootPostAt_Fix;
/* ==> mysql/000077_upgrade_users_v6.5.up.sql <== */
/* Drop the legacy Users.AcceptedServiceTermsId column when present. */
SET @preparedStatement = (SELECT IF(
(
SELECT COUNT(*) FROM INFORMATION_SCHEMA.COLUMNS
WHERE table_name = 'Users'
AND table_schema = DATABASE()
AND column_name = 'AcceptedServiceTermsId'
) > 0,
'ALTER TABLE Users DROP COLUMN AcceptedServiceTermsId;',
'SELECT 1'
));
PREPARE alterIfExists FROM @preparedStatement;
EXECUTE alterIfExists;
DEALLOCATE PREPARE alterIfExists;
/* ==> mysql/000078_create_oauth_mattermost_app_id.up.sql <== */
/* Add OAuthApps.MattermostAppID when missing (nullable here; tightened to
   NOT NULL by migration 000082 below). */
SET @preparedStatement = (SELECT IF(
(
SELECT COUNT(*) FROM INFORMATION_SCHEMA.COLUMNS
WHERE table_name = 'OAuthApps'
AND table_schema = DATABASE()
AND column_name = 'MattermostAppID'
) > 0,
'SELECT 1',
'ALTER TABLE OAuthApps ADD COLUMN MattermostAppID varchar(32);'
));
PREPARE alterIfExists FROM @preparedStatement;
EXECUTE alterIfExists;
DEALLOCATE PREPARE alterIfExists;
/* ==> mysql/000079_usergroups_displayname_index.up.sql <== */
/* Create idx_usergroups_displayname if it does not exist. */
SET @preparedStatement = (SELECT IF(
(
SELECT COUNT(*) FROM INFORMATION_SCHEMA.STATISTICS
WHERE table_name = 'UserGroups'
AND table_schema = DATABASE()
AND index_name = 'idx_usergroups_displayname'
) > 0,
'SELECT 1',
'CREATE INDEX idx_usergroups_displayname ON UserGroups(DisplayName);'
));
PREPARE createIndexIfNotExists FROM @preparedStatement;
EXECUTE createIndexIfNotExists;
DEALLOCATE PREPARE createIndexIfNotExists;
/* ==> mysql/000080_posts_createat_id.up.sql <== */
/* Create idx_posts_create_at_id if it does not exist (LOCK=NONE keeps the
   table writable during the online index build). */
SET @preparedStatement = (SELECT IF(
(
SELECT COUNT(*) FROM INFORMATION_SCHEMA.STATISTICS
WHERE table_name = 'Posts'
AND table_schema = DATABASE()
AND index_name = 'idx_posts_create_at_id'
) > 0,
'SELECT 1;',
'CREATE INDEX idx_posts_create_at_id on Posts(CreateAt, Id) LOCK=NONE;'
));
PREPARE createIndexIfNotExists FROM @preparedStatement;
EXECUTE createIndexIfNotExists;
DEALLOCATE PREPARE createIndexIfNotExists;
/* ==> mysql/000081_threads_deleteat.up.sql <== */
-- Replaced by 000083_threads_threaddeleteat.up.sql
/* ==> mysql/000082_upgrade_oauth_mattermost_app_id.up.sql <== */
/* Backfill NULL MattermostAppID values with the empty string before the
   column is made NOT NULL below. */
SET @preparedStatement = (SELECT IF(
(
SELECT COUNT(*) FROM INFORMATION_SCHEMA.COLUMNS
WHERE table_name = 'OAuthApps'
AND table_schema = DATABASE()
AND column_name = 'MattermostAppID'
) > 0,
'UPDATE OAuthApps SET MattermostAppID = "" WHERE MattermostAppID IS NULL;',
'SELECT 1'
));
PREPARE alterIfExists FROM @preparedStatement;
EXECUTE alterIfExists;
DEALLOCATE PREPARE alterIfExists;
/* Tighten MattermostAppID to NOT NULL DEFAULT ''. */
SET @preparedStatement = (SELECT IF(
(
SELECT COUNT(*) FROM INFORMATION_SCHEMA.COLUMNS
WHERE table_name = 'OAuthApps'
AND table_schema = DATABASE()
AND column_name = 'MattermostAppID'
) > 0,
'ALTER TABLE OAuthApps MODIFY MattermostAppID varchar(32) NOT NULL DEFAULT "";',
'SELECT 1'
));
PREPARE alterIfExists FROM @preparedStatement;
EXECUTE alterIfExists;
DEALLOCATE PREPARE alterIfExists;
/* ==> mysql/000083_threads_threaddeleteat.up.sql <== */
-- Drop any existing DeleteAt column from 000081_threads_deleteat.up.sql
/* BUGFIX: the existence check must query INFORMATION_SCHEMA.COLUMNS, not
   INFORMATION_SCHEMA.STATISTICS. STATISTICS only lists columns that belong to
   an index, so an unindexed DeleteAt column would never be detected and the
   DROP would never run. This also matches the NOT EXISTS check just below. */
SET @preparedStatement = (SELECT IF(
EXISTS(
SELECT 1 FROM INFORMATION_SCHEMA.COLUMNS
WHERE table_name = 'Threads'
AND table_schema = DATABASE()
AND column_name = 'DeleteAt'
),
'ALTER TABLE Threads DROP COLUMN DeleteAt;',
'SELECT 1;'
));
PREPARE removeColumnIfExists FROM @preparedStatement;
EXECUTE removeColumnIfExists;
DEALLOCATE PREPARE removeColumnIfExists;
/* Add the replacement ThreadDeleteAt column when it is missing. */
SET @preparedStatement = (SELECT IF(
NOT EXISTS(
SELECT 1 FROM INFORMATION_SCHEMA.COLUMNS
WHERE table_name = 'Threads'
AND table_schema = DATABASE()
AND column_name = 'ThreadDeleteAt'
),
'ALTER TABLE Threads ADD COLUMN ThreadDeleteAt bigint(20);',
'SELECT 1;'
));
PREPARE addColumnIfNotExists FROM @preparedStatement;
EXECUTE addColumnIfNotExists;
DEALLOCATE PREPARE addColumnIfNotExists;
/* Backfill ThreadDeleteAt from the root post's DeleteAt; only rows still
   NULL are touched, keeping the statement idempotent. */
UPDATE Threads, Posts
SET Threads.ThreadDeleteAt = Posts.DeleteAt
WHERE Posts.Id = Threads.PostId
AND Threads.ThreadDeleteAt IS NULL;
/* ==> mysql/000084_recent_searches.up.sql <== */
/* Per-user ring buffer of recent search queries, addressed by SearchPointer. */
CREATE TABLE IF NOT EXISTS RecentSearches (
UserId CHAR(26),
SearchPointer int,
Query json,
CreateAt bigint NOT NULL,
PRIMARY KEY (UserId, SearchPointer)
);
/* ==> mysql/000085_fileinfo_add_archived_column.up.sql <== */
/* Add FileInfo.Archived (default false) when missing. */
SET @preparedStatement = (SELECT IF(
(
SELECT COUNT(*) FROM INFORMATION_SCHEMA.COLUMNS
WHERE table_name = 'FileInfo'
AND table_schema = DATABASE()
AND column_name = 'Archived'
) > 0,
'SELECT 1',
'ALTER TABLE FileInfo ADD COLUMN Archived boolean NOT NULL DEFAULT false;'
));
PREPARE alterIfExists FROM @preparedStatement;
EXECUTE alterIfExists;
DEALLOCATE PREPARE alterIfExists;
/* ==> mysql/000086_add_cloud_limits_archived.up.sql <== */
/* Add Teams.CloudLimitsArchived (default FALSE) when missing. */
SET @preparedStatement = (SELECT IF(
NOT EXISTS(
SELECT 1 FROM INFORMATION_SCHEMA.COLUMNS
WHERE table_name = 'Teams'
AND table_schema = DATABASE()
AND column_name = 'CloudLimitsArchived'
),
'ALTER TABLE Teams ADD COLUMN CloudLimitsArchived BOOLEAN NOT NULL DEFAULT FALSE;',
'SELECT 1'
));
PREPARE alterIfNotExists FROM @preparedStatement;
EXECUTE alterIfNotExists;
DEALLOCATE PREPARE alterIfNotExists;
/* ==> mysql/000087_sidebar_categories_index.up.sql <== */
/* Create idx_sidebarcategories_userid_teamid if absent (online build). */
SET @preparedStatement = (SELECT IF(
(
SELECT COUNT(*) FROM INFORMATION_SCHEMA.STATISTICS
WHERE table_name = 'SidebarCategories'
AND table_schema = DATABASE()
AND index_name = 'idx_sidebarcategories_userid_teamid'
) > 0,
'SELECT 1;',
'CREATE INDEX idx_sidebarcategories_userid_teamid on SidebarCategories(UserId, TeamId) LOCK=NONE;'
));
PREPARE createIndexIfNotExists FROM @preparedStatement;
EXECUTE createIndexIfNotExists;
DEALLOCATE PREPARE createIndexIfNotExists;
/* ==> mysql/000088_remaining_migrations.up.sql <== */
DROP TABLE IF EXISTS JobStatuses;
DROP TABLE IF EXISTS PasswordRecovery;
/* Migrate legacy per-user theme settings out of Users.ThemeProps and into the
   Preferences table, then (in the second prepared statement) drop the column. */
SET @preparedStatement = (SELECT IF(
(
SELECT COUNT(*) FROM INFORMATION_SCHEMA.COLUMNS
WHERE table_name = 'Users'
AND table_schema = DATABASE()
AND column_name = 'ThemeProps'
) > 0,
'INSERT INTO Preferences(UserId, Category, Name, Value) SELECT Id, \'\', \'\', ThemeProps FROM Users WHERE Users.ThemeProps != \'null\'',
'SELECT 1'
));
PREPARE migrateTheme FROM @preparedStatement;
EXECUTE migrateTheme;
DEALLOCATE PREPARE migrateTheme;
-- We have to do this twice because the prepared statement doesn't support multiple SQL queries
-- in a single string.
SET @preparedStatement = (SELECT IF(
(
SELECT COUNT(*) FROM INFORMATION_SCHEMA.COLUMNS
WHERE table_name = 'Users'
AND table_schema = DATABASE()
AND column_name = 'ThemeProps'
) > 0,
'ALTER TABLE Users DROP COLUMN ThemeProps',
'SELECT 1'
));
PREPARE migrateTheme FROM @preparedStatement;
EXECUTE migrateTheme;
DEALLOCATE PREPARE migrateTheme;
/* ==> mysql/000089_add-channelid-to-reaction.up.sql <== */
/* Add Reactions.ChannelId when missing, backfill it from the reacted-to
   post's channel, then index it. */
SET @preparedStatement = (SELECT IF(
NOT EXISTS(
SELECT 1 FROM INFORMATION_SCHEMA.COLUMNS
WHERE table_name = 'Reactions'
AND table_schema = DATABASE()
AND column_name = 'ChannelId'
),
'ALTER TABLE Reactions ADD COLUMN ChannelId varchar(26) NOT NULL DEFAULT "";',
'SELECT 1;'
));
PREPARE addColumnIfNotExists FROM @preparedStatement;
EXECUTE addColumnIfNotExists;
DEALLOCATE PREPARE addColumnIfNotExists;
/* Reactions whose post no longer exists keep the empty-string ChannelId. */
UPDATE Reactions SET ChannelId = COALESCE((select ChannelId from Posts where Posts.Id = Reactions.PostId), '') WHERE ChannelId="";
SET @preparedStatement = (SELECT IF(
(
SELECT COUNT(*) FROM INFORMATION_SCHEMA.STATISTICS
WHERE table_name = 'Reactions'
AND table_schema = DATABASE()
AND index_name = 'idx_reactions_channel_id'
) > 0,
'SELECT 1',
'CREATE INDEX idx_reactions_channel_id ON Reactions(ChannelId);'
));
PREPARE createIndexIfNotExists FROM @preparedStatement;
EXECUTE createIndexIfNotExists;
DEALLOCATE PREPARE createIndexIfNotExists;
/* ==> mysql/000090_create_enums.up.sql <== */
/* Convert the Type columns of Channels, Teams and UploadSessions to ENUMs,
   skipping each ALTER when the column already has the target type. */
SET @preparedStatement = (SELECT IF(
(
SELECT COUNT(*) FROM INFORMATION_SCHEMA.COLUMNS
WHERE table_name = 'Channels'
AND table_schema = DATABASE()
AND column_name = 'Type'
AND column_type != 'ENUM("D", "O", "G", "P")'
) > 0,
'ALTER TABLE Channels MODIFY COLUMN Type ENUM("D", "O", "G", "P");',
'SELECT 1'
));
PREPARE alterIfExists FROM @preparedStatement;
EXECUTE alterIfExists;
DEALLOCATE PREPARE alterIfExists;
SET @preparedStatement = (SELECT IF(
(
SELECT COUNT(*) FROM INFORMATION_SCHEMA.COLUMNS
WHERE table_name = 'Teams'
AND table_schema = DATABASE()
AND column_name = 'Type'
AND column_type != 'ENUM("I", "O")'
) > 0,
'ALTER TABLE Teams MODIFY COLUMN Type ENUM("I", "O");',
'SELECT 1'
));
PREPARE alterIfExists FROM @preparedStatement;
EXECUTE alterIfExists;
DEALLOCATE PREPARE alterIfExists;
SET @preparedStatement = (SELECT IF(
(
SELECT COUNT(*) FROM INFORMATION_SCHEMA.COLUMNS
WHERE table_name = 'UploadSessions'
AND table_schema = DATABASE()
AND column_name = 'Type'
AND column_type != 'ENUM("attachment", "import")'
) > 0,
'ALTER TABLE UploadSessions MODIFY COLUMN Type ENUM("attachment", "import");',
'SELECT 1'
));
PREPARE alterIfExists FROM @preparedStatement;
EXECUTE alterIfExists;
DEALLOCATE PREPARE alterIfExists;
/* ==> mysql/000091_create_post_reminder.up.sql <== */
/* One reminder per (post, user); TargetTime is when to fire it. */
CREATE TABLE IF NOT EXISTS PostReminders (
PostId varchar(26) NOT NULL,
UserId varchar(26) NOT NULL,
TargetTime bigint,
PRIMARY KEY (PostId, UserId)
);
SET @preparedStatement = (SELECT IF(
(
SELECT COUNT(*) FROM INFORMATION_SCHEMA.STATISTICS
WHERE table_name = 'PostReminders'
AND table_schema = DATABASE()
AND index_name = 'idx_postreminders_targettime'
) > 0,
'SELECT 1',
'CREATE INDEX idx_postreminders_targettime ON PostReminders(TargetTime);'
));
PREPARE createIndexIfNotExists FROM @preparedStatement;
EXECUTE createIndexIfNotExists;
DEALLOCATE PREPARE createIndexIfNotExists;
/* ==> mysql/000092_add_createat_to_teammembers.up.sql <== */
/* Add TeamMembers.CreateAt (membership creation timestamp, bigint,
   defaults to 0 for pre-existing rows). Guarded with NOT EXISTS on
   INFORMATION_SCHEMA.COLUMNS so re-runs are no-ops. */
SET @preparedStatement = (SELECT IF(
NOT EXISTS(
SELECT 1 FROM INFORMATION_SCHEMA.COLUMNS
WHERE table_name = 'TeamMembers'
AND table_schema = DATABASE()
AND column_name = 'CreateAt'
),
'ALTER TABLE TeamMembers ADD COLUMN CreateAt bigint DEFAULT 0;',
'SELECT 1;'
));
PREPARE addColumnIfNotExists FROM @preparedStatement;
EXECUTE addColumnIfNotExists;
DEALLOCATE PREPARE addColumnIfNotExists;
/* Index TeamMembers.CreateAt for time-ordered membership queries.
   MySQL has no CREATE INDEX IF NOT EXISTS, so the guard checks
   INFORMATION_SCHEMA.STATISTICS first.
   Fix: the existence check previously looked for
   'idx_teammembers_create_at' while the CREATE statement used
   'idx_teammembers_createat'. The guard could therefore never see the
   index it creates, so any re-run of this migration would attempt a
   duplicate CREATE INDEX and fail. Both sides now use the name the
   index is actually created with. */
SET @preparedStatement = (SELECT IF(
(
SELECT COUNT(*) FROM INFORMATION_SCHEMA.STATISTICS
WHERE table_name = 'TeamMembers'
AND table_schema = DATABASE()
AND index_name = 'idx_teammembers_createat'
) > 0,
'SELECT 1',
'CREATE INDEX idx_teammembers_createat ON TeamMembers(CreateAt);'
));
PREPARE createIndexIfNotExists FROM @preparedStatement;
EXECUTE createIndexIfNotExists;
DEALLOCATE PREPARE createIndexIfNotExists;
/* ==> mysql/000093_notify_admin.up.sql <== */
/* NotifyAdmin records "notify your admin" upgrade requests: which user
   asked, when (CreateAt, epoch bigint), for which plan/feature, and
   whether the request was made during a trial. The composite primary
   key de-duplicates repeat requests per user/feature/plan. */
CREATE TABLE IF NOT EXISTS NotifyAdmin (
UserId varchar(26) NOT NULL,
CreateAt bigint(20) DEFAULT NULL,
RequiredPlan varchar(26) NOT NULL,
RequiredFeature varchar(100) NOT NULL,
Trial BOOLEAN NOT NULL,
PRIMARY KEY (UserId, RequiredFeature, RequiredPlan)
);
/* ==> mysql/000094_threads_teamid.up.sql <== */
-- Replaced by 000096_threads_threadteamid.up.sql
/* ==> mysql/000095_remove_posts_parentid.up.sql <== */
-- While upgrading from 5.x to 6.x with manual queries, there is a chance that this
-- migration is skipped. In that case, we need to make sure that the column is dropped.
-- Drop-if-exists guard: DROP COLUMN only runs when Posts.ParentId is
-- still present, so the migration is safe to re-run.
SET @preparedStatement = (SELECT IF(
(
SELECT COUNT(*) FROM INFORMATION_SCHEMA.COLUMNS
WHERE table_name = 'Posts'
AND table_schema = DATABASE()
AND column_name = 'ParentId'
) > 0,
'ALTER TABLE Posts DROP COLUMN ParentId;',
'SELECT 1'
));
PREPARE alterIfExists FROM @preparedStatement;
EXECUTE alterIfExists;
DEALLOCATE PREPARE alterIfExists;
/* ==> mysql/000096_threads_threadteamid.up.sql <== */
-- Drop any existing TeamId column from 000094_threads_teamid.up.sql
/* Fix: the column-existence check previously queried
   INFORMATION_SCHEMA.STATISTICS, which catalogs index columns, not
   table columns — a Threads.TeamId column that was not part of any
   index would never be detected, and the DROP would be silently
   skipped. Column checks belong in INFORMATION_SCHEMA.COLUMNS, as
   every other column guard in this file does. The redundant '> 0'
   after EXISTS() (which already yields a boolean) is also dropped,
   matching the NOT EXISTS guard used for the ThreadTeamId addition. */
SET @preparedStatement = (SELECT IF(
EXISTS(
SELECT 1 FROM INFORMATION_SCHEMA.COLUMNS
WHERE table_name = 'Threads'
AND table_schema = DATABASE()
AND column_name = 'TeamId'
),
'ALTER TABLE Threads DROP COLUMN TeamId;',
'SELECT 1;'
));
PREPARE removeColumnIfExists FROM @preparedStatement;
EXECUTE removeColumnIfExists;
DEALLOCATE PREPARE removeColumnIfExists;
/* Add Threads.ThreadTeamId (nullable, populated below) if missing. */
SET @preparedStatement = (SELECT IF(
NOT EXISTS(
SELECT 1 FROM INFORMATION_SCHEMA.COLUMNS
WHERE table_name = 'Threads'
AND table_schema = DATABASE()
AND column_name = 'ThreadTeamId'
),
'ALTER TABLE Threads ADD COLUMN ThreadTeamId varchar(26) DEFAULT NULL;',
'SELECT 1;'
));
PREPARE addColumnIfNotExists FROM @preparedStatement;
EXECUTE addColumnIfNotExists;
DEALLOCATE PREPARE addColumnIfNotExists;
/* Backfill ThreadTeamId from the thread's channel. Only NULL rows are
   touched, so a partially-backfilled table is resumed, not rewritten. */
UPDATE Threads, Channels
SET Threads.ThreadTeamId = Channels.TeamId
WHERE Channels.Id = Threads.ChannelId
AND Threads.ThreadTeamId IS NULL;
/* ==> mysql/000097_create_posts_priority.up.sql <== */
/* PostsPriority stores per-post priority metadata: the priority label,
   whether an acknowledgement was requested, and whether persistent
   notifications are enabled (tinyint(1) used as booleans). */
CREATE TABLE IF NOT EXISTS PostsPriority (
PostId varchar(26) NOT NULL,
ChannelId varchar(26) NOT NULL,
Priority varchar(32) NOT NULL,
RequestedAck tinyint(1),
PersistentNotifications tinyint(1),
PRIMARY KEY (PostId)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
/* Add ChannelMembers.UrgentMentionCount (count of unread urgent
   mentions per member), guarded for idempotency. */
SET @preparedStatement = (SELECT IF(
NOT EXISTS(
SELECT 1 FROM INFORMATION_SCHEMA.COLUMNS
WHERE table_name = 'ChannelMembers'
AND table_schema = DATABASE()
AND column_name = 'UrgentMentionCount'
),
'ALTER TABLE ChannelMembers ADD COLUMN UrgentMentionCount bigint(20);',
'SELECT 1;'
));
PREPARE alterIfNotExists FROM @preparedStatement;
EXECUTE alterIfNotExists;
DEALLOCATE PREPARE alterIfNotExists;
/* ==> mysql/000098_create_post_acknowledgements.up.sql <== */
/* PostAcknowledgements records which users acknowledged which posts;
   AcknowledgedAt is the epoch timestamp of the acknowledgement. */
CREATE TABLE IF NOT EXISTS PostAcknowledgements (
PostId varchar(26) NOT NULL,
UserId varchar(26) NOT NULL,
AcknowledgedAt bigint(20) DEFAULT NULL,
PRIMARY KEY (PostId, UserId)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
/* ==> mysql/000099_create_drafts.up.sql <== */
/* Drafts stores unsent message drafts server-side, keyed by author,
   channel, and thread root (RootId defaults to '' for channel-level
   drafts so it can participate in the primary key). Message, Props and
   FileIds are serialized text payloads. */
CREATE TABLE IF NOT EXISTS Drafts (
CreateAt bigint(20) DEFAULT NULL,
UpdateAt bigint(20) DEFAULT NULL,
DeleteAt bigint(20) DEFAULT NULL,
UserId varchar(26) NOT NULL,
ChannelId varchar(26) NOT NULL,
RootId varchar(26) DEFAULT '',
Message text,
Props text,
FileIds text,
PRIMARY KEY (UserId, ChannelId, RootId)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
/* ==> mysql/000100_add_draft_priority_column.up.sql <== */
SET @preparedStatement = (SELECT IF(
(
SELECT COUNT(*) FROM INFORMATION_SCHEMA.COLUMNS
WHERE table_name = 'Drafts'
AND table_schema = DATABASE()
AND column_name = 'Priority'
) > 0,
'SELECT 1',
'ALTER TABLE Drafts ADD COLUMN Priority text;'
));
PREPARE alterIfExists FROM @preparedStatement;
EXECUTE alterIfExists;
DEALLOCATE PREPARE alterIfExists;
/* ==> mysql/000101_create_true_up_review_history.up.sql <== */
CREATE TABLE IF NOT EXISTS TrueUpReviewHistory (
DueDate bigint(20),
Completed boolean,
PRIMARY KEY (DueDate)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;

View File

@ -0,0 +1,23 @@
/* The sessions in the DB dump may have expired before the CI tests run, making
the server remove the rows and generating a spurious diff that we want to avoid.
In order to do so, we mark all sessions' ExpiresAt value to 0, so they never expire. */
UPDATE Sessions SET ExpiresAt = 0;
/* The dump may not contain a system-bot user, in which case the server will create
one if it's not shut down before a job requests it. This situation creates a flaky
test in which, on rare occasions, the system-bot is indeed created, generating a
spurious diff. We avoid this by making sure that there is a system-bot user and a
corresponding bot. */
-- DELIMITER is a mysql-client directive (not SQL): it lets the procedure
-- body contain ';' without terminating the CREATE PROCEDURE statement.
DELIMITER //
CREATE PROCEDURE AddSystemBotIfNeeded ()
BEGIN
DECLARE CreateSystemBot BOOLEAN;
-- True when no user named 'system-bot' exists yet.
SELECT COUNT(*) = 0 FROM Users WHERE Username = 'system-bot' INTO CreateSystemBot;
IF CreateSystemBot THEN
/* These values are retrieved from a real system-bot created by a server */
INSERT INTO `Bots` VALUES ('nc7y5x1i8jgr9btabqo5m3579c','','phxrtijfrtfg7k4bwj9nophqyc',0,1681308600015,1681308600015,0);
INSERT INTO `Users` VALUES ('nc7y5x1i8jgr9btabqo5m3579c',1681308600014,1681308600014,0,'system-bot','',NULL,'','system-bot@localhost',0,'','System','','','system_user',0,'{}','{\"push\": \"mention\", \"email\": \"true\", \"channel\": \"true\", \"desktop\": \"mention\", \"comments\": \"never\", \"first_name\": \"false\", \"push_status\": \"away\", \"mention_keys\": \"\", \"push_threads\": \"all\", \"desktop_sound\": \"true\", \"email_threads\": \"all\", \"desktop_threads\": \"all\"}',1681308600014,0,0,'en','{\"manualTimezone\": \"\", \"automaticTimezone\": \"\", \"useAutomaticTimezone\": \"true\"}',0,'',NULL);
END IF;
END//
DELIMITER ;
-- Run the guard once, immediately; the procedure remains defined in the dump.
CALL AddSystemBotIfNeeded();

View File

@ -3,7 +3,7 @@
import {Block, createBlock} from './block'
type AttachmentBlockFields = {
attachmentId: string
fileId: string
}
type AttachmentBlock = Block & {
@ -18,7 +18,7 @@ function createAttachmentBlock(block?: Block): AttachmentBlock {
...createBlock(block),
type: 'attachment',
fields: {
attachmentId: block?.fields.attachmentId || '',
fileId: block?.fields.attachmentId || block?.fields.fileId || '',
},
isUploading: false,
uploadingPercent: 0,

View File

@ -151,7 +151,7 @@ const CardDialog = (props: Props): JSX.Element => {
Utils.selectLocalFile(async (attachment) => {
const uploadingBlock = createBlock()
uploadingBlock.title = attachment.name
uploadingBlock.fields.attachmentId = attachment.name
uploadingBlock.fields.fileId = attachment.name
uploadingBlock.boardId = boardId
if (card) {
uploadingBlock.parentId = card.id
@ -177,11 +177,11 @@ const CardDialog = (props: Props): JSX.Element => {
xhr.onload = () => {
if (xhr.status === 200 && xhr.readyState === 4) {
const json = JSON.parse(xhr.response)
const attachmentId = json.fileId
if (attachmentId) {
const fileId = json.fileId
if (fileId) {
removeUploadingAttachment(uploadingBlock)
const block = createAttachmentBlock()
block.fields.attachmentId = attachmentId || ''
block.fields.fileId = fileId || ''
block.title = attachment.name
sendFlashMessage({content: intl.formatMessage({id: 'AttachmentBlock.uploadSuccess', defaultMessage: 'Attachment uploaded.'}), severity: 'normal'})
resolve(block)

View File

@ -39,7 +39,7 @@ describe('component/content/FileBlock', () => {
type: 'attachment',
title: 'test-title',
fields: {
attachmentId: 'test.txt',
fileId: 'test.txt',
},
createdBy: 'test-user-id',
createAt: 0,

View File

@ -50,7 +50,7 @@ const AttachmentElement = (props: Props): JSX.Element|null => {
})
return
}
const attachmentInfo = await octoClient.getFileInfo(block.boardId, block.fields.attachmentId)
const attachmentInfo = await octoClient.getFileInfo(block.boardId, block.fields.fileId)
setFileInfo(attachmentInfo)
}
loadFile()
@ -113,7 +113,7 @@ const AttachmentElement = (props: Props): JSX.Element|null => {
}
const attachmentDownloadHandler = async () => {
const attachment = await octoClient.getFileAsDataUrl(block.boardId, block.fields.attachmentId)
const attachment = await octoClient.getFileAsDataUrl(block.boardId, block.fields.fileId)
const anchor = document.createElement('a')
anchor.href = attachment.url || ''
anchor.download = fileInfo.name || ''

View File

@ -25,6 +25,7 @@ import CompassIcon from 'src/widgets/icons/compassIcon'
import OptionsIcon from 'src/widgets/icons/options'
import Menu from 'src/widgets/menu'
import MenuWrapper from 'src/widgets/menuWrapper'
import {UserSettings} from 'src/userSettings'
import './sidebarCategory.scss'
import {Category, CategoryBoardMetadata, CategoryBoards} from 'src/store/sidebar'
@ -202,12 +203,24 @@ const SidebarCategory = (props: Props) => {
setTimeout(() => {
showBoard(props.boards[nextBoardId as number].id)
}, 120)
} else {
setTimeout(() => {
const newPath = generatePath('/team/:teamId', {teamId: teamID,})
history.push(newPath)
}, 120)
}
},
async () => {
showBoard(deleteBoard.id)
},
)
if (
UserSettings.lastBoardId &&
UserSettings.lastBoardId[deleteBoard.teamId] == deleteBoard.id
) {
UserSettings.setLastBoardID(deleteBoard.teamId, null)
UserSettings.setLastViewId(deleteBoard.id, null)
}
}, [showBoard, deleteBoard, props.boards])
const updateCategory = useCallback(async (value: boolean) => {

View File

@ -186,7 +186,9 @@ const BoardPage = (props: Props): JSX.Element => {
const joinBoard = async (myUser: IUser, boardTeamId: string, boardId: string, allowAdmin: boolean) => {
const member = await octoClient.joinBoard(boardId, allowAdmin)
if (!member) {
if (myUser.permissions?.find((s) => s === 'manage_system' || s === 'manage_team')) {
// if allowAdmin is true, then we failed to join the board
// as an admin, normally, this is deleted/missing board
if (!allowAdmin && myUser.permissions?.find((s) => s === 'manage_system' || s === 'manage_team')) {
setShowJoinBoardDialog(true)
return
}

View File

@ -12,8 +12,8 @@ import {
import {logout, loadMe, loadMeREST} from 'mattermost-redux/actions/users';
import {Preferences} from 'mattermost-redux/constants';
import {getConfig, isPerformanceDebuggingEnabled} from 'mattermost-redux/selectors/entities/general';
import {getCurrentTeamId, getMyTeams, getTeam, getMyTeamMember, getTeamMemberships} from 'mattermost-redux/selectors/entities/teams';
import {getBool, isCollapsedThreadsEnabled, isGraphQLEnabled} from 'mattermost-redux/selectors/entities/preferences';
import {getCurrentTeamId, getMyTeams, getTeam, getMyTeamMember, getTeamMemberships, getActiveTeamsList} from 'mattermost-redux/selectors/entities/teams';
import {getBool, getIsOnboardingFlowEnabled, isCollapsedThreadsEnabled, isGraphQLEnabled} from 'mattermost-redux/selectors/entities/preferences';
import {getCurrentUser, getCurrentUserId, isFirstAdmin} from 'mattermost-redux/selectors/entities/users';
import {getCurrentChannelStats, getCurrentChannelId, getMyChannelMember, getRedirectChannelNameForTeam, getChannelsNameMapInTeam, getAllDirectChannels, getChannelMessageCount} from 'mattermost-redux/selectors/entities/channels';
import {appsEnabled} from 'mattermost-redux/selectors/entities/apps';
@ -352,7 +352,7 @@ export async function redirectUserToDefaultTeam() {
// Assume we need to load the user if they don't have any team memberships loaded or the user loaded
let user = getCurrentUser(state);
const shouldLoadUser = Utils.isEmptyObject(getTeamMemberships(state)) || !user;
const onboardingFlowEnabled = getIsOnboardingFlowEnabled(state);
if (shouldLoadUser) {
if (isGraphQLEnabled(state)) {
await dispatch(loadMe());
@ -374,8 +374,9 @@ export async function redirectUserToDefaultTeam() {
const teamId = LocalStorageStore.getPreviousTeamId(user.id);
let myTeams = getMyTeams(state);
if (myTeams.length === 0) {
if (isUserFirstAdmin) {
const teams = getActiveTeamsList(state);
if (teams.length === 0) {
if (isUserFirstAdmin && onboardingFlowEnabled) {
getHistory().push('/preparing-workspace');
return;
}

View File

@ -16,7 +16,6 @@ import {getCurrentUserId} from 'mattermost-redux/selectors/entities/users';
import {getCurrentTeamId} from 'mattermost-redux/selectors/entities/teams';
import {selectLhsItem} from 'actions/views/lhs';
import {GlobalState} from 'types/store';
import {LhsItemType, LhsPage} from 'types/store/lhs';
import {CardSizes, InsightsWidgetTypes, TimeFrame, TimeFrames} from '@mattermost/types/insights';
@ -41,17 +40,20 @@ type SelectOption = {
const Insights = () => {
const dispatch = useDispatch();
// check if either of focalboard plugin or boards product is enabled
const focalboardPluginEnabled = useSelector((state: GlobalState) => state.plugins.plugins?.focalboard);
let focalboardProductEnabled = false;
const products = useProducts();
if (products) {
focalboardProductEnabled = products.some((product) => product.pluginId === suitePluginIds.focalboard || product.pluginId === suitePluginIds.boards);
}
const focalboardEnabled = focalboardPluginEnabled || focalboardProductEnabled;
const playbooksEnabled = useSelector((state: GlobalState) => state.plugins.plugins?.playbooks);
let focalboardEnabled = false;
let playbooksEnabled = false;
if (products) {
products.forEach((product) => {
if (product.pluginId === suitePluginIds.boards) {
focalboardEnabled = true;
} else if (product.pluginId === suitePluginIds.playbooks) {
playbooksEnabled = true;
}
});
}
const currentUserId = useSelector(getCurrentUserId);
const currentTeamId = useSelector(getCurrentTeamId);

View File

@ -59,13 +59,13 @@ const ContactSalesCard = (props: Props) => {
title = (
<FormattedMessage
id='admin.billing.subscription.privateCloudCard.cloudEnterprise.title'
defaultMessage='Looking for an annual discount? '
defaultMessage='Looking to rollout Mattermost for your entire organization? '
/>
);
description = (
<FormattedMessage
id='admin.billing.subscription.privateCloudCard.cloudEnterprise.description'
defaultMessage='At Mattermost, we work with you and your team to meet your needs throughout the product. If you are looking for an annual discount, please reach out to our sales team.'
defaultMessage='At Mattermost, we work with you and your organization to meet your needs throughout the product. If youre considering a wider rollout, talk to us.'
/>
);
} else {
@ -103,13 +103,13 @@ const ContactSalesCard = (props: Props) => {
title = (
<FormattedMessage
id='admin.billing.subscription.privateCloudCard.cloudEnterprise.title'
defaultMessage='Looking for an annual discount? '
defaultMessage='Looking to rollout Mattermost for your entire organization? '
/>
);
description = (
<FormattedMessage
id='admin.billing.subscription.privateCloudCard.cloudEnterprise.description'
defaultMessage='At Mattermost, we work with you and your team to meet your needs throughout the product. If you are looking for an annual discount, please reach out to our sales team.'
defaultMessage='At Mattermost, we work with you and your organization to meet your needs throughout the product. If youre considering a wider rollout, talk to us.'
/>
);
break;

View File

@ -163,7 +163,7 @@ const ToYearlyNudgeBannerDismissable = () => {
type={announcementType}
showCloseButton={daysToProMonthlyEnd > 10}
onButtonClick={() => openPurchaseModal({trackingLocation: 'to_yearly_nudge_annoucement_bar'})}
modalButtonText={t('cloud_billing.nudge_to_yearly.learn_more')}
modalButtonText={t('cloud_billing.nudge_to_yearly.update_billing')}
modalButtonDefaultText='Update billing'
message={message}
showLinkAsButton={true}

View File

@ -55,7 +55,7 @@ export const noBillingHistory = (
</div>
);
export const freeTrial = (onUpgradeMattermostCloud: (callerInfo: string) => void, daysLeftOnTrial: number) => (
export const freeTrial = (onUpgradeMattermostCloud: (callerInfo: string) => void, daysLeftOnTrial: number, reverseTrial: boolean) => (
<div className='UpgradeMattermostCloud'>
<div className='UpgradeMattermostCloud__image'>
<UpgradeSvg
@ -104,10 +104,21 @@ export const freeTrial = (onUpgradeMattermostCloud: (callerInfo: string) => void
onClick={() => onUpgradeMattermostCloud('billing_summary_free_trial_upgrade_button')}
className='UpgradeMattermostCloud__upgradeButton'
>
<FormattedMessage
id='admin.billing.subscription.cloudTrial.subscribeButton'
defaultMessage='Upgrade Now'
/>
{
reverseTrial ? (
<FormattedMessage
id='admin.billing.subscription.cloudTrial.purchaseButton'
defaultMessage='Purchase Now'
/>
) : (
<FormattedMessage
id='admin.billing.subscription.cloudTrial.subscribeButton'
defaultMessage='Upgrade Now'
/>
)
}
</button>
</div>
);

View File

@ -5,6 +5,7 @@ import React from 'react';
import {useSelector} from 'react-redux';
import {getSubscriptionProduct, checkHadPriorTrial, getCloudSubscription} from 'mattermost-redux/selectors/entities/cloud';
import {cloudReverseTrial} from 'mattermost-redux/selectors/entities/preferences';
import {CloudProducts} from 'utils/constants';
@ -27,20 +28,23 @@ type BillingSummaryProps = {
const BillingSummary = ({isFreeTrial, daysLeftOnTrial, onUpgradeMattermostCloud}: BillingSummaryProps) => {
const subscription = useSelector(getCloudSubscription);
const product = useSelector(getSubscriptionProduct);
const reverseTrial = useSelector(cloudReverseTrial);
let body = noBillingHistory;
const isPreTrial = subscription?.is_free_trial === 'false' && subscription?.trial_end_at === 0;
const hasPriorTrial = useSelector(checkHadPriorTrial);
const showTryEnterprise = product?.sku === CloudProducts.STARTER && isPreTrial;
const showUpgradeProfessional = product?.sku === CloudProducts.STARTER && hasPriorTrial;
const isStarterPreTrial = product?.sku === CloudProducts.STARTER && isPreTrial;
const isStarterPostTrial = product?.sku === CloudProducts.STARTER && hasPriorTrial;
if (showTryEnterprise) {
if (isStarterPreTrial && reverseTrial) {
body = <UpgradeToProfessionalCard/>;
} else if (isStarterPreTrial) {
body = tryEnterpriseCard;
} else if (showUpgradeProfessional) {
} else if (isStarterPostTrial) {
body = <UpgradeToProfessionalCard/>;
} else if (isFreeTrial) {
body = freeTrial(onUpgradeMattermostCloud, daysLeftOnTrial);
body = freeTrial(onUpgradeMattermostCloud, daysLeftOnTrial, reverseTrial);
} else if (subscription?.last_invoice && !subscription?.upcoming_invoice) {
const invoice = subscription.last_invoice;
const fullCharges = invoice.line_items.filter((item) => item.type === 'full');

View File

@ -12,6 +12,10 @@
}
}
&__Icon {
padding-top: 8px;
}
&__Title {
color: var(--sys-denim-center-channel-text);
font-family: Metropolis;
@ -21,15 +25,17 @@
}
&__Usage {
color: var(--center-channel-color);
text-align: left;
&-Highlighted {
color: black;
font-weight: 700;
font-weight: bold;
}
}
&__Warning {
color: var(--center-channel-color);
text-align: left;
}

View File

@ -184,8 +184,8 @@ export default function DeleteWorkspaceModal(props: Props) {
className='DeleteWorkspaceModal'
onExited={handleClickCancel}
>
<div>
<LaptopAlertSVG/>
<div className='DeleteWorkspaceModal__Icon'>
<LaptopAlertSVG height={156}/>
</div>
<div className='DeleteWorkspaceModal__Title'>
<FormattedMessage
@ -196,7 +196,7 @@ export default function DeleteWorkspaceModal(props: Props) {
<div className='DeleteWorkspaceModal__Usage'>
<FormattedMessage
id='admin.billing.subscription.deleteWorkspaceModal.usage'
defaultMessage='As part of your paid subscription to Mattermost {product_name} you have created '
defaultMessage='As part of your subscription to Mattermost {sku} you have created '
values={{
sku: product?.name,
}}

View File

@ -31,6 +31,7 @@ describe('components/feature_discovery', () => {
hadPrevCloudTrial={false}
isSubscriptionLoaded={true}
isPaidSubscription={false}
cloudFreeDeprecated={false}
actions={{
getPrevTrialLicense: jest.fn(),
getCloudSubscription: jest.fn(),
@ -58,6 +59,7 @@ describe('components/feature_discovery', () => {
isCloudTrial={false}
hadPrevCloudTrial={false}
isPaidSubscription={false}
cloudFreeDeprecated={false}
isSubscriptionLoaded={true}
actions={{
getPrevTrialLicense: jest.fn(),
@ -87,6 +89,7 @@ describe('components/feature_discovery', () => {
isCloudTrial={false}
hadPrevCloudTrial={false}
isSubscriptionLoaded={false}
cloudFreeDeprecated={false}
isPaidSubscription={false}
actions={{
getPrevTrialLicense: jest.fn(),

View File

@ -59,6 +59,7 @@ type Props = {
isSubscriptionLoaded: boolean;
isPaidSubscription: boolean;
customer?: CloudCustomer;
cloudFreeDeprecated: boolean;
}
type State = {
@ -205,6 +206,23 @@ export default class FeatureDiscovery extends React.PureComponent<Props, State>
extraClass='btn btn-primary'
/>
);
if (this.props.cloudFreeDeprecated) {
ctaPrimaryButton = (
<button
className='btn btn-primary'
data-testid='featureDiscovery_primaryCallToAction'
onClick={() => {
trackEvent(TELEMETRY_CATEGORIES.SELF_HOSTED_ADMIN, 'click_enterprise_contact_sales_feature_discovery');
this.contactSalesFunc();
}}
>
<FormattedMessage
id='admin.ldap_feature_discovery_cloud.call_to_action.primary_sales'
defaultMessage='Contact sales'
/>
</button>
);
}
} else if (hadPrevCloudTrial) {
// if it is cloud, but this account already had a free trial, then the cta button must be Upgrade now
ctaPrimaryButton = (
@ -259,7 +277,7 @@ export default class FeatureDiscovery extends React.PureComponent<Props, State>
/>
</ExternalLink>
{gettingTrialError}
{(!this.props.isCloud || canRequestCloudFreeTrial) && <p className='trial-legal-terms'>
{((!this.props.isCloud || canRequestCloudFreeTrial) && !this.props.cloudFreeDeprecated) && <p className='trial-legal-terms'>
{canRequestCloudFreeTrial ? (
<FormattedMessage
id='admin.feature_discovery.trial-request.accept-terms.cloudFree'

View File

@ -9,6 +9,7 @@ import {getCloudSubscription} from 'mattermost-redux/actions/cloud';
import {Action, GenericAction} from 'mattermost-redux/types/actions';
import {checkHadPriorTrial, getCloudCustomer} from 'mattermost-redux/selectors/entities/cloud';
import {getLicense} from 'mattermost-redux/selectors/entities/general';
import {deprecateCloudFree} from 'mattermost-redux/selectors/entities/preferences';
import {ModalData} from 'types/actions';
import {GlobalState} from 'types/store';
@ -29,6 +30,7 @@ function mapStateToProps(state: GlobalState) {
const hasPriorTrial = checkHadPriorTrial(state);
const isCloudTrial = subscription?.is_free_trial === 'true';
const customer = getCloudCustomer(state);
const cloudFreeDeprecated = deprecateCloudFree(state);
return {
stats: state.entities.admin.analytics,
prevTrialLicense: state.entities.admin.prevTrialLicense,
@ -38,6 +40,7 @@ function mapStateToProps(state: GlobalState) {
hadPrevCloudTrial: hasPriorTrial,
isPaidSubscription: isCloud && license?.SkuShortName !== LicenseSkus.Starter && !isCloudTrial,
customer,
cloudFreeDeprecated,
};
}

View File

@ -0,0 +1,97 @@
// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved.
// See LICENSE.txt for license information.
import React from 'react';
import {screen} from '@testing-library/react';
import {renderWithIntlAndStore} from 'tests/react_testing_utils';
import * as cloudActions from 'mattermost-redux/actions/cloud';
import {CloudProducts} from 'utils/constants';
import PaymentAnnouncementBar from './';
jest.mock('mattermost-redux/actions/cloud', () => {
const original = jest.requireActual('mattermost-redux/actions/cloud');
return {
...original,
__esModule: true,
// just testing that it fired, not that the result updated or anything like that
getCloudCustomer: jest.fn(() => ({type: 'bogus'})),
};
});
describe('PaymentAnnouncementBar', () => {
const happyPathStore = {
entities: {
users: {
currentUserId: 'me',
profiles: {
me: {
roles: 'system_admin',
},
},
},
general: {
license: {
Cloud: 'true',
},
},
cloud: {
subscription: {
product_id: 'prod_something',
last_invoice: {
status: 'failed',
},
},
customer: {
payment_method: {
exp_month: 12,
exp_year: (new Date()).getFullYear() + 1,
},
},
products: {
prod_something: {
id: 'prod_something',
sku: CloudProducts.PROFESSIONAL,
},
},
},
},
views: {
announcementBar: {
announcementBarState: {
announcementBarCount: 1,
},
},
},
};
it('when most recent payment failed, shows that', () => {
renderWithIntlAndStore(<PaymentAnnouncementBar/>, happyPathStore);
screen.getByText('Your most recent payment failed');
});
it('when card is expired, shows that', () => {
const store = JSON.parse(JSON.stringify(happyPathStore));
store.entities.cloud.customer.payment_method.exp_year = (new Date()).getFullYear() - 1;
store.entities.cloud.subscription.last_invoice.status = 'success';
renderWithIntlAndStore(<PaymentAnnouncementBar/>, store);
screen.getByText('Your credit card has expired', {exact: false});
});
it('when needed, fetches, customer', () => {
const store = JSON.parse(JSON.stringify(happyPathStore));
store.entities.cloud.customer = null;
store.entities.cloud.subscription.last_invoice.status = 'success';
renderWithIntlAndStore(<PaymentAnnouncementBar/>, store);
expect(cloudActions.getCloudCustomer).toHaveBeenCalled();
});
it('when not an admin, does not fetch customer', () => {
const store = JSON.parse(JSON.stringify(happyPathStore));
store.entities.users.profiles.me.roles = '';
renderWithIntlAndStore(<PaymentAnnouncementBar/>, store);
expect(cloudActions.getCloudCustomer).not.toHaveBeenCalled();
});
});

View File

@ -1,53 +0,0 @@
// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved.
// See LICENSE.txt for license information.
import {connect} from 'react-redux';
import {bindActionCreators, Dispatch} from 'redux';
import {savePreferences} from 'mattermost-redux/actions/preferences';
import {getLicense} from 'mattermost-redux/selectors/entities/general';
import {GenericAction} from 'mattermost-redux/types/actions';
import {getCloudSubscription, getCloudCustomer} from 'mattermost-redux/actions/cloud';
import {isCurrentUserSystemAdmin} from 'mattermost-redux/selectors/entities/users';
import {
getCloudSubscription as selectCloudSubscription,
getCloudCustomer as selectCloudCustomer,
getSubscriptionProduct,
} from 'mattermost-redux/selectors/entities/cloud';
import {CloudProducts} from 'utils/constants';
import {openModal} from 'actions/views/modals';
import {GlobalState} from 'types/store';
import PaymentAnnouncementBar from './payment_announcement_bar';
function mapStateToProps(state: GlobalState) {
const subscription = selectCloudSubscription(state);
const customer = selectCloudCustomer(state);
const subscriptionProduct = getSubscriptionProduct(state);
return {
userIsAdmin: isCurrentUserSystemAdmin(state),
isCloud: getLicense(state).Cloud === 'true',
subscription,
customer,
isStarterFree: subscriptionProduct?.sku === CloudProducts.STARTER,
};
}
function mapDispatchToProps(dispatch: Dispatch<GenericAction>) {
return {
actions: bindActionCreators(
{
savePreferences,
openModal,
getCloudSubscription,
getCloudCustomer,
},
dispatch,
),
};
}
export default connect(mapStateToProps, mapDispatchToProps)(PaymentAnnouncementBar);

View File

@ -0,0 +1,89 @@
// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved.
// See LICENSE.txt for license information.
import React, {useEffect, useState} from 'react';
import {FormattedMessage} from 'react-intl';
import {useSelector, useDispatch} from 'react-redux';
import {isEmpty} from 'lodash';
import {DispatchFunc} from 'mattermost-redux/types/actions';
import {getCloudCustomer} from 'mattermost-redux/actions/cloud';
import {getLicense} from 'mattermost-redux/selectors/entities/general';
import {
getCloudSubscription as selectCloudSubscription,
getCloudCustomer as selectCloudCustomer,
getSubscriptionProduct,
} from 'mattermost-redux/selectors/entities/cloud';
import {isCurrentUserSystemAdmin} from 'mattermost-redux/selectors/entities/users';
import {getHistory} from 'utils/browser_history';
import {isCustomerCardExpired} from 'utils/cloud_utils';
import {AnnouncementBarTypes, CloudProducts, ConsolePages} from 'utils/constants';
import {t} from 'utils/i18n';
import AnnouncementBar from '../default_announcement_bar';
export default function PaymentAnnouncementBar() {
const [requestedCustomer, setRequestedCustomer] = useState(false);
const dispatch = useDispatch<DispatchFunc>();
const subscription = useSelector(selectCloudSubscription);
const customer = useSelector(selectCloudCustomer);
const isStarterFree = useSelector(getSubscriptionProduct)?.sku === CloudProducts.STARTER;
const userIsAdmin = useSelector(isCurrentUserSystemAdmin);
const isCloud = useSelector(getLicense).Cloud === 'true';
useEffect(() => {
if (isCloud && !isStarterFree && isEmpty(customer) && userIsAdmin && !requestedCustomer) {
setRequestedCustomer(true);
dispatch(getCloudCustomer());
}
},
[isCloud, isStarterFree, customer, userIsAdmin, requestedCustomer]);
const mostRecentPaymentFailed = subscription?.last_invoice?.status === 'failed';
if (
// Prevents banner flashes if the subscription hasn't been loaded yet
isEmpty(subscription) ||
isStarterFree ||
!isCloud ||
!userIsAdmin ||
isEmpty(customer) ||
(!isCustomerCardExpired(customer) && !mostRecentPaymentFailed)
) {
return null;
}
const updatePaymentInfo = () => {
getHistory().push(ConsolePages.PAYMENT_INFO);
};
let message = (
<FormattedMessage
id='admin.billing.subscription.creditCardExpired'
defaultMessage='Your credit card has expired. Update your payment information to avoid disruption.'
/>
);
if (mostRecentPaymentFailed) {
message = (
<FormattedMessage
id='admin.billing.subscription.mostRecentPaymentFailed'
defaultMessage='Your most recent payment failed'
/>
);
}
return (
<AnnouncementBar
type={AnnouncementBarTypes.CRITICAL}
showCloseButton={false}
onButtonClick={updatePaymentInfo}
modalButtonText={t('admin.billing.subscription.updatePaymentInfo')}
modalButtonDefaultText={'Update payment info'}
message={message}
showLinkAsButton={true}
isTallBanner={true}
/>
);
}

View File

@ -1,96 +0,0 @@
// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved.
// See LICENSE.txt for license information.
import React from 'react';
import {isEmpty} from 'lodash';
import {CloudCustomer, Subscription} from '@mattermost/types/cloud';
import {getHistory} from 'utils/browser_history';
import {isCustomerCardExpired} from 'utils/cloud_utils';
import {AnnouncementBarTypes} from 'utils/constants';
import {t} from 'utils/i18n';
import AnnouncementBar from '../default_announcement_bar';
type Props = {
userIsAdmin: boolean;
isCloud: boolean;
subscription?: Subscription;
customer?: CloudCustomer;
isStarterFree: boolean;
actions: {
getCloudSubscription: () => void;
getCloudCustomer: () => void;
};
};
/**
 * Critical announcement banner shown to cloud system admins when there is a
 * payment problem: either the card on file has expired or the most recent
 * invoice payment failed. Clicking the banner button navigates to the
 * billing payment-info admin console page.
 */
class PaymentAnnouncementBar extends React.PureComponent<Props> {
    // Lazily fetch the customer record if it has not been loaded yet.
    async componentDidMount() {
        const {customer, actions} = this.props;
        if (isEmpty(customer)) {
            await actions.getCloudCustomer();
        }
    }

    // True when the latest invoice on the subscription failed to charge.
    isMostRecentPaymentFailed = () => {
        return this.props.subscription?.last_invoice?.status === 'failed';
    };

    // Decide whether the banner is relevant for the current user/workspace.
    shouldShowBanner = () => {
        const {userIsAdmin, isCloud, subscription, isStarterFree, customer} = this.props;

        // Prevents banner flashes if the subscription hasn't been loaded yet
        if (subscription === null) {
            return false;
        }

        // Only cloud system admins on paid plans should ever see this banner.
        if (isStarterFree || !isCloud || !userIsAdmin) {
            return false;
        }

        // Show only when there is an actual payment problem.
        return isCustomerCardExpired(customer) || this.isMostRecentPaymentFailed();
    };

    // Navigate to the billing payment-info page in the admin console.
    updatePaymentInfo = () => {
        getHistory().push('/admin_console/billing/payment_info');
    };

    render() {
        const {customer, subscription} = this.props;

        // Wait for both records to load, then apply the visibility rules.
        if (isEmpty(customer) || isEmpty(subscription) || !this.shouldShowBanner()) {
            return null;
        }

        const messageId = this.isMostRecentPaymentFailed() ?
            t('admin.billing.subscription.mostRecentPaymentFailed') :
            t('admin.billing.subscription.creditCardExpired');

        return (
            <AnnouncementBar
                type={AnnouncementBarTypes.CRITICAL}
                showCloseButton={false}
                onButtonClick={this.updatePaymentInfo}
                modalButtonText={t('admin.billing.subscription.updatePaymentInfo')}
                modalButtonDefaultText={'Update payment info'}
                message={messageId}
                showLinkAsButton={true}
                isTallBanner={true}
            />
        );
    }
}
export default PaymentAnnouncementBar;

View File

@ -6,6 +6,7 @@ import {useIntl} from 'react-intl';
import {useSelector, useDispatch} from 'react-redux';
import {useLocation, useHistory} from 'react-router-dom';
import {redirectUserToDefaultTeam} from 'actions/global_actions';
import {trackEvent} from 'actions/telemetry_actions.jsx';
import LaptopAlertSVG from 'components/common/svg_images_components/laptop_alert_svg';
@ -14,6 +15,7 @@ import LoadingScreen from 'components/loading_screen';
import {clearErrors, logError} from 'mattermost-redux/actions/errors';
import {verifyUserEmail, getMe} from 'mattermost-redux/actions/users';
import {getIsOnboardingFlowEnabled} from 'mattermost-redux/selectors/entities/preferences';
import {getCurrentUserId} from 'mattermost-redux/selectors/entities/users';
import {DispatchFunc} from 'mattermost-redux/types/actions';
@ -38,6 +40,7 @@ const DoVerifyEmail = () => {
const token = params.get('token') ?? '';
const loggedIn = Boolean(useSelector(getCurrentUserId));
const onboardingFlowEnabled = useSelector(getIsOnboardingFlowEnabled);
const [verifyStatus, setVerifyStatus] = useState(VerifyStatus.PENDING);
const [serverError, setServerError] = useState('');
@ -49,11 +52,15 @@ const DoVerifyEmail = () => {
const handleRedirect = () => {
if (loggedIn) {
// need info about whether admin or not,
// and whether admin has already completed
// first time onboarding. Instead of fetching and orchestrating that here,
// let the default root component handle it.
history.push('/');
if (onboardingFlowEnabled) {
// need info about whether admin or not,
// and whether admin has already completed
// first time onboarding. Instead of fetching and orchestrating that here,
// let the default root component handle it.
history.push('/');
return;
}
redirectUserToDefaultTeam();
return;
}

View File

@ -9,7 +9,7 @@ Object {
aria-controls="CENTER_dropdown_post_id_1"
aria-expanded="false"
aria-haspopup="true"
aria-label="Actions"
aria-label="more"
class="post-menu__item"
data-testid="PostDotMenu-Button-post_id_1"
id="CENTER_button_post_id_1"
@ -34,7 +34,7 @@ Object {
aria-controls="CENTER_dropdown_post_id_1"
aria-expanded="false"
aria-haspopup="true"
aria-label="Actions"
aria-label="more"
class="post-menu__item"
data-testid="PostDotMenu-Button-post_id_1"
id="CENTER_button_post_id_1"
@ -121,7 +121,7 @@ exports[`components/dot_menu/DotMenu should match snapshot, on Center 1`] = `
}
menuButton={
Object {
"aria-label": "Actions",
"aria-label": "more",
"children": <DotsHorizontalIcon
size={16}
/>,

View File

@ -497,7 +497,7 @@ export class DotMenuClass extends React.PureComponent<Props, State> {
class: classNames('post-menu__item', {
'post-menu__item--active': this.props.isMenuOpen,
}),
'aria-label': formatMessage({id: 'post_info.dot_menu.tooltip.actions', defaultMessage: 'Actions'}),
'aria-label': formatMessage({id: 'post_info.dot_menu.tooltip.more', defaultMessage: 'More'}).toLowerCase(),
children: <DotsHorizontalIcon size={16}/>,
}}
menu={{

View File

@ -13,6 +13,7 @@ import {checkHadPriorTrial} from 'mattermost-redux/selectors/entities/cloud';
import {isCurrentUserSystemAdmin} from 'mattermost-redux/selectors/entities/users';
import {getLicense} from 'mattermost-redux/selectors/entities/general';
import {getPrevTrialLicense} from 'mattermost-redux/actions/admin';
import {deprecateCloudFree} from 'mattermost-redux/selectors/entities/preferences';
import CloudStartTrialButton from 'components/cloud_start_trial/cloud_start_trial_btn';
import StartTrialBtn from 'components/learn_more_trial_modal/start_trial_btn';
@ -59,6 +60,7 @@ const FeatureRestrictedModal = ({
dispatch(getPrevTrialLicense());
}, []);
const cloudFreeDeprecated = useSelector(deprecateCloudFree);
const hasCloudPriorTrial = useSelector(checkHadPriorTrial);
const prevTrialLicense = useSelector((state: GlobalState) => state.entities.admin.prevTrialLicense);
const hasSelfHostedPriorTrial = prevTrialLicense.IsLicensed === 'true';
@ -100,7 +102,7 @@ const FeatureRestrictedModal = ({
const getTitle = () => {
if (isSystemAdmin) {
return hasPriorTrial ? titleAdminPostTrial : titleAdminPreTrial;
return (hasPriorTrial || cloudFreeDeprecated) ? titleAdminPostTrial : titleAdminPreTrial;
}
return titleEndUser;
@ -108,13 +110,13 @@ const FeatureRestrictedModal = ({
const getMessage = () => {
if (isSystemAdmin) {
return hasPriorTrial ? messageAdminPostTrial : messageAdminPreTrial;
return (hasPriorTrial || cloudFreeDeprecated) ? messageAdminPostTrial : messageAdminPreTrial;
}
return messageEndUser;
};
const showStartTrial = isSystemAdmin && !hasPriorTrial;
const showStartTrial = isSystemAdmin && !hasPriorTrial && !cloudFreeDeprecated;
// define what is the secondary button text and action, by default will be the View Plan button
let secondaryBtnMsg = formatMessage({id: 'feature_restricted_modal.button.plans', defaultMessage: 'View plans'});

View File

@ -8,6 +8,7 @@ import {withRouter} from 'react-router-dom';
import {getConfig} from 'mattermost-redux/selectors/entities/general';
import {GenericAction} from 'mattermost-redux/types/actions';
import {getCurrentRelativeTeamUrl} from 'mattermost-redux/selectors/entities/teams';
import {getIsOnboardingFlowEnabled} from 'mattermost-redux/selectors/entities/preferences';
import {isFirstAdmin} from 'mattermost-redux/selectors/entities/users';
import {getUserGuideDropdownPluginMenuItems} from 'selectors/plugins';
@ -31,6 +32,7 @@ function mapStateToProps(state: GlobalState) {
teamUrl: getCurrentRelativeTeamUrl(state),
pluginMenuItems: getUserGuideDropdownPluginMenuItems(state),
isFirstAdmin: isFirstAdmin(state),
onboardingFlowEnabled: getIsOnboardingFlowEnabled(state),
};
}

View File

@ -34,6 +34,7 @@ describe('components/channel_header/components/UserGuideDropdown', () => {
},
pluginMenuItems: [],
isFirstAdmin: false,
onboardingFlowEnabled: false,
};
test('should match snapshot', () => {

View File

@ -14,6 +14,7 @@ import {isCurrentUserSystemAdmin} from 'mattermost-redux/selectors/entities/user
import {getSubscriptionProduct, checkHadPriorTrial} from 'mattermost-redux/selectors/entities/cloud';
import {DispatchFunc} from 'mattermost-redux/types/actions';
import {getPrevTrialLicense} from 'mattermost-redux/actions/admin';
import {deprecateCloudFree} from 'mattermost-redux/selectors/entities/preferences';
import {closeModal, openModal} from 'actions/views/modals';
@ -43,6 +44,7 @@ export type Props = {
export default function InviteAs(props: Props) {
const {formatMessage} = useIntl();
const license = useSelector(getLicense);
const cloudFreeDeprecated = useSelector(deprecateCloudFree);
const dispatch = useDispatch<DispatchFunc>();
useEffect(() => {
@ -85,7 +87,7 @@ export default function InviteAs(props: Props) {
if (isFreeTrial) {
ctaExtraContentMsg = formatMessage({id: 'free.professional_feature.professional', defaultMessage: 'Professional feature'});
} else {
ctaExtraContentMsg = hasPriorTrial ? formatMessage({id: 'free.professional_feature.upgrade', defaultMessage: 'Upgrade'}) : formatMessage({id: 'free.professional_feature.try_free', defaultMessage: 'Professional feature- try it out free'});
ctaExtraContentMsg = (hasPriorTrial || cloudFreeDeprecated) ? formatMessage({id: 'free.professional_feature.upgrade', defaultMessage: 'Upgrade'}) : formatMessage({id: 'free.professional_feature.try_free', defaultMessage: 'Professional feature- try it out free'});
}
const restrictedIndicator = (

View File

@ -33,6 +33,12 @@ describe('components/learn_more_trial_modal/learn_more_trial_modal', () => {
entities: {
users: {
currentUserId: 'current_user_id',
profiles: {
current_user_id: {
id: 'current_user_id',
roles: '',
},
},
},
admin: {
analytics: {

View File

@ -2,7 +2,7 @@
// See LICENSE.txt for license information.
import React, {useCallback, useEffect, useMemo, useState} from 'react';
import {useIntl} from 'react-intl';
import {FormattedMessage, useIntl} from 'react-intl';
import {useSelector, useDispatch} from 'react-redux';
import {trackEvent} from 'actions/telemetry_actions';
@ -16,10 +16,13 @@ import MonitorImacLikeSVG from 'components/common/svg_images_components/monitor_
import SystemRolesSVG from 'components/admin_console/feature_discovery/features/images/system_roles_svg';
import CloudStartTrialButton from 'components/cloud_start_trial/cloud_start_trial_btn';
import {BtnStyle} from 'components/common/carousel/carousel_button';
import useOpenSalesLink from 'components/common/hooks/useOpenSalesLink';
import ExternalLink from 'components/external_link';
import {closeModal} from 'actions/views/modals';
import {DispatchFunc} from 'mattermost-redux/types/actions';
import {getLicense} from 'mattermost-redux/selectors/entities/general';
import {deprecateCloudFree} from 'mattermost-redux/selectors/entities/preferences';
import StartTrialBtn from './start_trial_btn';
@ -43,8 +46,11 @@ const LearnMoreTrialModal = (
const [embargoed, setEmbargoed] = useState(false);
const dispatch = useDispatch<DispatchFunc>();
const [, salesLink] = useOpenSalesLink();
// Cloud conditions
const license = useSelector(getLicense);
const cloudFreeDeprecated = useSelector(deprecateCloudFree);
const isCloud = license?.Cloud === 'true';
const handleEmbargoError = useCallback(() => {
@ -78,6 +84,20 @@ const LearnMoreTrialModal = (
extraClass={'btn btn-primary start-cloud-trial-btn'}
/>
);
if (cloudFreeDeprecated) {
startTrialBtn = (
<ExternalLink
location='learn_more_trial_modal'
href={salesLink}
className='btn btn-primary start-cloud-trial-btn'
>
<FormattedMessage
id='learn_more_trial_modal.contact_sales'
defaultMessage='Contact sales'
/>
</ExternalLink>
);
}
}
const handleOnClose = useCallback(() => {

View File

@ -2,10 +2,11 @@
// See LICENSE.txt for license information.
import React from 'react';
import {useSelector} from 'react-redux';
import {FormattedMessage} from 'react-intl';
import TrialBenefitsModalStepMore from 'components/trial_benefits_modal/trial_benefits_modal_step_more';
import {deprecateCloudFree} from 'mattermost-redux/selectors/entities/preferences';
import './learn_more_trial_modal_step.scss';
import {AboutLinks, LicenseLinks} from 'utils/constants';
@ -35,6 +36,7 @@ const LearnMoreTrialModalStep = (
buttonLabel,
handleOnClose,
}: LearnMoreTrialModalStepProps) => {
const cloudFreeDeprecated = useSelector(deprecateCloudFree);
return (
<div
id={`learnMoreTrialModalStep-${id}`}
@ -59,32 +61,36 @@ const LearnMoreTrialModalStep = (
telemetryId={'learn_more_trial_modal'}
/>
)}
<div className='disclaimer'>
<span>
<FormattedMessage
id='start_trial.modal.disclaimer'
defaultMessage='By clicking “Start trial”, I agree to the <linkEvaluation>Mattermost Software and Services License Agreement</linkEvaluation>, <linkPrivacy>privacy policy</linkPrivacy> and receiving product emails.'
values={{
linkEvaluation: (msg: React.ReactNode) => (
<ExternalLink
href={LicenseLinks.SOFTWARE_SERVICES_LICENSE_AGREEMENT}
location='learn_more_trial_modal_step'
>
{msg}
</ExternalLink>
),
linkPrivacy: (msg: React.ReactNode) => (
<ExternalLink
href={AboutLinks.PRIVACY_POLICY}
location='learn_more_trial_modal_step'
>
{msg}
</ExternalLink>
),
}}
/>
</span>
</div>
{
cloudFreeDeprecated ? '' : (
<div className='disclaimer'>
<span>
<FormattedMessage
id='start_trial.modal.disclaimer'
defaultMessage='By clicking “Start trial”, I agree to the <linkEvaluation>Mattermost Software and Services License Agreement</linkEvaluation>, <linkPrivacy>privacy policy</linkPrivacy> and receiving product emails.'
values={{
linkEvaluation: (msg: React.ReactNode) => (
<ExternalLink
href={LicenseLinks.SOFTWARE_SERVICES_LICENSE_AGREEMENT}
location='learn_more_trial_modal_step'
>
{msg}
</ExternalLink>
),
linkPrivacy: (msg: React.ReactNode) => (
<ExternalLink
href={AboutLinks.PRIVACY_POLICY}
location='learn_more_trial_modal_step'
>
{msg}
</ExternalLink>
),
}}
/>
</span>
</div>
)
}
{bottomLeftMessage && (
<div className='bottom-text-left-message'>
{bottomLeftMessage}

Some files were not shown because too many files have changed in this diff Show More