Mirror of https://github.com/mattermost/mattermost.git (synced 2025-02-25 18:55:24 -06:00)
Fix MM56373 (#25946)

* Fix MM56373
* Fix typo

Co-authored-by: Mattermost Build <build@mattermost.com>

Parent: b668c9b048
Commit: 2415438d88

webapp/channels/src/utils/markdown/renderer.test.tsx (new file, 34 lines)
@@ -0,0 +1,34 @@
+// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved.
+// See LICENSE.txt for license information.
+
+import Renderer from './renderer';
+
+describe('code', () => {
+    test('too many tokens result in no search rendering', () => {
+        const renderer = new Renderer({}, {searchPatterns: [{pattern: new RegExp('\\b()(foo)\\b', 'gi'), term: 'foo'}]});
+        let originalString = 'foo '.repeat(501);
+        let result = renderer.code(originalString, '');
+
+        expect(result.includes('post-code__search-highlighting')).toBeTruthy();
+
+        originalString = originalString.repeat(2);
+        result = renderer.code(originalString, '');
+
+        expect(result.includes('post-code__search-highlighting')).toBeFalsy();
+    });
+});
+
+describe('codespan', () => {
+    test('too many tokens result in no search rendering', () => {
+        const renderer = new Renderer({}, {searchPatterns: [{pattern: new RegExp('\\b()(foo)\\b', 'gi'), term: 'foo'}]});
+        let originalString = 'foo '.repeat(501);
+        let result = renderer.codespan(originalString);
+
+        expect(result.includes('search-highlight')).toBeTruthy();
+
+        originalString = originalString.repeat(2);
+        result = renderer.codespan(originalString);
+
+        expect(result.includes('search-highlight')).toBeFalsy();
+    });
+});
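
Note (not part of the commit): the 501 and 1002 repetitions in these tests straddle the new token cap. This is a rough sketch of the arithmetic, assuming each regex match of the search term adds one entry to the token map and that the cap is the FORMAT_TOKEN_LIMIT of 1000 introduced further down in text_formatting.tsx.

// Illustration only: why 501 repetitions still get highlighted while 1002 do not.
const FORMAT_TOKEN_LIMIT = 1000; // value introduced by this commit

const under = ('foo '.repeat(501).match(/\bfoo\b/gi) || []).length;  // 501  -> within the cap, highlighting markup is emitted
const over = ('foo '.repeat(1002).match(/\bfoo\b/gi) || []).length;  // 1002 -> over the cap, the renderer falls back to plain output

console.log(under <= FORMAT_TOKEN_LIMIT, over <= FORMAT_TOKEN_LIMIT); // true false
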
@@ -40,23 +40,29 @@ export default class Renderer extends marked.Renderer {
         let searchedContent = '';
 
         if (this.formattingOptions.searchPatterns) {
-            const tokens = new Map();
-
-            let searched = TextFormatting.sanitizeHtml(code);
-            searched = TextFormatting.highlightSearchTerms(
-                searched,
-                tokens,
-                this.formattingOptions.searchPatterns,
-            );
-
-            if (tokens.size > 0) {
-                searched = TextFormatting.replaceTokens(searched, tokens);
-
-                searchedContent = (
-                    '<div class="post-code__search-highlighting">' +
-                    searched +
-                    '</div>'
-                );
-            }
+            try {
+                const tokens = new TextFormatting.Tokens();
+
+                let searched = TextFormatting.sanitizeHtml(code);
+                searched = TextFormatting.highlightSearchTerms(
+                    searched,
+                    tokens,
+                    this.formattingOptions.searchPatterns,
+                );
+
+                if (tokens.size > 0) {
+                    searched = TextFormatting.replaceTokens(searched, tokens);
+
+                    searchedContent = (
+                        '<div class="post-code__search-highlighting">' +
+                        searched +
+                        '</div>'
+                    );
+                }
+            } catch (error) {
+                if (!TextFormatting.isFormatTokenLimitError(error)) {
+                    throw error;
+                }
+            }
         }
 
@@ -69,13 +75,20 @@ export default class Renderer extends marked.Renderer {
         let output = text;
 
         if (this.formattingOptions.searchPatterns) {
-            const tokens = new Map();
-            output = TextFormatting.highlightSearchTerms(
-                output,
-                tokens,
-                this.formattingOptions.searchPatterns,
-            );
-            output = TextFormatting.replaceTokens(output, tokens);
+            try {
+                const tokens = new TextFormatting.Tokens();
+                output = TextFormatting.highlightSearchTerms(
+                    output,
+                    tokens,
+                    this.formattingOptions.searchPatterns,
+                );
+                output = TextFormatting.replaceTokens(output, tokens);
+            } catch (error) {
+                if (!TextFormatting.isFormatTokenLimitError(error)) {
+                    throw error;
+                }
+                output = text;
+            }
         }
 
         return (
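
Note (not part of the commit): the two hunks above wrap the Renderer's code() and codespan() search highlighting in a try/catch, so that when the new token cap is hit, highlighting is simply skipped instead of blowing up the render. A minimal sketch of that pattern follows; highlightFn and isLimitError are placeholder names standing in for TextFormatting.highlightSearchTerms and TextFormatting.isFormatTokenLimitError, not the real API.

// Sketch of the fallback pattern used in code()/codespan().
function highlightOrFallback(plain: string, highlightFn: (s: string) => string, isLimitError: (e: unknown) => boolean): string {
    try {
        return highlightFn(plain);   // may throw once the token cap is exceeded
    } catch (error) {
        if (!isLimitError(error)) {
            throw error;             // unrelated errors still propagate
        }
        return plain;                // limit reached: render without search highlighting
    }
}
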
@@ -17,11 +17,64 @@ import {
     highlightWithoutNotificationKeywords,
     parseSearchTerms,
     autolinkChannelMentions,
+    Tokens,
+    isFormatTokenLimitError,
+    doFormatText,
+    replaceTokens,
 } from 'utils/text_formatting';
 import type {ChannelNamesMap} from 'utils/text_formatting';
 
 const emptyEmojiMap = new EmojiMap(new Map());
 
+describe('tokens', () => {
+    test('should throw an error when too many elements are added to the map', () => {
+        const tokens = new Tokens();
+        const testValue = {value: 'test', originalText: 'test'};
+        for (let i = 0; i < 999; i++) {
+            tokens.set(`${i}`, testValue);
+        }
+        expect(() => tokens.set('999', testValue)).not.toThrow();
+        expect(() => tokens.set('0', testValue)).not.toThrow();
+        expect(() => tokens.set('1000', testValue)).toThrow('maximum number of tokens reached');
+    });
+});
+
+describe('isFormatTokenLimitError', () => {
+    const ttcc = [
+        {
+            name: 'undefined',
+            error: undefined,
+            expected: false,
+        },
+        {
+            name: 'string',
+            error: 'some error',
+            expected: false,
+        },
+        {
+            name: 'object',
+            error: {someProperty: 'foo'},
+            expected: false,
+        },
+        {
+            name: 'other error',
+            error: new Error('foo'),
+            expected: false,
+        },
+        {
+            name: 'correct error',
+            error: new Error('maximum number of tokens reached'),
+            expected: true,
+        },
+    ];
+
+    for (const tc of ttcc) {
+        test(`should return ${tc.expected} when the error is ${tc.name}`, () => {
+            expect(isFormatTokenLimitError(tc.error)).toEqual(tc.expected);
+        });
+    }
+});
+
 describe('formatText', () => {
     test('jumbo emoji should be able to handle up to 3 spaces before the emoji character', () => {
         const emoji = ':)';
@@ -492,3 +545,34 @@ describe('parseSearchTerms', () => {
         });
     }
 });
+
+describe('doFormatText', () => {
+    test('too many tokens results in returning the same input string', () => {
+        let originalText = '@sysadmin '.repeat(501);
+        let result = doFormatText(originalText, {atMentions: true}, emptyEmojiMap);
+        expect(result).not.toEqual(originalText);
+
+        originalText = originalText.repeat(2);
+        result = doFormatText(originalText, {atMentions: true}, emptyEmojiMap);
+        expect(result).toEqual(originalText);
+    });
+});
+
+describe('replaceTokens', () => {
+    describe('properly escape especial replace patterns', () => {
+        const ttcc = [
+            'foo$&foo',
+            'foo$`foo',
+            'foo$\'foo',
+        ];
+
+        for (const tc of ttcc) {
+            test(tc, () => {
+                const tokens = new Tokens([['$alias$', {originalText: 'foo', value: tc}]]);
+
+                const result = replaceTokens('$alias$', tokens);
+                expect(result).toEqual(tc);
+            });
+        }
+    });
+});
@@ -20,6 +20,13 @@ const AT_MENTION_PATTERN = /(?:\B|\b_+)@([a-z0-9.\-_]+)/gi;
 const UNICODE_EMOJI_REGEX = emojiRegex();
 const htmlEmojiPattern = /^<p>\s*(?:<img class="emoticon"[^>]*>|<span data-emoticon[^>]*>[^<]*<\/span>\s*|<span class="emoticon emoticon--unicode">[^<]*<\/span>\s*)+<\/p>$/;
 
+const FORMAT_TOKEN_LIMIT = 1000;
+const FORMAT_TOKEN_LIMIT_ERROR = 'maximum number of tokens reached';
+
+export function isFormatTokenLimitError(error: unknown) {
+    return Boolean(error && typeof error === 'object' && 'message' in error && error.message === FORMAT_TOKEN_LIMIT_ERROR);
+}
+
 // Performs formatting of user posts including highlighting mentions and search terms and converting urls, hashtags,
 // @mentions and ~channels to links by taking a user's message and returning a string of formatted html. Also takes
 // a number of options as part of the second parameter:
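
Note (not part of the commit): isFormatTokenLimitError identifies the sentinel error by its message string rather than by a dedicated error class, and it narrows an unknown value by hand so that anything caught in a catch block can be passed in safely. A small usage sketch:

// Usage sketch: only an object whose message equals the sentinel string counts as the limit error.
try {
    throw new Error('maximum number of tokens reached');
} catch (error) {                                    // error is typed as unknown here
    console.log(isFormatTokenLimitError(error));     // true
    console.log(isFormatTokenLimitError('oops'));    // false: a plain string has no message property
}
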
@@ -30,11 +37,6 @@ export type ChannelNamesMap = {
     } | string;
 };
 
-export type Tokens = Map<
-    string,
-    { value: string; originalText: string; hashtag?: string }
->;
-
 export type SearchPattern = {
     pattern: RegExp;
     term: string;
@@ -202,6 +204,16 @@ export interface TextFormattingOptionsBase {
 
 export type TextFormattingOptions = Partial<TextFormattingOptionsBase>;
 
+export class Tokens extends Map<string, {value: string; originalText: string; hashtag?: string}> {
+    set(key: string, value: {value: string; originalText: string; hashtag?: string}) {
+        super.set(key, value);
+        if (this.size > FORMAT_TOKEN_LIMIT) {
+            throw new Error(FORMAT_TOKEN_LIMIT_ERROR);
+        }
+        return this;
+    }
+}
+
 const DEFAULT_OPTIONS: TextFormattingOptions = {
     mentionHighlight: true,
     disableGroupHighlight: false,
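
Note (not part of the commit): Tokens.set() inserts first and only then checks the size, so the entry that pushes the map past FORMAT_TOKEN_LIMIT is still stored before the error is thrown; callers are expected to catch the error and discard the half-built map, which is what doFormatText does in the next hunk. A rough behaviour sketch, with arbitrary illustrative keys:

// Behaviour sketch: the 1001st distinct key both lands in the map and triggers the throw.
const tokens = new Tokens();
for (let i = 0; i < 1000; i++) {
    tokens.set(`token${i}`, {value: String(i), originalText: String(i)}); // fills the map up to the cap of 1000
}
try {
    tokens.set('token1000', {value: 'x', originalText: 'x'});             // 1001st entry
} catch (error) {
    console.log(isFormatTokenLimitError(error), tokens.size);             // true 1001: stored before the size check throws
}
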
@@ -328,57 +340,64 @@ export function formatText(
 export function doFormatText(text: string, options: TextFormattingOptions, emojiMap: EmojiMap): string {
     let output = text;
 
-    const tokens = new Map();
+    const tokens = new Tokens();
 
-    // replace important words and phrases with tokens
-    if (options.atMentions) {
-        output = autolinkAtMentions(output, tokens);
-    }
-
-    if (options.atSumOfMembersMentions) {
-        output = autoLinkSumOfMembersMentions(output, tokens);
-    }
-
-    if (options.atPlanMentions) {
-        output = autoPlanMentions(output, tokens);
-    }
-
-    if (options.channelNamesMap) {
-        output = autolinkChannelMentions(
-            output,
-            tokens,
-            options.channelNamesMap,
-            options.team,
-        );
-    }
-
-    output = autolinkEmails(output, tokens);
-    output = autolinkHashtags(output, tokens, options.minimumHashtagLength);
-
-    if (!('emoticons' in options) || options.emoticons) {
-        output = Emoticons.handleEmoticons(output, tokens);
-    }
-
-    if (options.searchPatterns) {
-        output = highlightSearchTerms(output, tokens, options.searchPatterns);
-    }
-
-    if (!('mentionHighlight' in options) || options.mentionHighlight) {
-        output = highlightCurrentMentions(output, tokens, options.mentionKeys);
-    }
-
-    if (options.highlightKeys && options.highlightKeys.length > 0) {
-        output = highlightWithoutNotificationKeywords(output, tokens, options.highlightKeys);
-    }
-
-    if (!('emoticons' in options) || options.emoticons) {
-        output = handleUnicodeEmoji(output, emojiMap, UNICODE_EMOJI_REGEX);
-    }
-
-    // reinsert tokens with formatted versions of the important words and phrases
-    output = replaceTokens(output, tokens);
-
-    return output;
+    try {
+        // replace important words and phrases with tokens
+        if (options.atMentions) {
+            output = autolinkAtMentions(output, tokens);
+        }
+
+        if (options.atSumOfMembersMentions) {
+            output = autoLinkSumOfMembersMentions(output, tokens);
+        }
+
+        if (options.atPlanMentions) {
+            output = autoPlanMentions(output, tokens);
+        }
+
+        if (options.channelNamesMap) {
+            output = autolinkChannelMentions(
+                output,
+                tokens,
+                options.channelNamesMap,
+                options.team,
+            );
+        }
+
+        output = autolinkEmails(output, tokens);
+        output = autolinkHashtags(output, tokens, options.minimumHashtagLength);
+
+        if (!('emoticons' in options) || options.emoticons) {
+            output = Emoticons.handleEmoticons(output, tokens);
+        }
+
+        if (options.searchPatterns) {
+            output = highlightSearchTerms(output, tokens, options.searchPatterns);
+        }
+
+        if (!('mentionHighlight' in options) || options.mentionHighlight) {
+            output = highlightCurrentMentions(output, tokens, options.mentionKeys);
+        }
+
+        if (options.highlightKeys && options.highlightKeys.length > 0) {
+            output = highlightWithoutNotificationKeywords(output, tokens, options.highlightKeys);
+        }
+
+        if (!('emoticons' in options) || options.emoticons) {
+            output = handleUnicodeEmoji(output, emojiMap, UNICODE_EMOJI_REGEX);
+        }
+
+        // reinsert tokens with formatted versions of the important words and phrases
+        output = replaceTokens(output, tokens) || text;
+        return output;
+    } catch (error) {
+        if (isFormatTokenLimitError(error)) {
+            return text;
+        }
+
+        throw error;
+    }
 }
 
 export function sanitizeHtml(text: string): string {
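
Note (not part of the commit): the net effect of the hunk above is that the whole token-producing pipeline now runs inside a try block, and as soon as any step pushes the token map past the cap, doFormatText returns the original message unformatted instead of spending unbounded time on it. A usage sketch that mirrors the new doFormatText test, assuming an EmojiMap instance is available as in the test file:

// Observable behaviour sketch: over the cap, the input comes back verbatim.
const emojiMap = new EmojiMap(new Map());          // assumed available, as in the test file
const small = '@sysadmin '.repeat(501);            // 501 mention tokens: formatted as usual
const huge = small.repeat(2);                      // 1002 mention tokens: over the cap

console.log(doFormatText(small, {atMentions: true}, emojiMap) !== small); // true, mentions were linkified
console.log(doFormatText(huge, {atMentions: true}, emojiMap) === huge);   // true, returned unchanged
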
@@ -618,6 +637,10 @@ export function escapeRegex(text?: string): string {
     return text?.replace(/[-/\\^$*+?.()|[\]{}]/g, '\\$&') || '';
 }
 
+export function escapeReplaceSpecialPatterns(text?: string): string {
+    return text?.replace(/[$]/g, '$$$$') || '';
+}
+
 const htmlEntities = {
     '&': '&amp;',
     '<': '&lt;',
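
Note (not part of the commit): escapeReplaceSpecialPatterns exists because String.prototype.replace treats '$&', '$`', "$'" and '$$' specially in the replacement string, so a token value containing '$&' would otherwise re-insert the matched alias. Doubling every '$' (the '$$$$' in the regex replacement emits '$$', which a later replace reads back as a literal '$') neutralises that. A quick illustration:

// Why the '$' escaping matters: '$&' in a replacement string re-inserts the matched text.
const alias = '$alias$';

console.log(alias.replace(alias, 'foo$&foo'));                                // 'foo$alias$foo' - corrupted output
console.log(alias.replace(alias, escapeReplaceSpecialPatterns('foo$&foo')));  // 'foo$&foo'      - value survives intact
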
@@ -1022,7 +1045,7 @@ export function replaceTokens(text: string, tokens: Tokens): string {
     for (let i = aliases.length - 1; i >= 0; i--) {
         const alias = aliases[i];
         const token = tokens.get(alias);
-        output = output.replace(alias, token ? token.value : '');
+        output = output.replace(alias, escapeReplaceSpecialPatterns(token?.value || ''));
     }
 
     return output;