Feature: CommonMark support

This adds the markdown.it engine to Discourse.
https://github.com/markdown-it/markdown-it

As the migration is going to take a while, the new engine is disabled
by default. To enable it you must change the hidden site setting:
enable_experimental_markdown_it.

This commit is a squash of many other commits, it also includes some
improvements to autospec (ability to run plugins), and a dev dependency
on the og gem for html normalization.
This commit is contained in:
Sam
2017-06-08 18:02:30 -04:00
parent 6048ca2b7d
commit 234694b50f
57 changed files with 11146 additions and 96 deletions

View File

@@ -6,7 +6,7 @@ import { categoryHashtagTriggerRule } from 'discourse/lib/category-hashtags';
import { TAG_HASHTAG_POSTFIX } from 'discourse/lib/tag-hashtags';
import { search as searchCategoryTag } from 'discourse/lib/category-tag-search';
import { SEPARATOR } from 'discourse/lib/category-hashtags';
import { cook } from 'discourse/lib/text';
import { cookAsync } from 'discourse/lib/text';
import { translations } from 'pretty-text/emoji/data';
import { emojiSearch, isSkinTonableEmoji } from 'pretty-text/emoji';
import { emojiUrlFor } from 'discourse/lib/text';
@@ -279,14 +279,14 @@ export default Ember.Component.extend({
const value = this.get('value');
const markdownOptions = this.get('markdownOptions') || {};
markdownOptions.siteSettings = this.siteSettings;
this.set('preview', cook(value));
Ember.run.scheduleOnce('afterRender', () => {
if (this._state !== "inDOM") { return; }
const $preview = this.$('.d-editor-preview');
if ($preview.length === 0) return;
this.sendAction('previewUpdated', $preview);
cookAsync(value, markdownOptions).then(cooked => {
this.set('preview', cooked);
Ember.run.scheduleOnce('afterRender', () => {
if (this._state !== "inDOM") { return; }
const $preview = this.$('.d-editor-preview');
if ($preview.length === 0) return;
this.sendAction('previewUpdated', $preview);
});
});
},

View File

@@ -2,22 +2,37 @@ import { default as PrettyText, buildOptions } from 'pretty-text/pretty-text';
import { performEmojiUnescape, buildEmojiUrl } from 'pretty-text/emoji';
import WhiteLister from 'pretty-text/white-lister';
import { sanitize as textSanitize } from 'pretty-text/sanitizer';
import loadScript from 'discourse/lib/load-script';
function getOpts() {
function getOpts(opts) {
const siteSettings = Discourse.__container__.lookup('site-settings:main');
return buildOptions({
opts = _.merge({
getURL: Discourse.getURLWithCDN,
currentUser: Discourse.__container__.lookup('current-user:main'),
siteSettings
});
}, opts);
return buildOptions(opts);
}
// Use this to easily create a pretty text instance with proper options
export function cook(text) {
return new Handlebars.SafeString(new PrettyText(getOpts()).cook(text));
export function cook(text, options) {
return new Handlebars.SafeString(new PrettyText(getOpts(options)).cook(text));
}
// everything should eventually move to async API and this should be renamed
// cook
export function cookAsync(text, options) {
if (Discourse.MarkdownItURL) {
return loadScript(Discourse.MarkdownItURL)
.then(()=>cook(text, options));
} else {
return Ember.RSVP.Promise.resolve(cook(text));
}
}
export function sanitize(text) {
return textSanitize(text, new WhiteLister(getOpts()));
}

View File

@@ -0,0 +1,11 @@
// Sprockets manifest for the experimental markdown-it engine: pulls in the
// vendored markdown-it library plus every markdown-it feature module.
//= require markdown-it.js
//= require ./pretty-text/engines/markdown-it/helpers
//= require ./pretty-text/engines/markdown-it/mentions
//= require ./pretty-text/engines/markdown-it/quotes
//= require ./pretty-text/engines/markdown-it/emoji
//= require ./pretty-text/engines/markdown-it/onebox
//= require ./pretty-text/engines/markdown-it/bbcode-block
//= require ./pretty-text/engines/markdown-it/bbcode-inline
//= require ./pretty-text/engines/markdown-it/code
//= require ./pretty-text/engines/markdown-it/category-hashtag
//= require ./pretty-text/engines/markdown-it/censored

View File

@@ -4,6 +4,7 @@
//= require ./pretty-text/emoji/data
//= require ./pretty-text/emoji
//= require ./pretty-text/engines/discourse-markdown
//= require ./pretty-text/engines/discourse-markdown-it
//= require_tree ./pretty-text/engines/discourse-markdown
//= require xss.min
//= require better_markdown.js

View File

@@ -2,9 +2,11 @@ function escapeRegexp(text) {
return text.replace(/[-/\\^$*+?.()|[\]{}]/g, '\\$&');
}
export function censor(text, censoredWords, censoredPattern) {
let patterns = [],
originalText = text;
export function censorFn(censoredWords, censoredPattern, replacementLetter) {
let patterns = [];
replacementLetter = replacementLetter || "■";
if (censoredWords && censoredWords.length) {
patterns = censoredWords.split("|").map(t => `(${escapeRegexp(t)})`);
@@ -21,19 +23,35 @@ export function censor(text, censoredWords, censoredPattern) {
censorRegexp = new RegExp("(\\b(?:" + patterns.join("|") + ")\\b)(?![^\\(]*\\))", "ig");
if (censorRegexp) {
let m = censorRegexp.exec(text);
while (m && m[0]) {
if (m[0].length > originalText.length) { return originalText; } // regex is dangerous
const replacement = new Array(m[0].length+1).join('■');
text = text.replace(new RegExp(`(\\b${escapeRegexp(m[0])}\\b)(?![^\\(]*\\))`, "ig"), replacement);
m = censorRegexp.exec(text);
}
return function(text) {
let original = text;
try {
let m = censorRegexp.exec(text);
while (m && m[0]) {
if (m[0].length > original.length) { return original; } // regex is dangerous
const replacement = new Array(m[0].length+1).join(replacementLetter);
text = text.replace(new RegExp(`(\\b${escapeRegexp(m[0])}\\b)(?![^\\(]*\\))`, "ig"), replacement);
m = censorRegexp.exec(text);
}
return text;
} catch (e) {
return original;
}
};
}
} catch(e) {
return originalText;
// fall through
}
}
return text;
return function(t){ return t;};
}
export function censor(text, censoredWords, censoredPattern, replacementLetter) {
return censorFn(censoredWords, censoredPattern, replacementLetter)(text);
}

View File

@@ -0,0 +1,139 @@
import { default as WhiteLister, whiteListFeature } from 'pretty-text/white-lister';
import { sanitize } from 'pretty-text/sanitizer';
// Builds a no-op replacement for a legacy (pre markdown-it) engine hook.
// Calling the returned function only logs a deprecation warning naming the
// feature and the hook that no longer does anything.
function deprecate(feature, name){
  return () => {
    if (console && console.log) {
      console.log(`${feature}: ${name} is deprecated, please use the new markdown it APIs`);
    }
  };
}
// Builds the `helper` object handed to each feature module's setup().
// Legacy (pre markdown-it) hooks are wired to deprecation warnings; the
// markdown-it era hooks register into the shared callback arrays so the
// engine bootstrap can run them later. `opts` is accepted for signature
// compatibility but unused here.
function createHelper(featureName, opts, optionCallbacks, pluginCallbacks, getOptions) {
  const legacyHooks = [
    'registerInline', 'replaceBlock', 'addPreProcessor', 'inlineReplace',
    'postProcessTag', 'inlineRegexp', 'inlineBetween', 'postProcessText',
    'onParseNode', 'registerBlock'
  ];

  const helper = {
    markdownIt: true,
    whiteList: info => whiteListFeature(featureName, info),
    // hack to allow moving of getOptions
    getOptions: () => getOptions.f(),
    registerOptions(callback) {
      optionCallbacks.push([featureName, callback]);
    },
    registerPlugin(callback) {
      pluginCallbacks.push([featureName, callback]);
    }
  };

  legacyHooks.forEach(hook => {
    helper[hook] = deprecate(featureName, hook);
  });

  return helper;
}
// TODO we may just use a proper ruler from markdown it... this is a basic proxy
// Minimal ordered registry of named rules, mirroring the bits of
// markdown-it's Ruler API the bbcode block parser needs.
class Ruler {
  constructor() {
    this.rules = [];
  }

  // Rules in registration order.
  getRules() {
    return this.rules;
  }

  // Register `rule` under `name` at the end of the list.
  push(name, rule) {
    this.rules.push({name, rule});
  }
}
// block bb code ruler for parsing of quotes / code / polls
// Attaches a bbcode ruler to the markdown-it block parser so features can
// register [tag] block rules without touching markdown-it's own rulers.
function setupBlockBBCode(md) {
  md.block.bbcode_ruler = new Ruler();
}
// One-time engine bootstrap. Discovers every discourse-markdown /
// markdown-it feature module, lets each register options and markdown-it
// plugins, then builds the engine on `opts.engine` and moves all
// discourse-specific options under `opts.discourse`.
export function setup(opts, siteSettings, state) {
  // idempotent: a second call is a no-op
  if (opts.setup) {
    return;
  }

  opts.markdownIt = true;

  let optionCallbacks = [];
  let pluginCallbacks = [];

  // ideally I would like to change the top level API a bit, but in the mean time this will do
  let getOptions = {
    f: () => opts
  };

  const check = /discourse-markdown\/|markdown-it\//;
  let features = [];

  // find every feature module the asset pipeline knows about and run its
  // setup() with a fresh helper
  Object.keys(require._eak_seen).forEach(entry => {
    if (check.test(entry)) {
      const module = require(entry);
      if (module && module.setup) {
        const featureName = entry.split('/').reverse()[0];
        features.push(featureName);
        module.setup(createHelper(featureName, opts, optionCallbacks, pluginCallbacks, getOptions));
      }
    }
  });

  optionCallbacks.forEach(([,callback])=>{
    callback(opts, siteSettings, state);
  });

  // enable all features by default
  features.forEach(feature => {
    if (!opts.features.hasOwnProperty(feature)) {
      opts.features[feature] = true;
    }
  });

  // relocate every existing key under opts.discourse so the top-level
  // object only carries engine-level state
  let copy = {};
  Object.keys(opts).forEach(entry => {
    copy[entry] = opts[entry];
    delete opts[entry];
  });

  opts.discourse = copy;
  getOptions.f = () => opts.discourse;

  opts.engine = window.markdownit({
    discourse: opts.discourse,
    html: true,
    breaks: opts.discourse.features.newline,
    xhtmlOut: false,
    linkify: true,
    typographer: false
  });

  setupBlockBBCode(opts.engine);

  // only install plugins whose feature flag is enabled
  pluginCallbacks.forEach(([feature, callback])=>{
    if (opts.discourse.features[feature]) {
      opts.engine.use(callback);
    }
  });

  // top level markdown it notifier
  opts.markdownIt = true;
  opts.setup = true;

  if (!opts.discourse.sanitizer) {
    // default to the real sanitizer unless sanitization is explicitly off
    opts.sanitizer = opts.discourse.sanitizer = (!!opts.discourse.sanitize) ? sanitize : a=>a;
  }
}
// Renders raw markdown with the configured markdown-it engine, then runs
// the html through the feature-aware sanitizer. `opts` must have been
// prepared by setup() above.
export function cook(raw, opts) {
  const whiteLister = new WhiteLister(opts.discourse);
  return opts.discourse.sanitizer(opts.engine.render(raw), whiteLister).trim();
}

View File

@@ -385,14 +385,25 @@ export function cook(raw, opts) {
currentOpts = opts;
hoisted = {};
raw = hoistCodeBlocksAndSpans(raw);
preProcessors.forEach(p => raw = p(raw));
if (!currentOpts.enableExperimentalMarkdownIt) {
raw = hoistCodeBlocksAndSpans(raw);
preProcessors.forEach(p => raw = p(raw));
}
const whiteLister = new WhiteLister(opts);
const tree = parser.toHTMLTree(raw, 'Discourse');
let result = opts.sanitizer(parser.renderJsonML(parseTree(tree, opts)), whiteLister);
let result;
if (currentOpts.enableExperimentalMarkdownIt) {
result = opts.sanitizer(
require('pretty-text/engines/markdown-it/instance').default(opts).render(raw),
whiteLister
);
} else {
const tree = parser.toHTMLTree(raw, 'Discourse');
result = opts.sanitizer(parser.renderJsonML(parseTree(tree, opts)), whiteLister);
}
// If we hoisted out anything, put it back
const keys = Object.keys(hoisted);

View File

@@ -21,6 +21,7 @@ const urlReplacerArgs = {
};
export function setup(helper) {
if (helper.markdownIt) { return; }
helper.inlineRegexp(_.merge({start: 'http'}, urlReplacerArgs));
helper.inlineRegexp(_.merge({start: 'www'}, urlReplacerArgs));
}

View File

@@ -102,6 +102,8 @@ export function builders(helper) {
export function setup(helper) {
if (helper.markdownIt) { return; }
helper.whiteList(['span.bbcode-b', 'span.bbcode-i', 'span.bbcode-u', 'span.bbcode-s']);
const { replaceBBCode, rawBBCode, removeEmptyLines, replaceBBCodeParamsRaw } = builders(helper);

View File

@@ -35,6 +35,8 @@ function unhoist(obj,from,to){
export function setup(helper) {
if (helper.markdownIt) { return; }
function replaceMarkdown(match, tag) {
const hash = guid();

View File

@@ -1,4 +1,7 @@
export function setup(helper) {
if (helper.markdownIt) { return; }
helper.inlineRegexp({
start: '#',
matcher: /^#([\w-:]{1,101})/i,

View File

@@ -8,6 +8,9 @@ registerOption((siteSettings, opts) => {
});
export function setup(helper) {
if (helper.markdownIt) { return; }
helper.addPreProcessor(text => {
const options = helper.getOptions();
return censor(text, options.censoredWords, options.censoredPattern);

View File

@@ -21,6 +21,8 @@ registerOption((siteSettings, opts) => {
export function setup(helper) {
if (helper.markdownIt) { return; }
helper.whiteList({
custom(tag, name, value) {
if (tag === 'code' && name === 'class') {

View File

@@ -35,6 +35,8 @@ registerOption((siteSettings, opts, state) => {
export function setup(helper) {
if (helper.markdownIt) { return; }
helper.whiteList('img.emoji');
function imageFor(code) {

View File

@@ -21,6 +21,8 @@ function splitAtLast(tag, block, next, first) {
export function setup(helper) {
if (helper.markdownIt) { return; }
// If a row begins with HTML tags, don't parse it.
helper.registerBlock('html', function(block, next) {
let split, pos;

View File

@@ -5,6 +5,8 @@
**/
export function setup(helper) {
if (helper.markdownIt) { return; }
// We have to prune @mentions that are within links.
helper.onParseNode(event => {
const node = event.node,

View File

@@ -2,6 +2,9 @@
// in the tree, replace any new lines with `br`s.
export function setup(helper) {
if (helper.markdownIt) { return; }
helper.postProcessText((text, event) => {
const { options, insideCounts } = event;
if (options.traditionalMarkdownLinebreaks || (insideCounts.pre > 0)) { return; }

View File

@@ -25,6 +25,9 @@ function isOnOneLine(link, parent) {
// We only onebox stuff that is on its own line.
export function setup(helper) {
if (helper.markdownIt) { return; }
helper.onParseNode(event => {
const node = event.node,
path = event.path;

View File

@@ -9,6 +9,9 @@ registerOption((siteSettings, opts) => {
export function setup(helper) {
if (helper.markdownIt) { return; }
register(helper, 'quote', {noWrap: true, singlePara: true}, (contents, bbParams, options) => {
const params = {'class': 'quote'};

View File

@@ -18,6 +18,8 @@ registerOption((siteSettings, opts) => {
export function setup(helper) {
if (helper.markdownIt) { return; }
helper.whiteList(['table', 'table.md-table', 'tbody', 'thead', 'tr', 'th', 'td']);
helper.replaceBlock({

View File

@@ -0,0 +1,200 @@
// parse a tag [test a=1 b=2] to a data structure
// {tag: "test", attrs={a: "1", b: "2"}
//
// Returns undefined when `src` at `start` is not a well-formed bbcode tag.
// For a closing tag ([/foo]) the result is {tag, length, closing: true},
// where length covers the whole "[/foo]" text.
// NOTE(review): for an opening tag, `length` is set to the absolute index
// of the "]" within `src`, not a length relative to `start` — that only
// matches the closing-tag convention when start === 0. Callers appear to
// compensate; TODO confirm before reusing elsewhere.
export function parseBBCodeTag(src, start, max) {
  let i;
  let tag;
  let attrs = {};
  let closed = false;
  let length = 0;
  let closingTag = false;

  // closing tag ("/" === 47 right after the "[")
  if (src.charCodeAt(start+1) === 47) {
    closingTag = true;
    start += 1;
  }

  // scan the tag name: ASCII letters only
  for (i=start+1;i<max;i++) {
    let letter = src[i];
    if (!( (letter >= 'a' && letter <= 'z') ||
           (letter >= 'A' && letter <= 'Z'))) {
      break;
    }
  }

  tag = src.slice(start+1, i);

  if (!tag) {
    return;
  }

  if (closingTag) {
    // closing tags carry no attributes; length spans "[/", tag, "]"
    if (src[i] === ']') {
      return {tag, length: tag.length+3, closing: true};
    }
    return;
  }

  // find the closing "]" of the opening tag
  for (;i<max;i++) {
    let letter = src[i];
    if (letter === ']') {
      closed = true;
      break;
    }
  }

  if (closed) {
    length = i;

    let raw = src.slice(start+tag.length+1, i);

    // trivial parser that is going to have to be rewritten at some point
    if (raw) {
      // reading a key 0, reading a val = 1
      let readingKey = true;
      let startSplit = 0;
      let key;

      for(i=0; i<raw.length; i++) {
        if (raw[i] === '=' || i === (raw.length-1)) {
          // one more offset to allow room to capture last
          if (raw[i] !== '=' || i === (raw.length-1)) {
            i+=1;
          }

          let cur = raw.slice(startSplit, i).trim();
          if (readingKey) {
            // a bare leading value (e.g. [quote=foo]) lands under "_default"
            key = cur || '_default';
          } else {
            let val = raw.slice(startSplit, i).trim();
            if (val && val.length > 0) {
              // strip optional surrounding quotes
              val = val.replace(/^["'](.*)["']$/, '$1');
              attrs[key] = val;
            }
          }

          readingKey = !readingKey;
          startSplit = i+1;
        }
      }
    }

    tag = tag.toLowerCase();

    return {tag, attrs, length};
  }
}
// markdown-it block rule: handles [tag] ... [/tag] blocks for any tag
// registered on md.block.bbcode_ruler (quotes, code, polls, ...). Unclosed
// blocks are auto-closed at the end of the document/parent.
function applyBBCode(state, startLine, endLine, silent, md) {
  var i, pos, nextLine,
      old_parent, old_line_max, rule,
      auto_closed = false,
      start = state.bMarks[startLine] + state.tShift[startLine],
      initial = start,
      max = state.eMarks[startLine];

  // [ === 91
  if (91 !== state.src.charCodeAt(start)) { return false; }

  let info = parseBBCodeTag(state.src, start, max);

  if (!info) {
    return false;
  }

  // find the registered rule for this tag
  let rules = md.block.bbcode_ruler.getRules();
  for(i=0;i<rules.length;i++) {
    let r = rules[i].rule;
    if (r.tag === info.tag) {
      rule = r;
      break;
    }
  }

  if (!rule) { return false; }

  // Since start is found, we can report success here in validation mode
  if (silent) { return true; }

  // Search for the end of the block
  nextLine = startLine;

  for (;;) {
    nextLine++;
    if (nextLine >= endLine) {
      // unclosed block should be autoclosed by end of document.
      // also block seems to be autoclosed by end of parent
      break;
    }

    start = state.bMarks[nextLine] + state.tShift[nextLine];
    max = state.eMarks[nextLine];

    if (start < max && state.sCount[nextLine] < state.blkIndent) {
      // non-empty line with negative indent should stop the list:
      // - ```
      // test
      break;
    }

    // bbcode close [ === 91
    if (91 !== state.src.charCodeAt(start)) { continue; }

    if (state.sCount[nextLine] - state.blkIndent >= 4) {
      // closing fence should be indented less than 4 spaces
      continue;
    }

    // match "[/tag]" — skip the "[/" prefix and trailing "]"
    if (state.src.slice(start+2, max-1) !== rule.tag) { continue; }

    // NOTE(review): `pos` is declared above but never assigned, so this
    // comparison is always false and the guard is dead code — TODO confirm
    // what it was meant to check.
    if (pos < max) { continue; }

    // found!
    auto_closed = true;
    break;
  }

  old_parent = state.parentType;
  old_line_max = state.lineMax;

  // this will prevent lazy continuations from ever going past our end marker
  state.lineMax = nextLine;

  // let the rule emit its opening token(s), handing it the raw tag text
  rule.before.call(this, state, info.attrs, md, state.src.slice(initial, initial + info.length + 1));

  let lastToken = state.tokens[state.tokens.length-1];
  lastToken.map = [ startLine, nextLine ];

  // tokenize everything between the opening and closing tag lines
  state.md.block.tokenize(state, startLine + 1, nextLine);

  rule.after.call(this, state, lastToken, md);

  lastToken = state.tokens[state.tokens.length-1];

  state.parentType = old_parent;
  state.lineMax = old_line_max;
  state.line = nextLine + (auto_closed ? 1 : 0);

  return true;
}
// Registers the bbcode block rule right after markdown-it's fence rule.
export function setup(helper) {
  if (!helper.markdownIt) { return; }
  helper.registerPlugin(md => {
    md.block.ruler.after('fence', 'bbcode', (state, startLine, endLine, silent)=> {
      return applyBBCode(state, startLine, endLine, silent, md);
    });
  });
}

View File

@@ -0,0 +1,109 @@
import { parseBBCodeTag } from 'pretty-text/engines/markdown-it/bbcode-block';
// The inline bbcode tags we support, each rendered as a <span> carrying a
// matching bbcode-* css class (whitelisted in setup below).
const rules = {
  'b': {tag: 'span', 'class': 'bbcode-b'},
  'i': {tag: 'span', 'class': 'bbcode-i'},
  'u': {tag: 'span', 'class': 'bbcode-u'},
  's': {tag: 'span', 'class': 'bbcode-s'}
};
// Inline tokenizer for [b]/[i]/[u]/[s]: emits the raw tag text as a text
// token and records a delimiter so processBBCode can pair open/close tags
// later — the same two-phase approach markdown-it uses for emphasis.
// NOTE(review): "tokanize" is a typo for "tokenize"; kept because the name
// is referenced by setup() below.
function tokanizeBBCode(state, silent) {
  let pos = state.pos;

  // 91 = [
  if (silent || state.src.charCodeAt(pos) !== 91) {
    return false;
  }

  const tagInfo = parseBBCodeTag(state.src, pos, state.posMax);

  if (!tagInfo) {
    return false;
  }

  // only tags we have a rendering rule for
  const rule = rules[tagInfo.tag];
  if (!rule) {
    return false;
  }

  tagInfo.rule = rule;

  // push the raw tag as plain text for now; processBBCode rewrites the
  // token if a matching close tag is found
  let token = state.push('text', '' , 0);
  token.content = state.src.slice(pos, pos+tagInfo.length);

  state.delimiters.push({
    bbInfo: tagInfo,
    marker: 'bb' + tagInfo.tag,
    open: !tagInfo.closing,
    close: !!tagInfo.closing,
    token: state.tokens.length - 1,
    level: state.level,
    end: -1,
    jump: 0
  });

  state.pos = pos + tagInfo.length;
  return true;
}
// Second phase of inline bbcode handling: walks the delimiter list built by
// the tokenizer and, for every paired open/close delimiter, rewrites the
// corresponding text tokens into bbcode_<tag>_open / bbcode_<tag>_close
// tokens carrying the rule's tag and css class.
function processBBCode(state, silent) {
  if (silent) {
    return;
  }

  const delimiters = state.delimiters;

  for (let i = 0; i < delimiters.length - 1; i++) {
    const open = delimiters[i];
    const info = open.bbInfo;

    // skip non-bbcode delimiters and ones that never found a partner
    if (!info || open.end === -1) {
      continue;
    }

    const close = delimiters[open.end];

    const openToken = state.tokens[open.token];
    openToken.type = `bbcode_${info.tag}_open`;
    openToken.attrs = [['class', info.rule['class']]];
    openToken.tag = info.rule.tag;
    openToken.nesting = 1;
    openToken.markup = openToken.content;
    openToken.content = '';

    const closeToken = state.tokens[close.token];
    closeToken.type = `bbcode_${info.tag}_close`;
    closeToken.tag = info.rule.tag;
    closeToken.nesting = -1;
    closeToken.markup = closeToken.content;
    closeToken.content = '';
  }

  return false;
}
// Whitelists the bbcode span classes and wires the two-phase inline rules:
// tokanizeBBCode collects delimiters, processBBCode pairs them just before
// markdown-it's text_collapse rule runs.
export function setup(helper) {
  if (!helper.markdownIt) { return; }

  helper.whiteList(['span.bbcode-b', 'span.bbcode-i', 'span.bbcode-u', 'span.bbcode-s']);

  helper.registerOptions(opts => {
    opts.features['bbcode-inline'] = true;
  });

  helper.registerPlugin(md => {
    md.inline.ruler.push('bbcode-inline', tokanizeBBCode);
    md.inline.ruler2.before('text_collapse', 'bbcode-inline', processBBCode);
  });
}

View File

@@ -0,0 +1,58 @@
import { inlineRegexRule } from 'pretty-text/engines/markdown-it/helpers';
// Emits tokens for a matched category hashtag. When the slug resolves via
// categoryHashtagLookup we emit <a class="hashtag">#<span>name</span></a>;
// otherwise a plain <span class="hashtag"> wrapping the raw text.
// Always returns true so the inline rule consumes the match.
function emitter(matches, state) {
  const [hashtag, slug] = matches;
  const lookup = state.md.options.discourse.categoryHashtagLookup;
  const result = lookup && lookup(slug);

  if (result) {
    // result is [url, categoryName]
    let token = state.push('link_open', 'a', 1);
    token.attrs = [['class', 'hashtag'], ['href', result[0]]];
    token.block = false;

    token = state.push('text', '', 0);
    token.content = '#';

    token = state.push('span_open', 'span', 1);
    token.block = false;

    token = state.push('text', '', 0);
    token.content = result[1];

    state.push('span_close', 'span', -1);
    state.push('link_close', 'a', -1);
  } else {
    let token = state.push('span_open', 'span', 1);
    token.attrs = [['class', 'hashtag']];

    token = state.push('text', '', 0);
    token.content = hashtag;

    state.push('span_close', 'span', -1);
  }

  return true;
}
// Registers the #category-hashtag inline rule, mirroring the legacy
// engine's options: same matcher, hashtags inside links are skipped.
export function setup(helper) {
  if (!helper.markdownIt) { return; }
  helper.registerPlugin(md=>{
    const rule = inlineRegexRule(md, {
      start: '#',
      matcher: /^#([\w-:]{1,101})/i,
      skipInLink: true,
      maxLength: 102,
      emitter: emitter
    });

    md.inline.ruler.push('category-hashtag', rule);
  });
}

View File

@@ -0,0 +1,44 @@
import { censorFn } from 'pretty-text/censored-words';
// Depth-first walk over a markdown-it token list, invoking `apply` on
// every token (parents before their inline children).
function recurse(tokens, apply) {
  for (let index = 0; index < tokens.length; index++) {
    const current = tokens[index];
    apply(current);
    if (current.children) {
      recurse(current.children, apply);
    }
  }
}

// Runs the `censor` text transform over the content of every token in the
// parsed tree, inline children included. No-op when the state carries no
// tokens.
function censorTree(state, censor) {
  if (!state.tokens) {
    return;
  }

  recurse(state.tokens, token => {
    if (token.content) {
      token.content = censor(token.content);
    }
  });
}
// Pulls the censored word list / pattern from site settings and, when
// non-empty, adds a core rule that censors the whole token tree.
export function setup(helper) {
  if (!helper.markdownIt) { return; }

  helper.registerOptions((opts, siteSettings) => {
    opts.censoredWords = siteSettings.censored_words;
    opts.censoredPattern = siteSettings.censored_pattern;
  });

  helper.registerPlugin(md => {
    const words = md.options.discourse.censoredWords;
    const patterns = md.options.discourse.censoredPattern;

    if ((words && words.length > 0) || (patterns && patterns.length > 0)) {
      // 9632 = "■" (black square replacement character)
      const replacement = String.fromCharCode(9632);
      const censor = censorFn(words, patterns, replacement);
      md.core.ruler.push('censored', state => censorTree(state, censor));
    }
  });
}

View File

@@ -0,0 +1,51 @@
// we need a custom renderer for code blocks cause we have a slightly non compliant
// format with special handling for text and so on
const TEXT_CODE_CLASSES = ["text", "pre", "plain"];

// Custom fence renderer. Picks a `lang-*` css class from the fence info
// string — falling back to the site's default code language — and emits
// the html-escaped contents inside a <pre><code> wrapper. "Text"-ish
// languages map to the explicit no-highlight class.
function render(tokens, idx, options, env, slf, md) {
  const token = tokens[idx];
  const escapedContent = md.utils.escapeHtml(token.content);

  // strip off any additional languages
  let info = token.info ? md.utils.unescapeAll(token.info) : '';
  if (info) {
    info = info.split(/\s+/g)[0];
  }

  let langName = md.options.discourse.defaultCodeLang;
  const acceptableCodeClasses = md.options.discourse.acceptableCodeClasses;
  if (acceptableCodeClasses && info && acceptableCodeClasses.indexOf(info) !== -1) {
    langName = info;
  }

  const className = TEXT_CODE_CLASSES.indexOf(langName) !== -1
    ? 'lang-nohighlight'
    : 'lang-' + langName;

  return `<pre><code class='${className}'>${escapedContent}</code></pre>\n`;
}
// Configures code-fence rendering: the default language, the lang-* css
// classes we accept on <code>, and the custom fence renderer above.
export function setup(helper) {
  if (!helper.markdownIt) { return; }

  helper.registerOptions((opts, siteSettings) => {
    opts.defaultCodeLang = siteSettings.default_code_lang;
    opts.acceptableCodeClasses = (siteSettings.highlighted_languages || "").split("|").concat(['auto', 'nohighlight']);
  });

  // only allow lang-* classes that map to an accepted language
  helper.whiteList({
    custom(tag, name, value) {
      if (tag === 'code' && name === 'class') {
        const m = /^lang\-(.+)$/.exec(value);
        if (m) {
          return helper.getOptions().acceptableCodeClasses.indexOf(m[1]) !== -1;
        }
      }
    }
  });

  helper.registerPlugin(md=>{
    md.renderer.rules.fence = (tokens,idx,options,env,slf)=>render(tokens,idx,options,env,slf,md);
  });
}

View File

@@ -0,0 +1,241 @@
import { buildEmojiUrl, isCustomEmoji } from 'pretty-text/emoji';
import { translations } from 'pretty-text/emoji/data';
import { textReplace } from 'pretty-text/engines/markdown-it/helpers';
// Longest emoji name we will scan for between colons.
const MAX_NAME_LENGTH = 60;

// Lazily-built prefix tree over the emoji alias translations; see
// buildTranslationTree and getEmojiTokenByTranslation.
let translationTree = null;

// This allows us to efficiently search for aliases
// We build a data structure that allows us to quickly
// search through our N next chars to see if any match
// one of our alias emojis.
//
// Each tree node is [charCode, children]; when an alias terminates at a
// node, that node's second slot is overwritten with the emoji name string.
function buildTranslationTree() {
  let tree = [];
  let lastNode;

  Object.keys(translations).forEach(function(key){
    let i;
    let node = tree;

    for(i=0;i<key.length;i++) {
      let code = key.charCodeAt(i);
      let j;
      let found = false;

      for (j=0;j<node.length;j++){
        if (node[j][0] === code) {
          node = node[j][1];
          found = true;
          break;
        }
      }

      if (!found) {
        // token, children, value
        let tmp = [code, []];
        node.push(tmp);
        lastNode = tmp;
        node = tmp[1];
      }
    }

    // NOTE(review): if a key's entire path already exists (one alias is a
    // prefix or duplicate of another), `lastNode` still points at the
    // previous key's node and gets clobbered here — TODO confirm the
    // translation table can never hit this case.
    lastNode[1] = translations[key];
  });

  return tree;
}
// Resolves an emoji code to its image url, hover title and css classes,
// or undefined when the code is unknown. Codes are case-insensitive.
function imageFor(code, opts) {
  code = code.toLowerCase();
  const url = buildEmojiUrl(code, opts);
  if (url) {
    const title = `:${code}:`;
    const classes = isCustomEmoji(code, opts) ? "emoji emoji-custom" : "emoji";
    return {url, title, classes};
  }
}
// If `content` at `pos` starts a ":name:" emoji reference (preceded by
// start-of-text, whitespace or punctuation), returns the name between the
// colons — including a skin-tone suffix like "wave:t3" — else undefined.
function getEmojiName(content, pos, state) {
  // 58 = ':'
  if (content.charCodeAt(pos) !== 58) {
    return;
  }

  // only trigger at a word boundary
  if (pos > 0) {
    let prev = content.charCodeAt(pos-1);
    if (!state.md.utils.isSpace(prev) && !state.md.utils.isPunctChar(String.fromCharCode(prev))) {
      return;
    }
  }

  pos++;

  // "::" is never the start of an emoji name
  if (content.charCodeAt(pos) === 58) {
    return;
  }

  let length = 0;
  while(length < MAX_NAME_LENGTH) {
    length++;
    if (content.charCodeAt(pos+length) === 58) {
      // check for t2-t6 (skin tone suffix, e.g. ":wave:t3:")
      if (content.substr(pos+length+1, 3).match(/t[2-6]:/)) {
        length += 3;
      }
      break;
    }

    // ran off the end of the text without a closing colon
    if (pos+length > content.length) {
      return;
    }
  }

  // hit the length cap without finding a closing colon
  if (length === MAX_NAME_LENGTH) {
    return;
  }

  return content.substr(pos, length);
}
// straight forward :smile: to emoji image
// Returns an img token for a known emoji name, or undefined.
function getEmojiTokenByName(name, state) {
  let info;
  // intentional assignment-in-condition: the token is only built on a hit
  if (info = imageFor(name, state.md.options.discourse)) {
    let token = new state.Token('emoji', 'img', 0);
    token.attrs = [['src', info.url],
                   ['title', info.title],
                   ['class', info.classes],
                   ['alt', info.title]];

    return token;
  }
}
// Tries to match an emoji alias (like ":)") starting at `pos` by walking
// the translation prefix tree. On success returns {pos, token} where pos
// is the index just past the alias; otherwise undefined.
function getEmojiTokenByTranslation(content, pos, state) {
  // build the tree lazily, once
  translationTree = translationTree || buildTranslationTree();

  let currentTree = translationTree;

  let i;
  let search = true;
  let found = false;
  let start = pos;

  // walk the tree one char at a time, remembering the deepest terminal hit
  // (a terminal node stores the emoji name string instead of children)
  while(search) {
    search = false;
    let code = content.charCodeAt(pos);

    for (i=0;i<currentTree.length;i++) {
      if(currentTree[i][0] === code) {
        currentTree = currentTree[i][1];
        pos++;
        search = true;
        if (typeof currentTree === "string") {
          found = currentTree;
        }
        break;
      }
    }
  }

  if (!found) {
    return;
  }

  // quick boundary check — the alias must not butt up against a word char
  if (start > 0) {
    let leading = content.charAt(start-1);
    if (!state.md.utils.isSpace(leading.charCodeAt(0)) && !state.md.utils.isPunctChar(leading)) {
      return;
    }
  }

  // check trailing for punct or space
  if (pos < content.length) {
    let trailing = content.charCodeAt(pos);
    if (!state.md.utils.isSpace(trailing)){
      return;
    }
  }

  let token = getEmojiTokenByName(found, state);
  if (token) {
    return { pos, token };
  }
}
// Scans a text token's content and returns an array of replacement tokens
// (text + emoji img tokens) when at least one emoji or alias was found,
// or null to leave the original token untouched.
function applyEmoji(content, state) {
  let i;
  let result = null;
  let contentToken = null;

  let start = 0;
  // end of the last emitted segment; stays content.length when no match
  let endToken = content.length;

  for (i=0; i<content.length-1; i++) {
    let offset = 0;
    let emojiName = getEmojiName(content,i,state);
    let token = null;

    if (emojiName) {
      token = getEmojiTokenByName(emojiName, state);
      if (token) {
        // name plus the two surrounding colons
        offset = emojiName.length+2;
      }
    }

    if (!token) {
      // handle aliases (note: we can't do this in inline cause ; is not a split point)
      //
      let info = getEmojiTokenByTranslation(content, i, state);
      if (info) {
        offset = info.pos - i;
        token = info.token;
      }
    }

    if (token) {
      result = result || [];

      // flush the plain text preceding this emoji
      if (i-start>0) {
        contentToken = new state.Token('text', '', 0);
        contentToken.content = content.slice(start,i);
        result.push(contentToken);
      }

      result.push(token);
      endToken = start = i + offset;
    }
  }

  // flush any trailing text after the last emoji
  if (endToken < content.length) {
    contentToken = new state.Token('text', '', 0);
    contentToken.content = content.slice(endToken);
    result.push(contentToken);
  }

  return result;
}
// Wires emoji options from site settings and registers the core rule that
// replaces :name: references and text aliases inside text tokens.
export function setup(helper) {
  if (!helper.markdownIt) { return; }

  helper.registerOptions((opts, siteSettings, state)=>{
    opts.features.emoji = !!siteSettings.enable_emoji;
    opts.emojiSet = siteSettings.emoji_set || "";
    opts.customEmoji = state.customEmoji;
  });

  helper.registerPlugin((md)=>{
    md.core.ruler.push('emoji', state => textReplace(state, applyEmoji));
  });
}

View File

@@ -0,0 +1,106 @@
// since the markdown.it interface is a bit on the verbose side
// we can keep some general patterns here
// NOTE(review): presumably exported so the module has a default export for
// the build pipeline alongside the named helpers below — TODO confirm
export default null;
// creates a rule suitable for inline parsing and replacement
//
// example:
// const rule = inlineRegexRule(md, {
//   start: '#',
//   matcher: /^#([\w-:]{1,101})/i,
//   emitter: emitter
// });
//
// The returned rule only fires when the match sits at a word boundary on
// both sides, optionally skips matches inside links, and delegates token
// emission to options.emitter (which returns true to consume the match).
export function inlineRegexRule(md, options) {
  const startCode = options.start.charCodeAt(0);
  const maxLength = (options.maxLength || 500) + 1;

  return function(state) {
    const pos = state.pos;

    // cheap first-char check before doing any real work
    if (state.src.charCodeAt(pos) !== startCode) {
      return false;
    }

    // test prev: only match right after whitespace/punctuation or line start
    if (pos > 0) {
      const prev = state.src.charCodeAt(pos - 1);
      if (!md.utils.isSpace(prev) && !md.utils.isPunctChar(String.fromCharCode(prev))) {
        return false;
      }
    }

    // skip if in a link
    if (options.skipInLink && state.tokens) {
      const last = state.tokens[state.tokens.length - 1];
      if (last && (last.type === 'link_open' ||
          (last.type === 'html_inline' && last.content.substr(0, 2) === "<a"))) {
        return false;
      }
    }

    const substr = state.src.slice(pos, Math.min(pos + maxLength, state.posMax));
    const matches = options.matcher.exec(substr);
    if (!matches) {
      return false;
    }

    // got to test trailing boundary
    const finalPos = pos + matches[0].length;
    if (finalPos < state.posMax) {
      const trailing = state.src.charCodeAt(finalPos);
      if (!md.utils.isSpace(trailing) && !md.utils.isPunctChar(String.fromCharCode(trailing))) {
        return false;
      }
    }

    if (options.emitter(matches, state)) {
      state.pos = Math.min(state.posMax, finalPos);
      return true;
    }

    return false;
  };
}
// based off https://github.com/markdown-it/markdown-it-emoji/blob/master/dist/markdown-it-emoji.js
//
// Walks every inline token's children and lets `callback` replace a text
// token with an array of new tokens (callback returns a falsy value to
// leave the token alone). Text inside autolinks is never offered for
// replacement.
export function textReplace(state, callback) {
  const blockTokens = state.tokens;
  let autolinkLevel = 0;

  for (let j = 0, l = blockTokens.length; j < l; j++) {
    if (blockTokens[j].type !== 'inline') { continue; }

    let tokens = blockTokens[j].children;

    // We scan from the end, to keep position when new tags added.
    // Use reversed logic in links start/end match
    for (let i = tokens.length - 1; i >= 0; i--) {
      const token = tokens[i];

      if (token.type === 'link_open' || token.type === 'link_close') {
        if (token.info === 'auto') { autolinkLevel -= token.nesting; }
      }

      if (token.type === 'text' && autolinkLevel === 0) {
        const split = callback(token.content, state);
        if (split) {
          // replace current node
          blockTokens[j].children = tokens = state.md.utils.arrayReplaceAt(tokens, i, split);
        }
      }
    }
  }
}

View File

@@ -0,0 +1,88 @@
// Matches the name right after the "@": a word char followed by up to 59
// more word chars, dots or dashes, ending at a word boundary.
const regex = /^(\w[\w.-]{0,59})\b/i;

// Inline rule for @mentions. Resolves the name via `mentionLookup` and
// emits either a user profile link, a group link, or a plain styled span
// when the name is unknown. Returns true when a mention was consumed.
function applyMentions(state, silent, isSpace, isPunctChar, mentionLookup, getURL) {
  const pos = state.pos;

  // 64 = @
  if (silent || state.src.charCodeAt(pos) !== 64) {
    return false;
  }

  // only trigger after whitespace/punctuation (or at the very start)
  if (pos > 0) {
    const prev = state.src.charCodeAt(pos - 1);
    if (!isSpace(prev) && !isPunctChar(String.fromCharCode(prev))) {
      return false;
    }
  }

  // skip if in a link
  if (state.tokens) {
    const last = state.tokens[state.tokens.length - 1];
    if (last && (last.type === 'link_open' ||
        (last.type === 'html_inline' && last.content.substr(0, 2) === "<a"))) {
      return false;
    }
  }

  const matches = state.src.substr(pos + 1, 60).match(regex);
  if (!matches) {
    return false;
  }

  const username = matches[1];
  const type = mentionLookup && mentionLookup(username);

  let tag = 'a';
  let className = 'mention';
  let href = null;

  if (type === 'user') {
    href = getURL('/u/') + username.toLowerCase();
  } else if (type === 'group') {
    href = getURL('/groups/') + username;
    className = 'mention-group';
  } else {
    // unknown names render as a plain styled span, not a link
    tag = 'span';
  }

  let token = state.push('mention_open', tag, 1);
  token.attrs = [['class', className]];
  if (href) {
    token.attrs.push(['href', href]);
  }

  token = state.push('text', '', 0);
  token.content = '@' + username;

  state.push('mention_close', tag, -1);
  state.pos = pos + username.length + 1;

  return true;
}
// Registers the @mention inline rule, handing it the utils and discourse
// options it needs so the rule itself stays a standalone function.
export function setup(helper) {
  if (!helper.markdownIt) { return; }
  helper.registerPlugin(md => {
    md.inline.ruler.push('mentions', (state,silent)=> applyMentions(
      state,
      silent,
      md.utils.isSpace,
      md.utils.isPunctChar,
      md.options.discourse.mentionLookup,
      md.options.discourse.getURL
    ));
  });
}

View File

@@ -0,0 +1,66 @@
// Core rule: decorate "standalone" links with the "onebox" class so they
// can later be expanded into onebox previews.
//
// A link qualifies when:
//   * it lives in an inline token directly following a top-level
//     paragraph_open,
//   * within the inline token it is bounded by line breaks (or the
//     start/end of the paragraph),
//   * its only attribute is `href`, and
//   * its visible text is exactly the href (i.e. it was auto-linkified,
//     not hand-written markdown).
function applyOnebox(state, silent) {
  if (silent || !state.tokens || state.tokens.length < 3) {
    return;
  }

  let i;
  for(i=1;i<state.tokens.length;i++) {
    let token = state.tokens[i];
    let prev = state.tokens[i-1];
    let prevAccepted = prev.type === "paragraph_open" && prev.level === 0;

    if (token.type === "inline" && prevAccepted) {
      let j;
      for(j=0;j<token.children.length;j++){
        let child = token.children[j];
        if (child.type === "link_open") {

          // look behind for soft or hard break
          if (j > 0 && token.children[j-1].tag !== 'br') {
            continue;
          }

          // expected shape: link_open, text, link_close [, br]
          let text = token.children[j+1];
          let close = token.children[j+2];
          let lookahead = token.children[j+3];

          // look ahead for soft or hard break (or end of line)
          if (lookahead && lookahead.tag !== 'br') {
            continue;
          }

          // check attrs only include a href
          let attrs = child["attrs"];
          if (!attrs || attrs.length !== 1 || attrs[0][0] !== "href") {
            continue;
          }

          // check text matches href; guard against a dangling link_open
          // with no following text node (previously crashed on text.type)
          if (!text || text.type !== "text" || attrs[0][1] !== text.content) {
            continue;
          }

          if (!close || close.type !== "link_close") {
            continue;
          }

          // decorate...
          attrs.push(["class", "onebox"]);
        }
      }
    }
  }
}
// Plugin entry point: registers the onebox decoration core rule right
// after linkify. No-op under the legacy engine.
export function setup(helper) {
  if (!helper.markdownIt) {
    return;
  }
  helper.registerPlugin(md => md.core.ruler.after('linkify', 'onebox', applyOnebox));
}

View File

@@ -0,0 +1,134 @@
import { performEmojiUnescape } from 'pretty-text/emoji';
// Block bbcode rule for [quote=...] blocks.
//
// `before` opens an <aside class="quote"> with data attributes parsed
// from the raw attribute string in attrs._default (for example
// "sam, post:3, topic:7, full:true"), emits a header div with the
// quote controls, the poster's avatar (when a lookup is available) and
// either an off-topic link or " username:", then opens the blockquote.
// `after` closes the blockquote and the aside.
const rule = {
  tag: 'quote',

  before: function(state, attrs, md) {
    let options = md.options.discourse;

    let quoteInfo = attrs['_default'];
    let username, postNumber, topicId, avatarImg, full;

    if (quoteInfo) {
      let split = quoteInfo.split(/\,\s*/);
      username = split[0];
      let i;
      for(i=1;i<split.length;i++) {
        if (split[i].indexOf("post:") === 0) {
          postNumber = parseInt(split[i].substr(5),10);
          continue;
        }

        if (split[i].indexOf("topic:") === 0) {
          topicId = parseInt(split[i].substr(6),10);
          continue;
        }

        // BUGFIX: the old code called split[i].indexOf(/full:\s*true/),
        // which stringifies the RegExp ("/full:\\s*true/") and can never
        // match at position 0 — so "full:true" was silently ignored.
        if (/^full:\s*true/.test(split[i])) {
          full = true;
          continue;
        }
      }
    }

    let token = state.push('bbcode_open', 'aside', 1);
    token.attrs = [['class', 'quote']];

    if (postNumber) {
      token.attrs.push(['data-post', postNumber]);
    }

    if (topicId) {
      token.attrs.push(['data-topic', topicId]);
    }

    if (full) {
      token.attrs.push(['data-full', 'true']);
    }

    if (options.lookupAvatarByPostNumber) {
      // client-side, we can retrieve the avatar from the post
      avatarImg = options.lookupAvatarByPostNumber(postNumber, topicId);
    } else if (options.lookupAvatar) {
      // server-side, we need to lookup the avatar from the username
      avatarImg = options.lookupAvatar(username);
    }

    if (username) {
      // quote is "off topic" when it points at a post in another topic
      let offTopicQuote = options.topicId &&
        postNumber &&
        options.getTopicInfo &&
        topicId !== options.topicId;

      // on topic quote
      token = state.push('quote_header_open', 'div', 1);
      token.attrs = [['class', 'title']];

      token = state.push('quote_controls_open', 'div', 1);
      token.attrs = [['class', 'quote-controls']];

      token = state.push('quote_controls_close', 'div', -1);

      if (avatarImg) {
        token = state.push('html_inline', '', 0);
        token.content = avatarImg;
      }

      if (offTopicQuote) {
        const topicInfo = options.getTopicInfo(topicId);
        if (topicInfo) {
          var href = topicInfo.href;
          if (postNumber > 0) { href += "/" + postNumber; }

          let title = topicInfo.title;

          if (options.enableEmoji) {
            title = performEmojiUnescape(topicInfo.title, {
              getURL: options.getURL, emojiSet: options.emojiSet
            });
          }

          token = state.push('link_open', 'a', 1);
          token.attrs = [[ 'href', href ]];
          token.block = false;

          token = state.push('html_inline', '', 0);
          token.content = title;

          token = state.push('link_close', 'a', -1);
          token.block = false;
        }
      } else {
        token = state.push('text', '', 0);
        token.content = ` ${username}:`;
      }

      token = state.push('quote_header_close', 'div', -1);
    }

    token = state.push('bbcode_open', 'blockquote', 1);
  },

  after: function(state) {
    state.push('bbcode_close', 'blockquote', -1);
    state.push('bbcode_close', 'aside', -1);
  }
};
// Plugin entry point: exposes the emoji-related site settings to the
// engine options and registers the quote bbcode rule. No-op under the
// legacy engine.
export function setup(helper) {
  if (!helper.markdownIt) {
    return;
  }

  // Mirror the relevant site settings onto the engine options so the
  // quote header can unescape emoji in topic titles.
  const applySettings = (opts, siteSettings) => {
    opts.enableEmoji = siteSettings.enable_emoji;
    opts.emojiSet = siteSettings.emoji_set;
  };

  helper.registerOptions(applySettings);
  helper.registerPlugin(md => md.block.bbcode_ruler.push('quotes', rule));
}

View File

@@ -1,4 +1,5 @@
import { cook, setup } from 'pretty-text/engines/discourse-markdown';
import { cook as cookIt, setup as setupIt } from 'pretty-text/engines/discourse-markdown-it';
import { sanitize } from 'pretty-text/sanitizer';
import WhiteLister from 'pretty-text/white-lister';
@@ -10,8 +11,6 @@ export function registerOption(fn) {
}
export function buildOptions(state) {
setup();
const {
siteSettings,
getURL,
@@ -21,9 +20,14 @@ export function buildOptions(state) {
categoryHashtagLookup,
userId,
getCurrentUser,
currentUser
currentUser,
lookupAvatarByPostNumber
} = state;
if (!siteSettings.enable_experimental_markdown_it) {
setup();
}
const features = {
'bold-italics': true,
'auto-link': true,
@@ -33,7 +37,7 @@ export function buildOptions(state) {
'html': true,
'category-hashtag': true,
'onebox': true,
'newline': true
'newline': !siteSettings.traditional_markdown_linebreaks
};
const options = {
@@ -47,11 +51,18 @@ export function buildOptions(state) {
userId,
getCurrentUser,
currentUser,
lookupAvatarByPostNumber,
mentionLookup: state.mentionLookup,
allowedHrefSchemes: siteSettings.allowed_href_schemes ? siteSettings.allowed_href_schemes.split('|') : null
allowedHrefSchemes: siteSettings.allowed_href_schemes ? siteSettings.allowed_href_schemes.split('|') : null,
markdownIt: siteSettings.enable_experimental_markdown_it
};
_registerFns.forEach(fn => fn(siteSettings, options, state));
if (siteSettings.enable_experimental_markdown_it) {
setupIt(options, siteSettings, state);
} else {
// TODO deprecate this
_registerFns.forEach(fn => fn(siteSettings, options, state));
}
return options;
}
@@ -61,13 +72,22 @@ export default class {
this.opts = opts || {};
this.opts.features = this.opts.features || {};
this.opts.sanitizer = (!!this.opts.sanitize) ? (this.opts.sanitizer || sanitize) : identity;
setup();
// We used to do a failsafe call to setup here
// under new engine we always expect setup to be called by buildOptions.
// setup();
}
cook(raw) {
if (!raw || raw.length === 0) { return ""; }
const result = cook(raw, this.opts);
let result;
if (this.opts.markdownIt) {
result = cookIt(raw, this.opts);
} else {
result = cook(raw, this.opts);
}
return result ? result : "";
}

View File

@@ -155,6 +155,7 @@ whiteListFeature('default', [
'kbd',
'li',
'ol',
'ol[start]',
'p',
'pre',
's',