fix: Convert to ESM (#2227)

BREAKING CHANGE:

- Default export removed. Use `import { marked } from 'marked'` or `const { marked } = require('marked')` instead.
- `/lib/marked.js` removed. Use `/marked.min.js` in a script tag instead.
- When using marked in a script tag, use `marked.parse(...)` instead of `marked(...)`.
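
For illustration, a minimal migration sketch (the input string is just an example):

```js
// Before: callable default export (CommonJS)
// const marked = require('marked');
// const html = marked('# hello');

// After: named export only, and parsing goes through marked.parse()
import { marked } from 'marked';
// or: const { marked } = require('marked');
const html = marked.parse('# hello');
console.log(html); // e.g. <h1 id="hello">hello</h1>
```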
Ben McCann 2021-11-02 07:32:17 -07:00 committed by GitHub
parent 911dc9c8b1
commit 4afb228d95
48 changed files with 5931 additions and 3631 deletions


@ -1,9 +1,9 @@
all:
@cp lib/marked.js marked.js
@uglifyjs --comments '/\*[^\0]+?Copyright[^\0]+?\*/' -o marked.min.js lib/marked.js
@cp lib/marked.cjs marked.cjs
@uglifyjs --comments '/\*[^\0]+?Copyright[^\0]+?\*/' -o marked.min.js lib/marked.cjs
clean:
@rm marked.js
@rm marked.cjs
@rm marked.min.js
bench:


@ -69,7 +69,7 @@ $ cat hello.html
<script src="https://cdn.jsdelivr.net/npm/marked/marked.min.js"></script>
<script>
document.getElementById('content').innerHTML =
marked('# Marked in the browser\n\nRendered by **marked**.');
marked.parse('# Marked in the browser\n\nRendered by **marked**.');
</script>
</body>
</html>

bin/marked → bin/marked.js: Executable file → Normal file, 125 changed lines

@ -5,16 +5,17 @@
* Copyright (c) 2011-2013, Christopher Jeffrey (MIT License)
*/
const fs = require('fs'),
path = require('path'),
marked = require('../');
import { promises } from 'fs';
import { marked } from '../lib/marked.esm.js';
const { readFile, writeFile } = promises;
/**
* Man Page
*/
function help() {
const spawn = require('child_process').spawn;
async function help() {
const { spawn } = await import('child_process');
const options = {
cwd: process.cwd(),
@ -23,16 +24,18 @@ function help() {
stdio: 'inherit'
};
spawn('man', [path.resolve(__dirname, '../man/marked.1')], options)
.on('error', function() {
fs.readFile(path.resolve(__dirname, '../man/marked.1.txt'), 'utf8', function(err, data) {
if (err) throw err;
console.log(data);
});
const { dirname, resolve } = await import('path');
const { fileURLToPath } = await import('url');
const __dirname = dirname(fileURLToPath(import.meta.url));
spawn('man', [resolve(__dirname, '../man/marked.1')], options)
.on('error', async() => {
console.log(await readFile(resolve(__dirname, '../man/marked.1.txt'), 'utf8'));
});
}
function version() {
async function version() {
const { createRequire } = await import('module');
const require = createRequire(import.meta.url);
const pkg = require('../package.json');
console.log(pkg.version);
}
@ -41,15 +44,15 @@ function version() {
* Main
*/
function main(argv, callback) {
const files = [],
options = {};
let input,
output,
string,
arg,
tokens,
opt;
async function main(argv) {
const files = [];
const options = {};
let input;
let output;
let string;
let arg;
let tokens;
let opt;
function getarg() {
let arg = argv.shift();
@ -82,8 +85,6 @@ function main(argv, callback) {
while (argv.length) {
arg = getarg();
switch (arg) {
case '--test':
return require('../test').main(process.argv.slice());
case '-o':
case '--output':
output = argv.shift();
@ -102,10 +103,10 @@ function main(argv, callback) {
break;
case '-h':
case '--help':
return help();
return await help();
case '-v':
case '--version':
return version();
return await version();
default:
if (arg.indexOf('--') === 0) {
opt = camelize(arg.replace(/^--(no-)?/, ''));
@ -128,62 +129,57 @@ function main(argv, callback) {
}
}
function getData(callback) {
async function getData() {
if (!input) {
if (files.length <= 2) {
if (string) {
return callback(null, string);
return string;
}
return getStdin(callback);
return await getStdin();
}
input = files.pop();
}
return fs.readFile(input, 'utf8', callback);
return await readFile(input, 'utf8');
}
return getData(function(err, data) {
if (err) return callback(err);
const data = await getData();
data = tokens
? JSON.stringify(marked.lexer(data, options), null, 2)
: marked(data, options);
const html = tokens
? JSON.stringify(marked.lexer(data, options), null, 2)
: marked(data, options);
if (!output) {
process.stdout.write(data + '\n');
return callback();
}
if (output) {
return await writeFile(output, data);
}
return fs.writeFile(output, data, callback);
});
process.stdout.write(html + '\n');
}
/**
* Helpers
*/
function getStdin(callback) {
const stdin = process.stdin;
let buff = '';
function getStdin() {
return new Promise((resolve, reject) => {
const stdin = process.stdin;
let buff = '';
stdin.setEncoding('utf8');
stdin.setEncoding('utf8');
stdin.on('data', function(data) {
buff += data;
});
stdin.on('data', function(data) {
buff += data;
});
stdin.on('error', function(err) {
return callback(err);
});
stdin.on('error', function(err) {
reject(err);
});
stdin.on('end', function() {
return callback(null, buff);
});
stdin.on('end', function() {
resolve(buff);
});
try {
stdin.resume();
} catch (e) {
callback(e);
}
});
}
function camelize(text) {
@ -204,12 +200,9 @@ function handleError(err) {
* Expose / Entry Point
*/
if (!module.parent) {
process.title = 'marked';
main(process.argv.slice(), function(err, code) {
if (err) return handleError(err);
return process.exit(code || 0);
});
} else {
module.exports = main;
}
process.title = 'marked';
main(process.argv.slice()).then(code => {
process.exit(code || 0);
}).catch(err => {
handleError(err);
});
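
For reference, a standalone sketch of the two ESM idioms the rewritten CLI relies on above (the CLI loads them via dynamic import(); static imports are used here for brevity, and Node 12.17+ is assumed):

```js
import { createRequire } from 'module';
import { dirname, resolve } from 'path';
import { fileURLToPath } from 'url';

// __dirname is not defined in ES modules; derive it from import.meta.url.
const __dirname = dirname(fileURLToPath(import.meta.url));
const manPage = resolve(__dirname, '../man/marked.1');

// require() is not defined either; createRequire restores it, which is the
// simplest way to read JSON such as package.json from an ES module.
const require = createRequire(import.meta.url);
const pkg = require('../package.json');

console.log(pkg.version, manPage);
```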


@ -10,7 +10,7 @@
"markup",
"html"
],
"main": "lib/marked.js",
"main": "lib/marked.cjs",
"license": "MIT",
"ignore": [
"**/.*",


@ -1,8 +1,10 @@
const { mkdir, rmdir, readdir, stat, readFile, writeFile, copyFile } = require('fs').promises;
const { join, dirname, parse, format } = require('path');
const marked = require('./');
const { highlight, highlightAuto } = require('highlight.js');
const titleize = require('titleize');
import { promises } from 'fs';
import { join, dirname, parse, format } from 'path';
import { parse as marked } from './lib/marked.esm.js';
import { HighlightJS } from 'highlight.js';
import titleize from 'titleize';
const { mkdir, rm, readdir, stat, readFile, writeFile, copyFile } = promises;
const { highlight, highlightAuto } = HighlightJS;
const cwd = process.cwd();
const inputDir = join(cwd, 'docs');
const outputDir = join(cwd, 'public');
@ -12,7 +14,7 @@ const getTitle = str => str === 'INDEX' ? '' : titleize(str.replace(/_/g, ' '))
async function init() {
console.log('Cleaning up output directory ' + outputDir);
await rmdir(outputDir, { recursive: true });
await rm(outputDir, { force: true, recursive: true });
await mkdir(outputDir);
await copyFile(join(cwd, 'LICENSE.md'), join(inputDir, 'LICENSE.md'));
const tmpl = await readFile(templateFile, 'utf8');


@ -4,7 +4,7 @@
"repo": "markedjs/marked",
"description": "A markdown parser built for speed",
"keywords": ["markdown", "markup", "html"],
"scripts": ["lib/marked.js"],
"main": "lib/marked.js",
"scripts": ["lib/marked.cjs"],
"main": "lib/marked.cjs",
"license": "MIT"
}


@ -56,7 +56,7 @@ $ marked -s "*hello world*"
<script src="https://cdn.jsdelivr.net/npm/marked/marked.min.js"></script>
<script>
document.getElementById('content').innerHTML =
marked('# Marked in browser\n\nRendered by **marked**.');
marked.parse('# Marked in browser\n\nRendered by **marked**.');
</script>
</body>
</html>
@ -65,8 +65,10 @@ $ marked -s "*hello world*"
**Node.js**
```js
const marked = require("marked");
const html = marked('# Marked in Node.js\n\nRendered by **marked**.');
import { marked } from 'marked';
// or const { marked } = require('marked');
const html = marked.parse('# Marked in Node.js\n\nRendered by **marked**.');
```
@ -79,7 +81,7 @@ We actively support the features of the following [Markdown flavors](https://git
| Flavor | Version | Status |
| :--------------------------------------------------------- | :------ | :----------------------------------------------------------------- |
| The original markdown.pl | -- | |
| [CommonMark](http://spec.commonmark.org/0.29/) | 0.29 | [Work in progress](https://github.com/markedjs/marked/issues/1202) |
| [CommonMark](http://spec.commonmark.org/0.30/) | 0.30 | [Work in progress](https://github.com/markedjs/marked/issues/1202) |
| [GitHub Flavored Markdown](https://github.github.com/gfm/) | 0.29 | [Work in progress](https://github.com/markedjs/marked/issues/1202) |
By supporting the above Markdown flavors, it's possible that Marked can help you use other flavors as well; however, these are not actively supported by the community.


@ -1,7 +1,8 @@
## The `marked` function
## The `parse` function
```js
marked(markdownString [,options] [,callback])
import { marked } from 'marked';
marked.parse(markdownString [,options] [,callback])
```
|Argument |Type |Notes |
@ -14,7 +15,7 @@ marked(markdownString [,options] [,callback])
```js
// Create reference instance
const marked = require('marked');
import { marked } from 'marked';
// Set options
// `highlight` example uses https://highlightjs.org
@ -36,7 +37,7 @@ marked.setOptions({
});
// Compile
console.log(marked(markdownString));
console.log(marked.parse(markdownString));
```
<h2 id="options">Options</h2>
@ -67,7 +68,7 @@ console.log(marked(markdownString));
You can parse inline markdown by running markdown through `marked.parseInline`.
```js
const blockHtml = marked('**strong** _em_');
const blockHtml = marked.parse('**strong** _em_');
console.log(blockHtml); // '<p><strong>strong</strong> <em>em</em></p>'
const inlineHtml = marked.parseInline('**strong** _em_');
@ -87,7 +88,7 @@ marked.setOptions({
}
});
marked(markdownString, (err, html) => {
marked.parse(markdownString, (err, html) => {
console.log(html);
});
```
@ -105,18 +106,18 @@ Marked can be run in a [worker thread](https://nodejs.org/api/worker_threads.htm
```js
// markedWorker.js
const marked = require('marked');
const { parentPort } = require('worker_threads');
import { marked } from 'marked';
import { parentPort } from 'worker_threads';
parentPort.on('message', (markdownString) => {
parentPort.postMessage(marked(markdownString));
parentPort.postMessage(marked.parse(markdownString));
});
```
```js
// index.js
const { Worker } = require('worker_threads');
import { Worker } from 'worker_threads';
const markedWorker = new Worker('./markedWorker.js');
const markedTimeout = setTimeout(() => {
@ -144,7 +145,7 @@ importScripts('path/to/marked.min.js');
onmessage = (e) => {
const markdownString = e.data
postMessage(marked(markdownString));
postMessage(marked.parse(markdownString));
};
```


@ -8,7 +8,7 @@ To champion the single-responsibility and open/closed principles, we have tried
```js
const marked = require('marked');
import { marked } from 'marked';
marked.use({
pedantic: false,
@ -69,7 +69,7 @@ Calling `marked.use()` to override the same function multiple times will give pr
```js
// Create reference instance
const marked = require('marked');
import { marked } from 'marked';
// Override function
const renderer = {
@ -89,7 +89,7 @@ const renderer = {
marked.use({ renderer });
// Run marked
console.log(marked('# heading+'));
console.log(marked.parse('# heading+'));
```
**Output:**
@ -173,7 +173,7 @@ Calling `marked.use()` to override the same function multiple times will give pr
```js
// Create reference instance
const marked = require('marked');
import { marked } from 'marked';
// Override function
const tokenizer = {
@ -195,7 +195,7 @@ const tokenizer = {
marked.use({ tokenizer });
// Run marked
console.log(marked('$ latex code $\n\n` other code `'));
console.log(marked.parse('$ latex code $\n\n` other code `'));
```
**Output:**
@ -264,7 +264,7 @@ The walkTokens function gets called with every token. Child tokens are called be
**Example:** Overriding heading tokens to start at h2.
```js
const marked = require('marked');
import { marked } from 'marked';
// Override function
const walkTokens = (token) => {
@ -276,7 +276,7 @@ const walkTokens = (token) => {
marked.use({ walkTokens });
// Run marked
console.log(marked('# heading 2\n\n## heading 3'));
console.log(marked.parse('# heading 2\n\n## heading 3'));
```
**Output:**
@ -422,7 +422,7 @@ marked.use({ extensions: [descriptionList] });
marked.use({ extensions: [description] });
marked.use({ walkTokens })
console.log(marked('A Description List:\n'
console.log(marked.parse('A Description List:\n'
+ ': Topic 1 : Description 1\n'
+ ': **Topic 2** : *Description 2*'));
```
@ -497,7 +497,7 @@ The Lexer builds an array of tokens, which will be passed to the Parser.
The Parser processes each token in the token array:
``` js
const marked = require('marked');
import { marked } from 'marked';
const md = `
# heading


@ -34,7 +34,7 @@ var inputDirty = true;
var $activeOutputElem = null;
var search = searchToObject();
var markedVersions = {
master: 'https://cdn.jsdelivr.net/gh/markedjs/marked/lib/marked.js'
master: 'https://cdn.jsdelivr.net/gh/markedjs/marked/marked.min.js'
};
var markedVersionCache = {};
var delayTime = 1;
@ -109,7 +109,7 @@ function setInitialVersion() {
.then(function(json) {
for (var i = 0; i < json.versions.length; i++) {
var ver = json.versions[i];
markedVersions[ver] = 'https://cdn.jsdelivr.net/npm/marked@' + ver + '/lib/marked.js';
markedVersions[ver] = 'https://cdn.jsdelivr.net/npm/marked@' + ver + '/marked.min.js';
var opt = document.createElement('option');
opt.textContent = ver;
opt.value = ver;
@ -122,7 +122,7 @@ function setInitialVersion() {
return res.json();
})
.then(function(json) {
markedVersions.master = 'https://cdn.jsdelivr.net/gh/markedjs/marked@' + json[0].sha + '/lib/marked.js';
markedVersions.master = 'https://cdn.jsdelivr.net/gh/markedjs/marked@' + json[0].sha + '/marked.min.js';
})
.catch(function() {
// do nothing
@ -262,7 +262,7 @@ function addCommitVersion(value, text, commit) {
if (markedVersions[value]) {
return;
}
markedVersions[value] = 'https://cdn.jsdelivr.net/gh/markedjs/marked@' + commit + '/lib/marked.js';
markedVersions[value] = 'https://cdn.jsdelivr.net/gh/markedjs/marked@' + commit + '/marked.min.js';
var opt = document.createElement('option');
opt.textContent = text;
opt.value = value;


@ -1 +0,0 @@
module.exports = require('./lib/marked');


@ -6,6 +6,7 @@
"helpers": [
"helpers/helpers.js"
],
"jsLoader": "import",
"stopSpecOnExpectationFailure": false,
"random": true
}

lib/marked.cjs: Normal file, 2913 changed lines

File diff suppressed because one or more lines are too long


@ -9,11 +9,7 @@
* The code in this file is generated from files in ./src/
*/
var esmEntry$1 = {exports: {}};
var defaults$5 = {exports: {}};
function getDefaults$1() {
function getDefaults() {
return {
baseUrl: null,
breaks: false,
@ -37,20 +33,15 @@ function getDefaults$1() {
};
}
function changeDefaults$1(newDefaults) {
defaults$5.exports.defaults = newDefaults;
}
let defaults = getDefaults();
defaults$5.exports = {
defaults: getDefaults$1(),
getDefaults: getDefaults$1,
changeDefaults: changeDefaults$1
};
function changeDefaults(newDefaults) {
defaults = newDefaults;
}
/**
* Helpers
*/
const escapeTest = /[&<>"']/;
const escapeReplace = /[&<>"']/g;
const escapeTestNoEncode = /[<>"']|&(?!#?\w+;)/;
@ -63,7 +54,7 @@ const escapeReplacements = {
"'": '&#39;'
};
const getEscapeReplacement = (ch) => escapeReplacements[ch];
function escape$3(html, encode) {
function escape(html, encode) {
if (encode) {
if (escapeTest.test(html)) {
return html.replace(escapeReplace, getEscapeReplacement);
@ -79,7 +70,7 @@ function escape$3(html, encode) {
const unescapeTest = /&(#(?:\d+)|(?:#x[0-9A-Fa-f]+)|(?:\w+));?/ig;
function unescape$1(html) {
function unescape(html) {
// explicitly match decimal, hex, and named HTML entities
return html.replace(unescapeTest, (_, n) => {
n = n.toLowerCase();
@ -94,7 +85,7 @@ function unescape$1(html) {
}
const caret = /(^|[^\[])\^/g;
function edit$1(regex, opt) {
function edit(regex, opt) {
regex = regex.source || regex;
opt = opt || '';
const obj = {
@ -113,11 +104,11 @@ function edit$1(regex, opt) {
const nonWordAndColonTest = /[^\w:]/g;
const originIndependentUrl = /^$|^[a-z][a-z0-9+.-]*:|^[?#]/i;
function cleanUrl$1(sanitize, base, href) {
function cleanUrl(sanitize, base, href) {
if (sanitize) {
let prot;
try {
prot = decodeURIComponent(unescape$1(href))
prot = decodeURIComponent(unescape(href))
.replace(nonWordAndColonTest, '')
.toLowerCase();
} catch (e) {
@ -151,7 +142,7 @@ function resolveUrl(base, href) {
if (justDomain.test(base)) {
baseUrls[' ' + base] = base + '/';
} else {
baseUrls[' ' + base] = rtrim$1(base, '/', true);
baseUrls[' ' + base] = rtrim(base, '/', true);
}
}
base = baseUrls[' ' + base];
@ -172,9 +163,9 @@ function resolveUrl(base, href) {
}
}
const noopTest$1 = { exec: function noopTest() {} };
const noopTest = { exec: function noopTest() {} };
function merge$2(obj) {
function merge(obj) {
let i = 1,
target,
key;
@ -191,7 +182,7 @@ function merge$2(obj) {
return obj;
}
function splitCells$1(tableRow, count) {
function splitCells(tableRow, count) {
// ensure that every cell-delimiting pipe has a space
// before it to distinguish it from an escaped pipe
const row = tableRow.replace(/\|/g, (match, offset, str) => {
@ -230,7 +221,7 @@ function splitCells$1(tableRow, count) {
// Remove trailing 'c's. Equivalent to str.replace(/c*$/, '').
// /c*$/ is vulnerable to REDOS.
// invert: Remove suffix of non-c chars instead. Default falsey.
function rtrim$1(str, c, invert) {
function rtrim(str, c, invert) {
const l = str.length;
if (l === 0) {
return '';
@ -254,7 +245,7 @@ function rtrim$1(str, c, invert) {
return str.substr(0, l - suffLen);
}
function findClosingBracket$1(str, b) {
function findClosingBracket(str, b) {
if (str.indexOf(b[1]) === -1) {
return -1;
}
@ -276,14 +267,14 @@ function findClosingBracket$1(str, b) {
return -1;
}
function checkSanitizeDeprecation$1(opt) {
function checkSanitizeDeprecation(opt) {
if (opt && opt.sanitize && !opt.silent) {
console.warn('marked(): sanitize and sanitizer parameters are deprecated since version 0.7.0, should not be used and will be removed in the future. Read more here: https://marked.js.org/#/USING_ADVANCED.md#options');
}
}
// copied from https://stackoverflow.com/a/5450113/806777
function repeatString$1(pattern, count) {
function repeatString(pattern, count) {
if (count < 1) {
return '';
}
@ -298,32 +289,9 @@ function repeatString$1(pattern, count) {
return result + pattern;
}
var helpers = {
escape: escape$3,
unescape: unescape$1,
edit: edit$1,
cleanUrl: cleanUrl$1,
resolveUrl,
noopTest: noopTest$1,
merge: merge$2,
splitCells: splitCells$1,
rtrim: rtrim$1,
findClosingBracket: findClosingBracket$1,
checkSanitizeDeprecation: checkSanitizeDeprecation$1,
repeatString: repeatString$1
};
const { defaults: defaults$4 } = defaults$5.exports;
const {
rtrim,
splitCells,
escape: escape$2,
findClosingBracket
} = helpers;
function outputLink(cap, link, raw, lexer) {
const href = link.href;
const title = link.title ? escape$2(link.title) : null;
const title = link.title ? escape(link.title) : null;
const text = cap[1].replace(/\\([\[\]])/g, '$1');
if (cap[0].charAt(0) !== '!') {
@ -344,7 +312,7 @@ function outputLink(cap, link, raw, lexer) {
raw,
href,
title,
text: escape$2(text)
text: escape(text)
};
}
}
@ -380,9 +348,9 @@ function indentCodeCompensation(raw, text) {
/**
* Tokenizer
*/
var Tokenizer_1$1 = class Tokenizer {
class Tokenizer {
constructor(options) {
this.options = options || defaults$4;
this.options = options || defaults;
}
space(src) {
@ -641,7 +609,7 @@ var Tokenizer_1$1 = class Tokenizer {
};
if (this.options.sanitize) {
token.type = 'paragraph';
token.text = this.options.sanitizer ? this.options.sanitizer(cap[0]) : escape$2(cap[0]);
token.text = this.options.sanitizer ? this.options.sanitizer(cap[0]) : escape(cap[0]);
token.tokens = [];
this.lexer.inline(token.text, token.tokens);
}
@ -771,7 +739,7 @@ var Tokenizer_1$1 = class Tokenizer {
return {
type: 'escape',
raw: cap[0],
text: escape$2(cap[1])
text: escape(cap[1])
};
}
}
@ -800,7 +768,7 @@ var Tokenizer_1$1 = class Tokenizer {
text: this.options.sanitize
? (this.options.sanitizer
? this.options.sanitizer(cap[0])
: escape$2(cap[0]))
: escape(cap[0]))
: cap[0]
};
}
@ -955,7 +923,7 @@ var Tokenizer_1$1 = class Tokenizer {
if (hasNonSpaceChars && hasSpaceCharsOnBothEnds) {
text = text.substring(1, text.length - 1);
}
text = escape$2(text, true);
text = escape(text, true);
return {
type: 'codespan',
raw: cap[0],
@ -991,10 +959,10 @@ var Tokenizer_1$1 = class Tokenizer {
if (cap) {
let text, href;
if (cap[2] === '@') {
text = escape$2(this.options.mangle ? mangle(cap[1]) : cap[1]);
text = escape(this.options.mangle ? mangle(cap[1]) : cap[1]);
href = 'mailto:' + text;
} else {
text = escape$2(cap[1]);
text = escape(cap[1]);
href = text;
}
@ -1019,7 +987,7 @@ var Tokenizer_1$1 = class Tokenizer {
if (cap = this.rules.inline.url.exec(src)) {
let text, href;
if (cap[2] === '@') {
text = escape$2(this.options.mangle ? mangle(cap[0]) : cap[0]);
text = escape(this.options.mangle ? mangle(cap[0]) : cap[0]);
href = 'mailto:' + text;
} else {
// do extended autolink path validation
@ -1028,7 +996,7 @@ var Tokenizer_1$1 = class Tokenizer {
prevCapZero = cap[0];
cap[0] = this.rules.inline._backpedal.exec(cap[0])[0];
} while (prevCapZero !== cap[0]);
text = escape$2(cap[0]);
text = escape(cap[0]);
if (cap[1] === 'www.') {
href = 'http://' + text;
} else {
@ -1056,9 +1024,9 @@ var Tokenizer_1$1 = class Tokenizer {
if (cap) {
let text;
if (this.lexer.state.inRawBlock) {
text = this.options.sanitize ? (this.options.sanitizer ? this.options.sanitizer(cap[0]) : escape$2(cap[0])) : cap[0];
text = this.options.sanitize ? (this.options.sanitizer ? this.options.sanitizer(cap[0]) : escape(cap[0])) : cap[0];
} else {
text = escape$2(this.options.smartypants ? smartypants(cap[0]) : cap[0]);
text = escape(this.options.smartypants ? smartypants(cap[0]) : cap[0]);
}
return {
type: 'text',
@ -1067,18 +1035,12 @@ var Tokenizer_1$1 = class Tokenizer {
};
}
}
};
const {
noopTest,
edit,
merge: merge$1
} = helpers;
}
/**
* Block-Level Grammar
*/
const block$1 = {
const block = {
newline: /^(?: *(?:\n|$))+/,
code: /^( {4}[^\n]+(?:\n(?: *(?:\n|$))*)?)+/,
fences: /^ {0,3}(`{3,}(?=[^`\n]*\n)|~{3,})([^\n]*)\n(?:|([\s\S]*?)\n)(?: {0,3}\1[~`]* *(?=\n|$)|$)/,
@ -1105,89 +1067,89 @@ const block$1 = {
text: /^[^\n]+/
};
block$1._label = /(?!\s*\])(?:\\[\[\]]|[^\[\]])+/;
block$1._title = /(?:"(?:\\"?|[^"\\])*"|'[^'\n]*(?:\n[^'\n]+)*\n?'|\([^()]*\))/;
block$1.def = edit(block$1.def)
.replace('label', block$1._label)
.replace('title', block$1._title)
block._label = /(?!\s*\])(?:\\[\[\]]|[^\[\]])+/;
block._title = /(?:"(?:\\"?|[^"\\])*"|'[^'\n]*(?:\n[^'\n]+)*\n?'|\([^()]*\))/;
block.def = edit(block.def)
.replace('label', block._label)
.replace('title', block._title)
.getRegex();
block$1.bullet = /(?:[*+-]|\d{1,9}[.)])/;
block$1.listItemStart = edit(/^( *)(bull) */)
.replace('bull', block$1.bullet)
block.bullet = /(?:[*+-]|\d{1,9}[.)])/;
block.listItemStart = edit(/^( *)(bull) */)
.replace('bull', block.bullet)
.getRegex();
block$1.list = edit(block$1.list)
.replace(/bull/g, block$1.bullet)
block.list = edit(block.list)
.replace(/bull/g, block.bullet)
.replace('hr', '\\n+(?=\\1?(?:(?:- *){3,}|(?:_ *){3,}|(?:\\* *){3,})(?:\\n+|$))')
.replace('def', '\\n+(?=' + block$1.def.source + ')')
.replace('def', '\\n+(?=' + block.def.source + ')')
.getRegex();
block$1._tag = 'address|article|aside|base|basefont|blockquote|body|caption'
block._tag = 'address|article|aside|base|basefont|blockquote|body|caption'
+ '|center|col|colgroup|dd|details|dialog|dir|div|dl|dt|fieldset|figcaption'
+ '|figure|footer|form|frame|frameset|h[1-6]|head|header|hr|html|iframe'
+ '|legend|li|link|main|menu|menuitem|meta|nav|noframes|ol|optgroup|option'
+ '|p|param|section|source|summary|table|tbody|td|tfoot|th|thead|title|tr'
+ '|track|ul';
block$1._comment = /<!--(?!-?>)[\s\S]*?(?:-->|$)/;
block$1.html = edit(block$1.html, 'i')
.replace('comment', block$1._comment)
.replace('tag', block$1._tag)
block._comment = /<!--(?!-?>)[\s\S]*?(?:-->|$)/;
block.html = edit(block.html, 'i')
.replace('comment', block._comment)
.replace('tag', block._tag)
.replace('attribute', / +[a-zA-Z:_][\w.:-]*(?: *= *"[^"\n]*"| *= *'[^'\n]*'| *= *[^\s"'=<>`]+)?/)
.getRegex();
block$1.paragraph = edit(block$1._paragraph)
.replace('hr', block$1.hr)
block.paragraph = edit(block._paragraph)
.replace('hr', block.hr)
.replace('heading', ' {0,3}#{1,6} ')
.replace('|lheading', '') // setex headings don't interrupt commonmark paragraphs
.replace('blockquote', ' {0,3}>')
.replace('fences', ' {0,3}(?:`{3,}(?=[^`\\n]*\\n)|~{3,})[^\\n]*\\n')
.replace('list', ' {0,3}(?:[*+-]|1[.)]) ') // only lists starting from 1 can interrupt
.replace('html', '</?(?:tag)(?: +|\\n|/?>)|<(?:script|pre|style|textarea|!--)')
.replace('tag', block$1._tag) // pars can be interrupted by type (6) html blocks
.replace('tag', block._tag) // pars can be interrupted by type (6) html blocks
.getRegex();
block$1.blockquote = edit(block$1.blockquote)
.replace('paragraph', block$1.paragraph)
block.blockquote = edit(block.blockquote)
.replace('paragraph', block.paragraph)
.getRegex();
/**
* Normal Block Grammar
*/
block$1.normal = merge$1({}, block$1);
block.normal = merge({}, block);
/**
* GFM Block Grammar
*/
block$1.gfm = merge$1({}, block$1.normal, {
block.gfm = merge({}, block.normal, {
table: '^ *([^\\n ].*\\|.*)\\n' // Header
+ ' {0,3}(?:\\| *)?(:?-+:? *(?:\\| *:?-+:? *)*)(?:\\| *)?' // Align
+ '(?:\\n((?:(?! *\\n|hr|heading|blockquote|code|fences|list|html).*(?:\\n|$))*)\\n*|$)' // Cells
});
block$1.gfm.table = edit(block$1.gfm.table)
.replace('hr', block$1.hr)
block.gfm.table = edit(block.gfm.table)
.replace('hr', block.hr)
.replace('heading', ' {0,3}#{1,6} ')
.replace('blockquote', ' {0,3}>')
.replace('code', ' {4}[^\\n]')
.replace('fences', ' {0,3}(?:`{3,}(?=[^`\\n]*\\n)|~{3,})[^\\n]*\\n')
.replace('list', ' {0,3}(?:[*+-]|1[.)]) ') // only lists starting from 1 can interrupt
.replace('html', '</?(?:tag)(?: +|\\n|/?>)|<(?:script|pre|style|textarea|!--)')
.replace('tag', block$1._tag) // tables can be interrupted by type (6) html blocks
.replace('tag', block._tag) // tables can be interrupted by type (6) html blocks
.getRegex();
/**
* Pedantic grammar (original John Gruber's loose markdown specification)
*/
block$1.pedantic = merge$1({}, block$1.normal, {
block.pedantic = merge({}, block.normal, {
html: edit(
'^ *(?:comment *(?:\\n|\\s*$)'
+ '|<(tag)[\\s\\S]+?</\\1> *(?:\\n{2,}|\\s*$)' // closed tag
+ '|<tag(?:"[^"]*"|\'[^\']*\'|\\s[^\'"/>\\s]*)*?/?> *(?:\\n{2,}|\\s*$))')
.replace('comment', block$1._comment)
.replace('comment', block._comment)
.replace(/tag/g, '(?!(?:'
+ 'a|em|strong|small|s|cite|q|dfn|abbr|data|time|code|var|samp|kbd|sub'
+ '|sup|i|b|u|mark|ruby|rt|rp|bdi|bdo|span|br|wbr|ins|del|img)'
@ -1196,10 +1158,10 @@ block$1.pedantic = merge$1({}, block$1.normal, {
def: /^ *\[([^\]]+)\]: *<?([^\s>]+)>?(?: +(["(][^\n]+[")]))? *(?:\n+|$)/,
heading: /^(#{1,6})(.*)(?:\n+|$)/,
fences: noopTest, // fences not supported
paragraph: edit(block$1.normal._paragraph)
.replace('hr', block$1.hr)
paragraph: edit(block.normal._paragraph)
.replace('hr', block.hr)
.replace('heading', ' *#{1,6} *[^\n]')
.replace('lheading', block$1.lheading)
.replace('lheading', block.lheading)
.replace('blockquote', ' {0,3}>')
.replace('|fences', '')
.replace('|list', '')
@ -1210,7 +1172,7 @@ block$1.pedantic = merge$1({}, block$1.normal, {
/**
* Inline-Level Grammar
*/
const inline$1 = {
const inline = {
escape: /^\\([!"#$%&'()*+,\-./:;<=>?@\[\]\\^_`{|}~])/,
autolink: /^<(scheme:[^\s\x00-\x1f<>]*|email)>/,
url: noopTest,
@ -1240,73 +1202,73 @@ const inline$1 = {
// list of punctuation marks from CommonMark spec
// without * and _ to handle the different emphasis markers * and _
inline$1._punctuation = '!"#$%&\'()+\\-.,/:;<=>?@\\[\\]`^{|}~';
inline$1.punctuation = edit(inline$1.punctuation).replace(/punctuation/g, inline$1._punctuation).getRegex();
inline._punctuation = '!"#$%&\'()+\\-.,/:;<=>?@\\[\\]`^{|}~';
inline.punctuation = edit(inline.punctuation).replace(/punctuation/g, inline._punctuation).getRegex();
// sequences em should skip over [title](link), `code`, <html>
inline$1.blockSkip = /\[[^\]]*?\]\([^\)]*?\)|`[^`]*?`|<[^>]*?>/g;
inline$1.escapedEmSt = /\\\*|\\_/g;
inline.blockSkip = /\[[^\]]*?\]\([^\)]*?\)|`[^`]*?`|<[^>]*?>/g;
inline.escapedEmSt = /\\\*|\\_/g;
inline$1._comment = edit(block$1._comment).replace('(?:-->|$)', '-->').getRegex();
inline._comment = edit(block._comment).replace('(?:-->|$)', '-->').getRegex();
inline$1.emStrong.lDelim = edit(inline$1.emStrong.lDelim)
.replace(/punct/g, inline$1._punctuation)
inline.emStrong.lDelim = edit(inline.emStrong.lDelim)
.replace(/punct/g, inline._punctuation)
.getRegex();
inline$1.emStrong.rDelimAst = edit(inline$1.emStrong.rDelimAst, 'g')
.replace(/punct/g, inline$1._punctuation)
inline.emStrong.rDelimAst = edit(inline.emStrong.rDelimAst, 'g')
.replace(/punct/g, inline._punctuation)
.getRegex();
inline$1.emStrong.rDelimUnd = edit(inline$1.emStrong.rDelimUnd, 'g')
.replace(/punct/g, inline$1._punctuation)
inline.emStrong.rDelimUnd = edit(inline.emStrong.rDelimUnd, 'g')
.replace(/punct/g, inline._punctuation)
.getRegex();
inline$1._escapes = /\\([!"#$%&'()*+,\-./:;<=>?@\[\]\\^_`{|}~])/g;
inline._escapes = /\\([!"#$%&'()*+,\-./:;<=>?@\[\]\\^_`{|}~])/g;
inline$1._scheme = /[a-zA-Z][a-zA-Z0-9+.-]{1,31}/;
inline$1._email = /[a-zA-Z0-9.!#$%&'*+/=?^_`{|}~-]+(@)[a-zA-Z0-9](?:[a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?(?:\.[a-zA-Z0-9](?:[a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?)+(?![-_])/;
inline$1.autolink = edit(inline$1.autolink)
.replace('scheme', inline$1._scheme)
.replace('email', inline$1._email)
inline._scheme = /[a-zA-Z][a-zA-Z0-9+.-]{1,31}/;
inline._email = /[a-zA-Z0-9.!#$%&'*+/=?^_`{|}~-]+(@)[a-zA-Z0-9](?:[a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?(?:\.[a-zA-Z0-9](?:[a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?)+(?![-_])/;
inline.autolink = edit(inline.autolink)
.replace('scheme', inline._scheme)
.replace('email', inline._email)
.getRegex();
inline$1._attribute = /\s+[a-zA-Z:_][\w.:-]*(?:\s*=\s*"[^"]*"|\s*=\s*'[^']*'|\s*=\s*[^\s"'=<>`]+)?/;
inline._attribute = /\s+[a-zA-Z:_][\w.:-]*(?:\s*=\s*"[^"]*"|\s*=\s*'[^']*'|\s*=\s*[^\s"'=<>`]+)?/;
inline$1.tag = edit(inline$1.tag)
.replace('comment', inline$1._comment)
.replace('attribute', inline$1._attribute)
inline.tag = edit(inline.tag)
.replace('comment', inline._comment)
.replace('attribute', inline._attribute)
.getRegex();
inline$1._label = /(?:\[(?:\\.|[^\[\]\\])*\]|\\.|`[^`]*`|[^\[\]\\`])*?/;
inline$1._href = /<(?:\\.|[^\n<>\\])+>|[^\s\x00-\x1f]*/;
inline$1._title = /"(?:\\"?|[^"\\])*"|'(?:\\'?|[^'\\])*'|\((?:\\\)?|[^)\\])*\)/;
inline._label = /(?:\[(?:\\.|[^\[\]\\])*\]|\\.|`[^`]*`|[^\[\]\\`])*?/;
inline._href = /<(?:\\.|[^\n<>\\])+>|[^\s\x00-\x1f]*/;
inline._title = /"(?:\\"?|[^"\\])*"|'(?:\\'?|[^'\\])*'|\((?:\\\)?|[^)\\])*\)/;
inline$1.link = edit(inline$1.link)
.replace('label', inline$1._label)
.replace('href', inline$1._href)
.replace('title', inline$1._title)
inline.link = edit(inline.link)
.replace('label', inline._label)
.replace('href', inline._href)
.replace('title', inline._title)
.getRegex();
inline$1.reflink = edit(inline$1.reflink)
.replace('label', inline$1._label)
inline.reflink = edit(inline.reflink)
.replace('label', inline._label)
.getRegex();
inline$1.reflinkSearch = edit(inline$1.reflinkSearch, 'g')
.replace('reflink', inline$1.reflink)
.replace('nolink', inline$1.nolink)
inline.reflinkSearch = edit(inline.reflinkSearch, 'g')
.replace('reflink', inline.reflink)
.replace('nolink', inline.nolink)
.getRegex();
/**
* Normal Inline Grammar
*/
inline$1.normal = merge$1({}, inline$1);
inline.normal = merge({}, inline);
/**
* Pedantic Inline Grammar
*/
inline$1.pedantic = merge$1({}, inline$1.normal, {
inline.pedantic = merge({}, inline.normal, {
strong: {
start: /^__|\*\*/,
middle: /^__(?=\S)([\s\S]*?\S)__(?!_)|^\*\*(?=\S)([\s\S]*?\S)\*\*(?!\*)/,
@ -1320,10 +1282,10 @@ inline$1.pedantic = merge$1({}, inline$1.normal, {
endUnd: /_(?!_)/g
},
link: edit(/^!?\[(label)\]\((.*?)\)/)
.replace('label', inline$1._label)
.replace('label', inline._label)
.getRegex(),
reflink: edit(/^!?\[(label)\]\s*\[([^\]]*)\]/)
.replace('label', inline$1._label)
.replace('label', inline._label)
.getRegex()
});
@ -1331,8 +1293,8 @@ inline$1.pedantic = merge$1({}, inline$1.normal, {
* GFM Inline Grammar
*/
inline$1.gfm = merge$1({}, inline$1.normal, {
escape: edit(inline$1.escape).replace('])', '~|])').getRegex(),
inline.gfm = merge({}, inline.normal, {
escape: edit(inline.escape).replace('])', '~|])').getRegex(),
_extended_email: /[A-Za-z0-9._+-]+(@)[a-zA-Z0-9-_]+(?:\.[a-zA-Z0-9-_]*[a-zA-Z0-9])+(?![-_])/,
url: /^((?:ftp|https?):\/\/|www\.)(?:[a-zA-Z0-9\-]+\.?)+[^\s<]*|^email/,
_backpedal: /(?:[^?!.,:;*_~()&]+|\([^)]*\)|&(?![a-zA-Z0-9]+;$)|[?!.,:;*_~)]+(?!$))+/,
@ -1340,31 +1302,21 @@ inline$1.gfm = merge$1({}, inline$1.normal, {
text: /^([`~]+|[^`~])(?:(?= {2,}\n)|(?=[a-zA-Z0-9.!#$%&'*+\/=?_`{\|}~-]+@)|[\s\S]*?(?:(?=[\\<!\[`*~_]|\b_|https?:\/\/|ftp:\/\/|www\.|$)|[^ ](?= {2,}\n)|[^a-zA-Z0-9.!#$%&'*+\/=?_`{\|}~-](?=[a-zA-Z0-9.!#$%&'*+\/=?_`{\|}~-]+@)))/
});
inline$1.gfm.url = edit(inline$1.gfm.url, 'i')
.replace('email', inline$1.gfm._extended_email)
inline.gfm.url = edit(inline.gfm.url, 'i')
.replace('email', inline.gfm._extended_email)
.getRegex();
/**
* GFM + Line Breaks Inline Grammar
*/
inline$1.breaks = merge$1({}, inline$1.gfm, {
br: edit(inline$1.br).replace('{2,}', '*').getRegex(),
text: edit(inline$1.gfm.text)
inline.breaks = merge({}, inline.gfm, {
br: edit(inline.br).replace('{2,}', '*').getRegex(),
text: edit(inline.gfm.text)
.replace('\\b_', '\\b_| {2,}\\n')
.replace(/\{2,\}/g, '*')
.getRegex()
});
var rules = {
block: block$1,
inline: inline$1
};
const Tokenizer$2 = Tokenizer_1$1;
const { defaults: defaults$3 } = defaults$5.exports;
const { block, inline } = rules;
const { repeatString } = helpers;
/**
* smartypants text replacement
*/
@ -1409,12 +1361,12 @@ function mangle(text) {
/**
* Block Lexer
*/
var Lexer_1$1 = class Lexer {
class Lexer {
constructor(options) {
this.tokens = [];
this.tokens.links = Object.create(null);
this.options = options || defaults$3;
this.options.tokenizer = this.options.tokenizer || new Tokenizer$2();
this.options = options || defaults;
this.options.tokenizer = this.options.tokenizer || new Tokenizer();
this.tokenizer = this.options.tokenizer;
this.tokenizer.options = this.options;
this.tokenizer.lexer = this;
@ -1850,20 +1802,14 @@ var Lexer_1$1 = class Lexer {
return tokens;
}
};
const { defaults: defaults$2 } = defaults$5.exports;
const {
cleanUrl,
escape: escape$1
} = helpers;
}
/**
* Renderer
*/
var Renderer_1$1 = class Renderer {
class Renderer {
constructor(options) {
this.options = options || defaults$2;
this.options = options || defaults;
}
code(code, infostring, escaped) {
@ -1880,15 +1826,15 @@ var Renderer_1$1 = class Renderer {
if (!lang) {
return '<pre><code>'
+ (escaped ? code : escape$1(code, true))
+ (escaped ? code : escape(code, true))
+ '</code></pre>\n';
}
return '<pre><code class="'
+ this.options.langPrefix
+ escape$1(lang, true)
+ escape(lang, true)
+ '">'
+ (escaped ? code : escape$1(code, true))
+ (escaped ? code : escape(code, true))
+ '</code></pre>\n';
}
@ -1992,7 +1938,7 @@ var Renderer_1$1 = class Renderer {
if (href === null) {
return text;
}
let out = '<a href="' + escape$1(href) + '"';
let out = '<a href="' + escape(href) + '"';
if (title) {
out += ' title="' + title + '"';
}
@ -2017,14 +1963,13 @@ var Renderer_1$1 = class Renderer {
text(text) {
return text;
}
};
}
/**
* TextRenderer
* returns only the textual part of the token
*/
var TextRenderer_1$1 = class TextRenderer {
class TextRenderer {
// no need for block level renderers
strong(text) {
return text;
@ -2061,13 +2006,12 @@ var TextRenderer_1$1 = class TextRenderer {
br() {
return '';
}
};
}
/**
* Slugger generates header id
*/
var Slugger_1$1 = class Slugger {
class Slugger {
constructor() {
this.seen = {};
}
@ -2112,27 +2056,19 @@ var Slugger_1$1 = class Slugger {
const slug = this.serialize(value);
return this.getNextSafeSlug(slug, options.dryrun);
}
};
const Renderer$2 = Renderer_1$1;
const TextRenderer$2 = TextRenderer_1$1;
const Slugger$2 = Slugger_1$1;
const { defaults: defaults$1 } = defaults$5.exports;
const {
unescape
} = helpers;
}
/**
* Parsing & Compiling
*/
var Parser_1$1 = class Parser {
class Parser {
constructor(options) {
this.options = options || defaults$1;
this.options.renderer = this.options.renderer || new Renderer$2();
this.options = options || defaults;
this.options.renderer = this.options.renderer || new Renderer();
this.renderer = this.options.renderer;
this.renderer.options = this.options;
this.textRenderer = new TextRenderer$2();
this.slugger = new Slugger$2();
this.textRenderer = new TextRenderer();
this.slugger = new Slugger();
}
/**
@ -2399,29 +2335,12 @@ var Parser_1$1 = class Parser {
}
return out;
}
};
const Lexer$1 = Lexer_1$1;
const Parser$1 = Parser_1$1;
const Tokenizer$1 = Tokenizer_1$1;
const Renderer$1 = Renderer_1$1;
const TextRenderer$1 = TextRenderer_1$1;
const Slugger$1 = Slugger_1$1;
const {
merge,
checkSanitizeDeprecation,
escape
} = helpers;
const {
getDefaults,
changeDefaults,
defaults
} = defaults$5.exports;
}
/**
* Marked
*/
function marked$1(src, opt, callback) {
function marked(src, opt, callback) {
// throw error in case of non string input
if (typeof src === 'undefined' || src === null) {
throw new Error('marked(): input parameter is undefined or null');
@ -2436,7 +2355,7 @@ function marked$1(src, opt, callback) {
opt = null;
}
opt = merge({}, marked$1.defaults, opt || {});
opt = merge({}, marked.defaults, opt || {});
checkSanitizeDeprecation(opt);
if (callback) {
@ -2444,7 +2363,7 @@ function marked$1(src, opt, callback) {
let tokens;
try {
tokens = Lexer$1.lex(src, opt);
tokens = Lexer.lex(src, opt);
} catch (e) {
return callback(e);
}
@ -2455,9 +2374,9 @@ function marked$1(src, opt, callback) {
if (!err) {
try {
if (opt.walkTokens) {
marked$1.walkTokens(tokens, opt.walkTokens);
marked.walkTokens(tokens, opt.walkTokens);
}
out = Parser$1.parse(tokens, opt);
out = Parser.parse(tokens, opt);
} catch (e) {
err = e;
}
@ -2479,7 +2398,7 @@ function marked$1(src, opt, callback) {
if (!tokens.length) return done();
let pending = 0;
marked$1.walkTokens(tokens, function(token) {
marked.walkTokens(tokens, function(token) {
if (token.type === 'code') {
pending++;
setTimeout(() => {
@ -2509,11 +2428,11 @@ function marked$1(src, opt, callback) {
}
try {
const tokens = Lexer$1.lex(src, opt);
const tokens = Lexer.lex(src, opt);
if (opt.walkTokens) {
marked$1.walkTokens(tokens, opt.walkTokens);
marked.walkTokens(tokens, opt.walkTokens);
}
return Parser$1.parse(tokens, opt);
return Parser.parse(tokens, opt);
} catch (e) {
e.message += '\nPlease report this to https://github.com/markedjs/marked.';
if (opt.silent) {
@ -2529,24 +2448,24 @@ function marked$1(src, opt, callback) {
* Options
*/
marked$1.options =
marked$1.setOptions = function(opt) {
merge(marked$1.defaults, opt);
changeDefaults(marked$1.defaults);
return marked$1;
marked.options =
marked.setOptions = function(opt) {
merge(marked.defaults, opt);
changeDefaults(marked.defaults);
return marked;
};
marked$1.getDefaults = getDefaults;
marked.getDefaults = getDefaults;
marked$1.defaults = defaults;
marked.defaults = defaults;
/**
* Use Extension
*/
marked$1.use = function(...args) {
marked.use = function(...args) {
const opts = merge({}, ...args);
const extensions = marked$1.defaults.extensions || { renderers: {}, childTokens: {} };
const extensions = marked.defaults.extensions || { renderers: {}, childTokens: {} };
let hasExtensions;
args.forEach((pack) => {
@ -2605,7 +2524,7 @@ marked$1.use = function(...args) {
// ==-- Parse "overwrite" extensions --== //
if (pack.renderer) {
const renderer = marked$1.defaults.renderer || new Renderer$1();
const renderer = marked.defaults.renderer || new Renderer();
for (const prop in pack.renderer) {
const prevRenderer = renderer[prop];
// Replace renderer with func to run extension, but fall back if false
@ -2620,7 +2539,7 @@ marked$1.use = function(...args) {
opts.renderer = renderer;
}
if (pack.tokenizer) {
const tokenizer = marked$1.defaults.tokenizer || new Tokenizer$1();
const tokenizer = marked.defaults.tokenizer || new Tokenizer();
for (const prop in pack.tokenizer) {
const prevTokenizer = tokenizer[prop];
// Replace tokenizer with func to run extension, but fall back if false
@ -2637,7 +2556,7 @@ marked$1.use = function(...args) {
// ==-- Parse WalkTokens extensions --== //
if (pack.walkTokens) {
const walkTokens = marked$1.defaults.walkTokens;
const walkTokens = marked.defaults.walkTokens;
opts.walkTokens = function(token) {
pack.walkTokens.call(this, token);
if (walkTokens) {
@ -2650,7 +2569,7 @@ marked$1.use = function(...args) {
opts.extensions = extensions;
}
marked$1.setOptions(opts);
marked.setOptions(opts);
});
};
@ -2658,32 +2577,32 @@ marked$1.use = function(...args) {
* Run callback for every token
*/
marked$1.walkTokens = function(tokens, callback) {
marked.walkTokens = function(tokens, callback) {
for (const token of tokens) {
callback.call(marked$1, token);
callback.call(marked, token);
switch (token.type) {
case 'table': {
for (const cell of token.header) {
marked$1.walkTokens(cell.tokens, callback);
marked.walkTokens(cell.tokens, callback);
}
for (const row of token.rows) {
for (const cell of row) {
marked$1.walkTokens(cell.tokens, callback);
marked.walkTokens(cell.tokens, callback);
}
}
break;
}
case 'list': {
marked$1.walkTokens(token.items, callback);
marked.walkTokens(token.items, callback);
break;
}
default: {
if (marked$1.defaults.extensions && marked$1.defaults.extensions.childTokens && marked$1.defaults.extensions.childTokens[token.type]) { // Walk any extensions
marked$1.defaults.extensions.childTokens[token.type].forEach(function(childTokens) {
marked$1.walkTokens(token[childTokens], callback);
if (marked.defaults.extensions && marked.defaults.extensions.childTokens && marked.defaults.extensions.childTokens[token.type]) { // Walk any extensions
marked.defaults.extensions.childTokens[token.type].forEach(function(childTokens) {
marked.walkTokens(token[childTokens], callback);
});
} else if (token.tokens) {
marked$1.walkTokens(token.tokens, callback);
marked.walkTokens(token.tokens, callback);
}
}
}
@ -2693,7 +2612,7 @@ marked$1.walkTokens = function(tokens, callback) {
/**
* Parse Inline
*/
marked$1.parseInline = function(src, opt) {
marked.parseInline = function(src, opt) {
// throw error in case of non string input
if (typeof src === 'undefined' || src === null) {
throw new Error('marked.parseInline(): input parameter is undefined or null');
@ -2703,15 +2622,15 @@ marked$1.parseInline = function(src, opt) {
+ Object.prototype.toString.call(src) + ', string expected');
}
opt = merge({}, marked$1.defaults, opt || {});
opt = merge({}, marked.defaults, opt || {});
checkSanitizeDeprecation(opt);
try {
const tokens = Lexer$1.lexInline(src, opt);
const tokens = Lexer.lexInline(src, opt);
if (opt.walkTokens) {
marked$1.walkTokens(tokens, opt.walkTokens);
marked.walkTokens(tokens, opt.walkTokens);
}
return Parser$1.parseInline(tokens, opt);
return Parser.parseInline(tokens, opt);
} catch (e) {
e.message += '\nPlease report this to https://github.com/markedjs/marked.';
if (opt.silent) {
@ -2726,37 +2645,23 @@ marked$1.parseInline = function(src, opt) {
/**
* Expose
*/
marked$1.Parser = Parser$1;
marked$1.parser = Parser$1.parse;
marked$1.Renderer = Renderer$1;
marked$1.TextRenderer = TextRenderer$1;
marked$1.Lexer = Lexer$1;
marked$1.lexer = Lexer$1.lex;
marked$1.Tokenizer = Tokenizer$1;
marked$1.Slugger = Slugger$1;
marked$1.parse = marked$1;
marked.Parser = Parser;
marked.parser = Parser.parse;
marked.Renderer = Renderer;
marked.TextRenderer = TextRenderer;
marked.Lexer = Lexer;
marked.lexer = Lexer.lex;
marked.Tokenizer = Tokenizer;
marked.Slugger = Slugger;
marked.parse = marked;
var marked_1 = marked$1;
const options = marked.options;
const setOptions = marked.setOptions;
const use = marked.use;
const walkTokens = marked.walkTokens;
const parseInline = marked.parseInline;
const parse = marked;
const parser = Parser.parse;
const lexer = Lexer.lex;
const marked = marked_1;
const Lexer = Lexer_1$1;
const Parser = Parser_1$1;
const Tokenizer = Tokenizer_1$1;
const Renderer = Renderer_1$1;
const TextRenderer = TextRenderer_1$1;
const Slugger = Slugger_1$1;
esmEntry$1.exports = marked;
var parse = esmEntry$1.exports.parse = marked;
var Parser_1 = esmEntry$1.exports.Parser = Parser;
var parser = esmEntry$1.exports.parser = Parser.parse;
var Renderer_1 = esmEntry$1.exports.Renderer = Renderer;
var TextRenderer_1 = esmEntry$1.exports.TextRenderer = TextRenderer;
var Lexer_1 = esmEntry$1.exports.Lexer = Lexer;
var lexer = esmEntry$1.exports.lexer = Lexer.lex;
var Tokenizer_1 = esmEntry$1.exports.Tokenizer = Tokenizer;
var Slugger_1 = esmEntry$1.exports.Slugger = Slugger;
var esmEntry = esmEntry$1.exports;
export { Lexer_1 as Lexer, Parser_1 as Parser, Renderer_1 as Renderer, Slugger_1 as Slugger, TextRenderer_1 as TextRenderer, Tokenizer_1 as Tokenizer, esmEntry as default, lexer, parse, parser };
export { Lexer, Parser, Renderer, Slugger, TextRenderer, Tokenizer, defaults, getDefaults, lexer, marked, options, parse, parseInline, parser, setOptions, use, walkTokens };

File diff suppressed because one or more lines are too long


@ -6,7 +6,7 @@ marked \- a javascript markdown parser
.SH SYNOPSIS
.B marked
[\-o \fI<output>\fP] [\-i \fI<input>\fP] [\-\-help]
[\-o \fI<output>\fP] [\-i \fI<input>\fP] [\-s \fI<string>\fP] [\-\-help]
[\-\-tokens] [\-\-pedantic] [\-\-gfm]
[\-\-breaks] [\-\-sanitize]
[\-\-smart\-lists] [\-\-lang\-prefix \fI<prefix>\fP]
@ -36,28 +36,8 @@ Specify file output. If none is specified, write to stdout.
Specify file input, otherwise use last argument as input file.
If no input file is specified, read from stdin.
.TP
.BI \-\-test
Makes sure the test(s) pass.
.RS
.PP
.B \-\-glob [\fIfile\fP]
Specify which test to use.
.PP
.B \-\-fix
Fixes tests.
.PP
.B \-\-bench
Benchmarks the test(s).
.PP
.B \-\-time
Times The test(s).
.PP
.B \-\-minified
Runs test file(s) as minified.
.PP
.B \-\-stop
Stop process if a test fails.
.RE
.BI \-s,\ \-\-string\ [\fIstring\fP]
Specify string input instead of a file.
.TP
.BI \-t,\ \-\-tokens
Output a token stream instead of html.
@ -98,7 +78,8 @@ For configuring and running programmatically.
.B Example
require('marked')('*foo*', { gfm: true });
import { marked } from 'marked';
marked('*foo*', { gfm: true });
.SH BUGS
Please report any bugs to https://github.com/markedjs/marked.


@ -4,9 +4,9 @@ NAME
marked - a javascript markdown parser
SYNOPSIS
marked [-o <output>] [-i <input>] [--help] [--tokens] [--pedantic]
[--gfm] [--breaks] [--sanitize] [--smart-lists] [--lang-prefix <pre-
fix>] [--no-etc...] [--silent] [filename]
marked [-o <output>] [-i <input>] [-s <string>] [--help] [--tokens]
[--pedantic] [--gfm] [--breaks] [--sanitize] [--smart-lists]
[--lang-prefix <prefix>] [--no-etc...] [--silent] [filename]
DESCRIPTION
@ -24,65 +24,55 @@ EXAMPLES
OPTIONS
-o, --output [output]
Specify file output. If none is specified, write to stdout.
Specify file output. If none is specified, write to stdout.
-i, --input [input]
Specify file input, otherwise use last argument as input file.
If no input file is specified, read from stdin.
Specify file input, otherwise use last argument as input file.
If no input file is specified, read from stdin.
--test Makes sure the test(s) pass.
--glob [file] Specify which test to use.
--fix Fixes tests.
--bench Benchmarks the test(s).
--time Times The test(s).
--minified Runs test file(s) as minified.
--stop Stop process if a test fails.
-s, --string [string]
Specify string input instead of a file.
-t, --tokens
Output a token stream instead of html.
Output a token stream instead of html.
--pedantic
Conform to obscure parts of markdown.pl as much as possible.
Don't fix original markdown bugs.
Conform to obscure parts of markdown.pl as much as possible.
Don't fix original markdown bugs.
--gfm Enable github flavored markdown.
--breaks
Enable GFM line breaks. Only works with the gfm option.
Enable GFM line breaks. Only works with the gfm option.
--sanitize
Sanitize output. Ignore any HTML input.
Sanitize output. Ignore any HTML input.
--smart-lists
Use smarter list behavior than the original markdown.
Use smarter list behavior than the original markdown.
--lang-prefix [prefix]
Set the prefix for code block classes.
Set the prefix for code block classes.
--mangle
Mangle email addresses.
Mangle email addresses.
--no-sanitize, -no-etc...
The inverse of any of the marked options above.
The inverse of any of the marked options above.
--silent
Silence error output.
Silence error output.
-h, --help
Display help information.
Display help information.
CONFIGURATION
For configuring and running programmatically.
Example
require('marked')('*foo*', { gfm: true });
import { marked } from 'marked';
marked('*foo*', { gfm: true });
BUGS
Please report any bugs to https://github.com/markedjs/marked.

marked.min.js: vendored, 2 changed lines

File diff suppressed because one or more lines are too long

package-lock.json: generated, 5009 changed lines

File diff suppressed because it is too large


@ -3,11 +3,12 @@
"description": "A markdown parser built for speed",
"author": "Christopher Jeffrey",
"version": "3.0.8",
"main": "./src/marked.js",
"type": "module",
"main": "./lib/marked.esm.js",
"module": "./lib/marked.esm.js",
"browser": "./lib/marked.js",
"browser": "./lib/marked.cjs",
"bin": {
"marked": "bin/marked"
"marked": "bin/marked.js"
},
"man": "./man/marked.1",
"files": [
@ -17,6 +18,13 @@
"man/",
"marked.min.js"
],
"exports": {
".": {
"import": "./lib/marked.esm.js",
"default": "./lib/marked.cjs"
},
"./package.json": "./package.json"
},
"repository": "git://github.com/markedjs/marked.git",
"homepage": "https://marked.js.org",
"bugs": {
@ -68,19 +76,19 @@
"test:all": "npm test && npm run test:lint",
"test:unit": "npm test -- test/unit/**/*-spec.js",
"test:specs": "npm test -- test/specs/**/*-spec.js",
"test:lint": "eslint bin/marked .",
"test:lint": "eslint .",
"test:redos": "node test/vuln-regex.js",
"test:update": "node test/update-specs.js",
"rules": "node test/rules.js",
"bench": "npm run rollup && node test/bench.js",
"lint": "eslint --fix bin/marked .",
"build:reset": "git checkout upstream/master lib/marked.js lib/marked.esm.js marked.min.js",
"lint": "eslint --fix .",
"build:reset": "git checkout upstream/master lib/marked.cjs lib/marked.esm.js marked.min.js",
"build": "npm run rollup && npm run minify",
"build:docs": "node build-docs.js",
"rollup": "npm run rollup:umd && npm run rollup:esm",
"rollup:umd": "rollup -c rollup.config.js",
"rollup:esm": "rollup -c rollup.config.esm.js",
"minify": "uglifyjs lib/marked.js -cm --comments /Copyright/ -o marked.min.js",
"minify": "uglifyjs lib/marked.cjs -cm --comments /Copyright/ -o marked.min.js",
"preversion": "npm run build && (git diff --quiet || git commit -am build)"
},
"engines": {


@ -2,7 +2,7 @@ const commonjs = require('@rollup/plugin-commonjs');
const license = require('rollup-plugin-license');
module.exports = {
input: 'src/esm-entry.js',
input: 'src/marked.js',
output: {
file: 'lib/marked.esm.js',
format: 'esm'


@ -5,10 +5,9 @@ const license = require('rollup-plugin-license');
module.exports = {
input: 'src/marked.js',
output: {
file: 'lib/marked.js',
file: 'lib/marked.cjs',
format: 'umd',
name: 'marked',
exports: 'default'
name: 'marked'
},
plugins: [
license({


@ -1,7 +1,7 @@
const Tokenizer = require('./Tokenizer.js');
const { defaults } = require('./defaults.js');
const { block, inline } = require('./rules.js');
const { repeatString } = require('./helpers.js');
import { Tokenizer } from './Tokenizer.js';
import { defaults } from './defaults.js';
import { block, inline } from './rules.js';
import { repeatString } from './helpers.js';
/**
* smartypants text replacement
@ -47,7 +47,7 @@ function mangle(text) {
/**
* Block Lexer
*/
module.exports = class Lexer {
export class Lexer {
constructor(options) {
this.tokens = [];
this.tokens.links = Object.create(null);
@ -488,4 +488,4 @@ module.exports = class Lexer {
return tokens;
}
};
}


@ -1,15 +1,15 @@
const Renderer = require('./Renderer.js');
const TextRenderer = require('./TextRenderer.js');
const Slugger = require('./Slugger.js');
const { defaults } = require('./defaults.js');
const {
import { Renderer } from './Renderer.js';
import { TextRenderer } from './TextRenderer.js';
import { Slugger } from './Slugger.js';
import { defaults } from './defaults.js';
import {
unescape
} = require('./helpers.js');
} from './helpers.js';
/**
* Parsing & Compiling
*/
module.exports = class Parser {
export class Parser {
constructor(options) {
this.options = options || defaults;
this.options.renderer = this.options.renderer || new Renderer();
@ -283,4 +283,4 @@ module.exports = class Parser {
}
return out;
}
};
}


@ -1,13 +1,13 @@
const { defaults } = require('./defaults.js');
const {
import { defaults } from './defaults.js';
import {
cleanUrl,
escape
} = require('./helpers.js');
} from './helpers.js';
/**
* Renderer
*/
module.exports = class Renderer {
export class Renderer {
constructor(options) {
this.options = options || defaults;
}
@ -163,4 +163,4 @@ module.exports = class Renderer {
text(text) {
return text;
}
};
}


@ -1,7 +1,7 @@
/**
* Slugger generates header id
*/
module.exports = class Slugger {
export class Slugger {
constructor() {
this.seen = {};
}
@ -46,4 +46,4 @@ module.exports = class Slugger {
const slug = this.serialize(value);
return this.getNextSafeSlug(slug, options.dryrun);
}
};
}


@ -2,7 +2,7 @@
* TextRenderer
* returns only the textual part of the token
*/
module.exports = class TextRenderer {
export class TextRenderer {
// no need for block level renderers
strong(text) {
return text;
@ -39,4 +39,4 @@ module.exports = class TextRenderer {
br() {
return '';
}
};
}


@ -1,10 +1,10 @@
const { defaults } = require('./defaults.js');
const {
import { defaults } from './defaults.js';
import {
rtrim,
splitCells,
escape,
findClosingBracket
} = require('./helpers.js');
} from './helpers.js';
function outputLink(cap, link, raw, lexer) {
const href = link.href;
@ -65,7 +65,7 @@ function indentCodeCompensation(raw, text) {
/**
* Tokenizer
*/
module.exports = class Tokenizer {
export class Tokenizer {
constructor(options) {
this.options = options || defaults;
}
@ -752,4 +752,4 @@ module.exports = class Tokenizer {
};
}
}
};
}


@ -1,4 +1,4 @@
function getDefaults() {
export function getDefaults() {
return {
baseUrl: null,
breaks: false,
@ -22,12 +22,8 @@ function getDefaults() {
};
}
function changeDefaults(newDefaults) {
module.exports.defaults = newDefaults;
}
export let defaults = getDefaults();
module.exports = {
defaults: getDefaults(),
getDefaults,
changeDefaults
};
export function changeDefaults(newDefaults) {
defaults = newDefaults;
}
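
A small sketch of why the mutable default options above still work for importers: ES module named exports are live bindings, so a reassignment inside defaults.js is visible to consuming modules (assumes the snippet sits next to src/defaults.js):

```js
// ESM exports are live bindings: this module always reads the current value
// of the `defaults` variable inside defaults.js.
import { defaults, changeDefaults } from './defaults.js';

console.log(defaults.breaks);                  // false, from getDefaults()
changeDefaults({ ...defaults, breaks: true });
console.log(defaults.breaks);                  // true, the binding tracks the reassignment
```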


@ -1,18 +0,0 @@
const marked = require('./marked.js');
const Lexer = require('./Lexer.js');
const Parser = require('./Parser.js');
const Tokenizer = require('./Tokenizer.js');
const Renderer = require('./Renderer.js');
const TextRenderer = require('./TextRenderer.js');
const Slugger = require('./Slugger.js');
module.exports = marked;
module.exports.parse = marked;
module.exports.Parser = Parser;
module.exports.parser = Parser.parse;
module.exports.Renderer = Renderer;
module.exports.TextRenderer = TextRenderer;
module.exports.Lexer = Lexer;
module.exports.lexer = Lexer.lex;
module.exports.Tokenizer = Tokenizer;
module.exports.Slugger = Slugger;


@ -13,7 +13,7 @@ const escapeReplacements = {
"'": '&#39;'
};
const getEscapeReplacement = (ch) => escapeReplacements[ch];
function escape(html, encode) {
export function escape(html, encode) {
if (encode) {
if (escapeTest.test(html)) {
return html.replace(escapeReplace, getEscapeReplacement);
@ -29,7 +29,7 @@ function escape(html, encode) {
const unescapeTest = /&(#(?:\d+)|(?:#x[0-9A-Fa-f]+)|(?:\w+));?/ig;
function unescape(html) {
export function unescape(html) {
// explicitly match decimal, hex, and named HTML entities
return html.replace(unescapeTest, (_, n) => {
n = n.toLowerCase();
@ -44,7 +44,7 @@ function unescape(html) {
}
const caret = /(^|[^\[])\^/g;
function edit(regex, opt) {
export function edit(regex, opt) {
regex = regex.source || regex;
opt = opt || '';
const obj = {
@ -63,7 +63,7 @@ function edit(regex, opt) {
const nonWordAndColonTest = /[^\w:]/g;
const originIndependentUrl = /^$|^[a-z][a-z0-9+.-]*:|^[?#]/i;
function cleanUrl(sanitize, base, href) {
export function cleanUrl(sanitize, base, href) {
if (sanitize) {
let prot;
try {
@ -93,7 +93,7 @@ const justDomain = /^[^:]+:\/*[^/]*$/;
const protocol = /^([^:]+:)[\s\S]*$/;
const domain = /^([^:]+:\/*[^/]*)[\s\S]*$/;
function resolveUrl(base, href) {
export function resolveUrl(base, href) {
if (!baseUrls[' ' + base]) {
// we can ignore everything in base after the last slash of its path component,
// but we might need to add _that_
@ -122,9 +122,9 @@ function resolveUrl(base, href) {
}
}
const noopTest = { exec: function noopTest() {} };
export const noopTest = { exec: function noopTest() {} };
function merge(obj) {
export function merge(obj) {
let i = 1,
target,
key;
@ -141,7 +141,7 @@ function merge(obj) {
return obj;
}
function splitCells(tableRow, count) {
export function splitCells(tableRow, count) {
// ensure that every cell-delimiting pipe has a space
// before it to distinguish it from an escaped pipe
const row = tableRow.replace(/\|/g, (match, offset, str) => {
@ -180,7 +180,7 @@ function splitCells(tableRow, count) {
// Remove trailing 'c's. Equivalent to str.replace(/c*$/, '').
// /c*$/ is vulnerable to REDOS.
// invert: Remove suffix of non-c chars instead. Default falsey.
function rtrim(str, c, invert) {
export function rtrim(str, c, invert) {
const l = str.length;
if (l === 0) {
return '';
@ -204,7 +204,7 @@ function rtrim(str, c, invert) {
return str.substr(0, l - suffLen);
}
function findClosingBracket(str, b) {
export function findClosingBracket(str, b) {
if (str.indexOf(b[1]) === -1) {
return -1;
}
@ -226,14 +226,14 @@ function findClosingBracket(str, b) {
return -1;
}
function checkSanitizeDeprecation(opt) {
export function checkSanitizeDeprecation(opt) {
if (opt && opt.sanitize && !opt.silent) {
console.warn('marked(): sanitize and sanitizer parameters are deprecated since version 0.7.0, should not be used and will be removed in the future. Read more here: https://marked.js.org/#/USING_ADVANCED.md#options');
}
}
// copied from https://stackoverflow.com/a/5450113/806777
function repeatString(pattern, count) {
export function repeatString(pattern, count) {
if (count < 1) {
return '';
}
@ -247,18 +247,3 @@ function repeatString(pattern, count) {
}
return result + pattern;
}
module.exports = {
escape,
unescape,
edit,
cleanUrl,
resolveUrl,
noopTest,
merge,
splitCells,
rtrim,
findClosingBracket,
checkSanitizeDeprecation,
repeatString
};
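
With the `module.exports` block gone, each helper is an individual named export. A quick usage sketch (relative path assumed from the repository root):

import { escape, merge, rtrim } from './src/helpers.js';

console.log(escape('<b>"hi"</b>'));                        // '&lt;b&gt;&quot;hi&quot;&lt;/b&gt;'
console.log(rtrim('marked....', '.'));                     // 'marked', trailing dots removed
console.log(merge({}, { gfm: true }, { breaks: false }));  // { gfm: true, breaks: false }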

View File

@ -1,24 +1,24 @@
const Lexer = require('./Lexer.js');
const Parser = require('./Parser.js');
const Tokenizer = require('./Tokenizer.js');
const Renderer = require('./Renderer.js');
const TextRenderer = require('./TextRenderer.js');
const Slugger = require('./Slugger.js');
const {
import { Lexer } from './Lexer.js';
import { Parser } from './Parser.js';
import { Tokenizer } from './Tokenizer.js';
import { Renderer } from './Renderer.js';
import { TextRenderer } from './TextRenderer.js';
import { Slugger } from './Slugger.js';
import {
merge,
checkSanitizeDeprecation,
escape
} = require('./helpers.js');
const {
} from './helpers.js';
import {
getDefaults,
changeDefaults,
defaults
} = require('./defaults.js');
} from './defaults.js';
/**
* Marked
*/
function marked(src, opt, callback) {
export function marked(src, opt, callback) {
// throw error in case of non string input
if (typeof src === 'undefined' || src === null) {
throw new Error('marked(): input parameter is undefined or null');
@ -333,4 +333,18 @@ marked.Tokenizer = Tokenizer;
marked.Slugger = Slugger;
marked.parse = marked;
module.exports = marked;
export const options = marked.options;
export const setOptions = marked.setOptions;
export const use = marked.use;
export const walkTokens = marked.walkTokens;
export const parseInline = marked.parseInline;
export const parse = marked;
export const parser = Parser.parse;
export const lexer = Lexer.lex;
export { defaults, getDefaults } from './defaults.js';
export { Lexer } from './Lexer.js';
export { Parser } from './Parser.js';
export { Tokenizer } from './Tokenizer.js';
export { Renderer } from './Renderer.js';
export { TextRenderer } from './TextRenderer.js';
export { Slugger } from './Slugger.js';
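
A sketch of consuming the new named exports listed above, importing from the local src entry shown in this diff (the exact specifier for the published package may differ):

import { marked, lexer, parser, Slugger } from './src/marked.js';

const tokens = lexer('# hello');                    // shortcut for Lexer.lex
console.log(parser(tokens));                        // '<h1 id="hello">hello</h1>\n'
console.log(marked.parse('**bold**'));              // '<p><strong>bold</strong></p>\n'
console.log(new Slugger().slug('Some Heading'));    // 'some-heading'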

View File

@ -1,13 +1,13 @@
const {
import {
noopTest,
edit,
merge
} = require('./helpers.js');
} from './helpers.js';
/**
* Block-Level Grammar
*/
const block = {
export const block = {
newline: /^(?: *(?:\n|$))+/,
code: /^( {4}[^\n]+(?:\n(?: *(?:\n|$))*)?)+/,
fences: /^ {0,3}(`{3,}(?=[^`\n]*\n)|~{3,})([^\n]*)\n(?:|([\s\S]*?)\n)(?: {0,3}\1[~`]* *(?=\n|$)|$)/,
@ -139,7 +139,7 @@ block.pedantic = merge({}, block.normal, {
/**
* Inline-Level Grammar
*/
const inline = {
export const inline = {
escape: /^\\([!"#$%&'()*+,\-./:;<=>?@\[\]\\^_`{|}~])/,
autolink: /^<(scheme:[^\s\x00-\x1f<>]*|email)>/,
url: noopTest,
@ -283,8 +283,3 @@ inline.breaks = merge({}, inline.gfm, {
.replace(/\{2,\}/g, '*')
.getRegex()
});
module.exports = {
block,
inline
};
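
The grammar tables become named exports as well; a quick sketch, assuming only the `block` and `inline` exports shown here:

import { block, inline } from './src/rules.js';

console.log(block.fences.test('```js\ncode\n```'));   // true, fenced code block rule
console.log(inline.escape.test('\\*not emphasis*'));  // true, escaped punctuation rule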

test/bench.js vendored
View File

@ -1,15 +1,19 @@
const path = require('path');
const htmlDiffer = require('./helpers/html-differ.js');
const { loadFiles } = require('./helpers/load.js');
import { dirname, resolve } from 'path';
import { fileURLToPath } from 'url';
import { isEqual } from './helpers/html-differ.js';
import { loadFiles } from './helpers/load.js';
let marked = require('../lib/marked.js');
const es6marked = require('../src/marked.js');
import { marked as esmMarked } from '../lib/marked.esm.js';
const __dirname = dirname(fileURLToPath(import.meta.url));
let marked;
/**
* Load specs
*/
function load() {
const dir = path.resolve(__dirname, './specs/commonmark');
export function load() {
const dir = resolve(__dirname, './specs/commonmark');
const sections = loadFiles(dir);
let specs = [];
@ -23,7 +27,7 @@ function load() {
/**
* Run all benchmarks
*/
async function runBench(options) {
export async function runBench(options) {
options = options || {};
const specs = load();
@ -38,9 +42,9 @@ async function runBench(options) {
if (options.marked) {
marked.setOptions(options.marked);
}
await bench('es5 marked', specs, marked);
await bench('cjs marked', specs, marked.parse);
es6marked.setOptions({
esmMarked.setOptions({
gfm: false,
breaks: false,
pedantic: false,
@ -48,9 +52,9 @@ async function runBench(options) {
smartLists: false
});
if (options.marked) {
es6marked.setOptions(options.marked);
esmMarked.setOptions(options.marked);
}
await bench('es6 marked', specs, es6marked);
await bench('esm marked', specs, esmMarked.parse);
// GFM
marked.setOptions({
@ -63,9 +67,9 @@ async function runBench(options) {
if (options.marked) {
marked.setOptions(options.marked);
}
await bench('es5 marked (gfm)', specs, marked);
await bench('cjs marked (gfm)', specs, marked.parse);
es6marked.setOptions({
esmMarked.setOptions({
gfm: true,
breaks: false,
pedantic: false,
@ -73,9 +77,9 @@ async function runBench(options) {
smartLists: false
});
if (options.marked) {
es6marked.setOptions(options.marked);
esmMarked.setOptions(options.marked);
}
await bench('es6 marked (gfm)', specs, es6marked);
await bench('esm marked (gfm)', specs, esmMarked.parse);
// Pedantic
marked.setOptions({
@ -88,9 +92,9 @@ async function runBench(options) {
if (options.marked) {
marked.setOptions(options.marked);
}
await bench('es5 marked (pedantic)', specs, marked);
await bench('cjs marked (pedantic)', specs, marked.parse);
es6marked.setOptions({
esmMarked.setOptions({
gfm: false,
breaks: false,
pedantic: true,
@ -98,35 +102,35 @@ async function runBench(options) {
smartLists: false
});
if (options.marked) {
es6marked.setOptions(options.marked);
esmMarked.setOptions(options.marked);
}
await bench('es6 marked (pedantic)', specs, es6marked);
await bench('esm marked (pedantic)', specs, esmMarked.parse);
try {
await bench('commonmark', specs, (() => {
const commonmark = require('commonmark');
const parser = new commonmark.Parser();
const writer = new commonmark.HtmlRenderer();
await bench('commonmark', specs, (await (async() => {
const { Parser, HtmlRenderer } = await import('commonmark');
const parser = new Parser();
const writer = new HtmlRenderer();
return function(text) {
return writer.render(parser.parse(text));
};
})());
})()));
} catch (e) {
console.error('Could not bench commonmark. (Error: %s)', e.message);
}
try {
await bench('markdown-it', specs, (() => {
const MarkdownIt = require('markdown-it');
await bench('markdown-it', specs, (await (async() => {
const MarkdownIt = (await import('markdown-it')).default;
const md = new MarkdownIt();
return md.render.bind(md);
})());
})()));
} catch (e) {
console.error('Could not bench markdown-it. (Error: %s)', e.message);
}
}
async function bench(name, specs, engine) {
export async function bench(name, specs, engine) {
const before = process.hrtime();
for (let i = 0; i < 1e3; i++) {
for (const spec of specs) {
@ -138,7 +142,7 @@ async function bench(name, specs, engine) {
let correct = 0;
for (const spec of specs) {
if (await htmlDiffer.isEqual(spec.html, await engine(spec.markdown))) {
if (await isEqual(spec.html, await engine(spec.markdown))) {
correct++;
}
}
@ -150,7 +154,7 @@ async function bench(name, specs, engine) {
/**
* A simple one-time benchmark
*/
async function time(options) {
export async function time(options) {
options = options || {};
const specs = load();
if (options.marked) {
@ -252,11 +256,13 @@ function camelize(text) {
/**
* Main
*/
async function main(argv) {
export default async function main(argv) {
marked = (await import('../lib/marked.cjs')).marked;
const opt = parseArg(argv);
if (opt.minified) {
marked = require('../marked.min.js');
marked = (await import('../marked.min.js')).marked;
}
if (opt.time) {
@ -275,14 +281,5 @@ function prettyElapsedTime(hrtimeElapsed) {
return seconds * 1e3 + frac;
}
if (!module.parent) {
process.title = 'marked bench';
main(process.argv.slice());
} else {
module.exports = main;
module.exports.main = main;
module.exports.time = time;
module.exports.runBench = runBench;
module.exports.load = load;
module.exports.bench = bench;
}
process.title = 'marked bench';
main(process.argv.slice());
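
Two ESM patterns introduced by this benchmark rewrite, sketched in isolation. The markdown-it package is the same optional dependency the file already benchmarks; Node 14.8+ is assumed for top-level await:

import { dirname, resolve } from 'path';
import { fileURLToPath } from 'url';

// ESM has no __dirname, so it is rebuilt from import.meta.url
const __dirname = dirname(fileURLToPath(import.meta.url));
console.log(resolve(__dirname, './specs/commonmark'));

// Optional competitors are loaded lazily; a missing package only skips that benchmark
let render;
try {
  const MarkdownIt = (await import('markdown-it')).default;
  const md = new MarkdownIt();
  render = md.render.bind(md);
} catch (e) {
  console.error('Could not load markdown-it. (Error: %s)', e.message);
}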

View File

@ -1,9 +1,9 @@
const marked = require('../../src/marked.js');
const htmlDiffer = require('./html-differ.js');
const assert = require('assert');
import { marked, setOptions, getDefaults } from '../../src/marked.js';
import { isEqual, firstDiff } from './html-differ.js';
import { strictEqual } from 'assert';
beforeEach(() => {
marked.setOptions(marked.getDefaults());
setOptions(getDefaults());
jasmine.addAsyncMatchers({
toRender: () => {
@ -11,12 +11,12 @@ beforeEach(() => {
compare: async(spec, expected) => {
const result = {};
const actual = marked(spec.markdown, spec.options);
result.pass = await htmlDiffer.isEqual(expected, actual);
result.pass = await isEqual(expected, actual);
if (result.pass) {
result.message = `${spec.markdown}\n------\n\nExpected: Should Fail`;
} else {
const diff = await htmlDiffer.firstDiff(actual, expected);
const diff = await firstDiff(actual, expected);
result.message = `Expected: ${diff.expected}\n Actual: ${diff.actual}`;
}
return result;
@ -27,12 +27,12 @@ beforeEach(() => {
return {
compare: async(actual, expected) => {
const result = {};
result.pass = await htmlDiffer.isEqual(expected, actual);
result.pass = await isEqual(expected, actual);
if (result.pass) {
result.message = `Expected '${actual}' not to equal '${expected}'`;
} else {
const diff = await htmlDiffer.firstDiff(actual, expected);
const diff = await firstDiff(actual, expected);
result.message = `Expected: ${diff.expected}\n Actual: ${diff.actual}`;
}
return result;
@ -44,7 +44,7 @@ beforeEach(() => {
const result = {};
const actual = marked(spec.markdown, spec.options);
result.pass = assert.strictEqual(expected, actual) === undefined;
result.pass = strictEqual(expected, actual) === undefined;
return result;
}

View File

@ -1,40 +1,38 @@
const HtmlDiffer = require('@markedjs/html-differ').HtmlDiffer;
import { HtmlDiffer } from '@markedjs/html-differ';
const htmlDiffer = new HtmlDiffer({
ignoreSelfClosingSlash: true,
ignoreComments: false
});
module.exports = {
isEqual: htmlDiffer.isEqual.bind(htmlDiffer),
firstDiff: async(actual, expected, padding) => {
padding = padding || 30;
const diffHtml = await htmlDiffer.diffHtml(actual, expected);
const result = diffHtml.reduce((obj, diff) => {
if (diff.added) {
if (obj.firstIndex === null) {
obj.firstIndex = obj.expected.length;
}
obj.expected += diff.value;
} else if (diff.removed) {
if (obj.firstIndex === null) {
obj.firstIndex = obj.actual.length;
}
obj.actual += diff.value;
} else {
obj.actual += diff.value;
obj.expected += diff.value;
export const isEqual = htmlDiffer.isEqual.bind(htmlDiffer);
export async function firstDiff(actual, expected, padding) {
padding = padding || 30;
const diffHtml = await htmlDiffer.diffHtml(actual, expected);
const result = diffHtml.reduce((obj, diff) => {
if (diff.added) {
if (obj.firstIndex === null) {
obj.firstIndex = obj.expected.length;
}
obj.expected += diff.value;
} else if (diff.removed) {
if (obj.firstIndex === null) {
obj.firstIndex = obj.actual.length;
}
obj.actual += diff.value;
} else {
obj.actual += diff.value;
obj.expected += diff.value;
}
return obj;
}, {
firstIndex: null,
actual: '',
expected: ''
});
return obj;
}, {
firstIndex: null,
actual: '',
expected: ''
});
return {
actual: result.actual.substring(result.firstIndex - padding, result.firstIndex + padding),
expected: result.expected.substring(result.firstIndex - padding, result.firstIndex + padding)
};
}
};
return {
actual: result.actual.substring(result.firstIndex - padding, result.firstIndex + padding),
expected: result.expected.substring(result.firstIndex - padding, result.firstIndex + padding)
};
}
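
The differ helpers are now individual named exports; a small sketch of calling them from an ESM test file (top-level await assumed, path relative to the repository root):

import { isEqual, firstDiff } from './test/helpers/html-differ.js';

const expected = '<p>hello <em>world</em></p>';
const actual = '<p>hello <strong>world</strong></p>';

console.log(await isEqual(expected, expected));   // true
const diff = await firstDiff(actual, expected);
console.log(diff.actual, '|', diff.expected);     // roughly 30 characters around the first mismatch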

View File

@ -1,8 +1,9 @@
'use strict';
import fs from 'fs';
import path from 'path';
import fm from 'front-matter';
import { createRequire } from 'module';
const fs = require('fs');
const path = require('path');
const fm = require('front-matter');
const require = createRequire(import.meta.url);
function node4Polyfills() {
// https://github.com/uxitten/polyfill/blob/master/string.polyfill.js
@ -45,7 +46,7 @@ function node4Polyfills() {
}
node4Polyfills();
function outputCompletionTable(title, specs) {
export function outputCompletionTable(title, specs) {
let longestName = 0;
let maxSpecs = 0;
@ -67,7 +68,7 @@ function outputCompletionTable(title, specs) {
console.log();
}
function loadFiles(dir) {
export function loadFiles(dir) {
const files = fs.readdirSync(dir);
return files.reduce((obj, file) => {
@ -93,9 +94,14 @@ function loadFiles(dir) {
}];
break;
}
case '.js':
case '.cjs':
case '.json': {
specs = require(absFile);
try {
specs = require(absFile);
} catch (err) {
console.log(`Error loading ${absFile}`);
throw err;
}
if (!Array.isArray(specs)) {
specs = [specs];
}
@ -125,8 +131,3 @@ function loadFiles(dir) {
return obj;
}, {});
}
module.exports = {
outputCompletionTable,
loadFiles
};
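
The createRequire shim above is what lets this ESM helper keep loading .json and .cjs spec fixtures synchronously; a stripped-down sketch (the ../package.json path is only an illustration):

import { createRequire } from 'module';

const require = createRequire(import.meta.url);
const pkg = require('../package.json');   // resolved relative to this file, just like CJS require
console.log(pkg.name);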

test/rules.js vendored
View File

@ -1,4 +1,4 @@
const rules = require('../src/rules.js');
import * as rules from '../src/rules.js';
const COLOR = {
reset: '\x1b[0m',

View File

@ -1,12 +1,15 @@
const path = require('path');
const load = require('../helpers/load.js');
import { dirname, resolve } from 'path';
import { fileURLToPath } from 'url';
import { loadFiles, outputCompletionTable } from '../helpers/load.js';
const __dirname = dirname(fileURLToPath(import.meta.url));
function runSpecs(title, dir, showCompletionTable, options) {
options = options || {};
const specs = load.loadFiles(path.resolve(__dirname, dir));
const specs = loadFiles(resolve(__dirname, dir));
if (showCompletionTable) {
load.outputCompletionTable(title, specs);
outputCompletionTable(title, specs);
}
describe(title, () => {

View File

@ -1,4 +1,4 @@
const Lexer = require('../../src/Lexer.js');
import { Lexer } from '../../src/Lexer.js';
function expectTokens({ md, options, tokens = [], links = {} }) {
const lexer = new Lexer(options);

View File

@ -1,4 +1,4 @@
const Parser = require('../../src/Parser.js');
import { Parser } from '../../src/Parser.js';
async function expectHtml({ tokens, options, html, inline }) {
const parser = new Parser(options);

View File

@ -1,15 +1,15 @@
const marked = require('../../src/marked.js');
import { marked, Renderer, Slugger, lexer, parseInline, use, getDefaults, walkTokens as _walkTokens } from '../../src/marked.js';
describe('Test heading ID functionality', () => {
it('should add id attribute by default', () => {
const renderer = new marked.Renderer();
const slugger = new marked.Slugger();
const renderer = new Renderer();
const slugger = new Slugger();
const header = renderer.heading('test', 1, 'test', slugger);
expect(header).toBe('<h1 id="test">test</h1>\n');
});
it('should NOT add id attribute when options set false', () => {
const renderer = new marked.Renderer({ headerIds: false });
const renderer = new Renderer({ headerIds: false });
const header = renderer.heading('test', 1, 'test');
expect(header).toBe('<h1>test</h1>\n');
});
@ -17,26 +17,26 @@ describe('Test heading ID functionality', () => {
describe('Test slugger functionality', () => {
it('should use lowercase slug', () => {
const slugger = new marked.Slugger();
const slugger = new Slugger();
expect(slugger.slug('Test')).toBe('test');
});
it('should be unique to avoid collisions 1280', () => {
const slugger = new marked.Slugger();
const slugger = new Slugger();
expect(slugger.slug('test')).toBe('test');
expect(slugger.slug('test')).toBe('test-1');
expect(slugger.slug('test')).toBe('test-2');
});
it('should be unique when slug ends with number', () => {
const slugger = new marked.Slugger();
const slugger = new Slugger();
expect(slugger.slug('test 1')).toBe('test-1');
expect(slugger.slug('test')).toBe('test');
expect(slugger.slug('test')).toBe('test-2');
});
it('should be unique when slug ends with hyphen number', () => {
const slugger = new marked.Slugger();
const slugger = new Slugger();
expect(slugger.slug('foo')).toBe('foo');
expect(slugger.slug('foo')).toBe('foo-1');
expect(slugger.slug('foo 1')).toBe('foo-1-1');
@ -45,39 +45,39 @@ describe('Test slugger functionality', () => {
});
it('should allow non-latin chars', () => {
const slugger = new marked.Slugger();
const slugger = new Slugger();
expect(slugger.slug('привет')).toBe('привет');
});
it('should remove ampersands 857', () => {
const slugger = new marked.Slugger();
const slugger = new Slugger();
expect(slugger.slug('This & That Section')).toBe('this--that-section');
});
it('should remove periods', () => {
const slugger = new marked.Slugger();
const slugger = new Slugger();
expect(slugger.slug('file.txt')).toBe('filetxt');
});
it('should remove html tags', () => {
const slugger = new marked.Slugger();
const slugger = new Slugger();
expect(slugger.slug('<em>html</em>')).toBe('html');
});
it('should not increment seen when using dryrun option', () => {
const slugger = new marked.Slugger();
const slugger = new Slugger();
expect(slugger.slug('<h1>This Section</h1>', { dryrun: true })).toBe('this-section');
expect(slugger.slug('<h1>This Section</h1>')).toBe('this-section');
});
it('should still return the next unique id when using dryrun', () => {
const slugger = new marked.Slugger();
const slugger = new Slugger();
expect(slugger.slug('<h1>This Section</h1>')).toBe('this-section');
expect(slugger.slug('<h1>This Section</h1>', { dryrun: true })).toBe('this-section-1');
});
it('should be repeatable in a sequence', () => {
const slugger = new marked.Slugger();
const slugger = new Slugger();
expect(slugger.slug('foo')).toBe('foo');
expect(slugger.slug('foo')).toBe('foo-1');
expect(slugger.slug('foo')).toBe('foo-2');
@ -92,7 +92,7 @@ describe('Test paragraph token type', () => {
it('should use the "paragraph" type on top level', () => {
const md = 'A Paragraph.\n\n> A blockquote\n\n- list item\n';
const tokens = marked.lexer(md);
const tokens = lexer(md);
expect(tokens[0].type).toBe('paragraph');
expect(tokens[2].tokens[0].type).toBe('paragraph');
@ -101,17 +101,17 @@ describe('Test paragraph token type', () => {
});
describe('changeDefaults', () => {
it('should change global defaults', () => {
const { defaults, changeDefaults } = require('../../src/defaults');
it('should change global defaults', async() => {
const { defaults, changeDefaults } = await import('../../src/defaults.js');
expect(defaults.test).toBeUndefined();
changeDefaults({ test: true });
expect(require('../../src/defaults').defaults.test).toBe(true);
expect((await import('../../src/defaults.js')).defaults.test).toBe(true);
});
});
describe('inlineLexer', () => {
it('should send html to renderer.html', () => {
const renderer = new marked.Renderer();
const renderer = new Renderer();
spyOn(renderer, 'html').and.callThrough();
const md = 'HTML Image: <img alt="MY IMAGE" src="example.png" />';
marked(md, { renderer });
@ -123,14 +123,14 @@ describe('inlineLexer', () => {
describe('parseInline', () => {
it('should parse inline tokens', () => {
const md = '**strong** _em_';
const html = marked.parseInline(md);
const html = parseInline(md);
expect(html).toBe('<strong>strong</strong> <em>em</em>');
});
it('should not parse block tokens', () => {
const md = '# header\n\n_em_';
const html = marked.parseInline(md);
const html = parseInline(md);
expect(html).toBe('# header\n\n<em>em</em>');
});
@ -156,7 +156,7 @@ describe('use extension', () => {
return `<u>${token.text}</u>\n`;
}
};
marked.use({ extensions: [underline] });
use({ extensions: [underline] });
let html = marked('Not Underlined\n:Underlined\nNot Underlined');
expect(html).toBe('<p>Not Underlined\n:Underlined\nNot Underlined</p>\n');
@ -186,7 +186,7 @@ describe('use extension', () => {
}
}]
};
marked.use(underline);
use(underline);
const html = marked('Not Underlined A\n:Underlined B:\nNot Underlined C\n:Not Underlined D');
expect(html).toBe('<p>Not Underlined A</p>\n<u>Underlined B</u>\n<p>Not Underlined C\n:Not Underlined D</p>\n');
});
@ -211,7 +211,7 @@ describe('use extension', () => {
return `<u>${token.text}</u>`;
}
};
marked.use({ extensions: [underline] });
use({ extensions: [underline] });
const html = marked('Not Underlined =Underlined= Not Underlined');
expect(html).toBe('<p>Not Underlined <u>Underlined</u> Not Underlined</p>\n');
});
@ -268,7 +268,7 @@ describe('use extension', () => {
return `\n<dt>${this.parser.parseInline(token.dt)}</dt><dd>${this.parser.parseInline(token.dd)}</dd>`;
}
};
marked.use({ extensions: [descriptionlist, description] });
use({ extensions: [descriptionlist, description] });
const html = marked('A Description List with One Description:\n'
+ ': Topic 1 : Description 1\n'
+ ': **Topic 2** : *Description 2*');
@ -299,7 +299,7 @@ describe('use extension', () => {
return `<u>${token.text}</u>\n`;
}
};
marked.use({ sanitize: true, silent: true, extensions: [extension] });
use({ sanitize: true, silent: true, extensions: [extension] });
const html = marked(':test:\ntest\n<div></div>');
expect(html).toBe('<u>test</u>\n<p>test</p>\n<p>&lt;div&gt;&lt;/div&gt;</p>\n');
});
@ -336,7 +336,7 @@ describe('use extension', () => {
return false;
}
};
marked.use({ extensions: [fallbackRenderer, extension] });
use({ extensions: [fallbackRenderer, extension] });
const html = marked(':Test:\n\n:test:\n\n:none:');
expect(html).toBe('fallbacktest');
});
@ -379,7 +379,7 @@ describe('use extension', () => {
return false;
}
};
marked.use({ extensions: [extension, extension2] });
use({ extensions: [extension, extension2] });
const html = marked(':Test:\n\n:test:');
expect(html).toBe('TESTtest');
});
@ -415,7 +415,7 @@ describe('use extension', () => {
}
}]
};
marked.use(extension);
use(extension);
const html = marked('# extension1\n:extension2:');
expect(html).toBe('<h1>extension1 RENDERER EXTENSION</h1>\n<pre><code>extension2 TOKENIZER EXTENSION\n</code></pre>\n');
});
@ -454,7 +454,7 @@ describe('use extension', () => {
}
}
};
marked.use(walkableDescription);
use(walkableDescription);
const html = marked(': Topic 1 : Description 1\n'
+ ': **Topic 2** : *Description 2*');
expect(html).toBe('<p>\n<dt>Topic 1 walked - unwalked</dt><dd>Description 1 walked</dd>'
@ -588,14 +588,14 @@ used extension2 walked</p>
}
it('should merge extensions when calling marked.use multiple times', () => {
marked.use(createExtension('extension1'));
marked.use(createExtension('extension2'));
use(createExtension('extension1'));
use(createExtension('extension2'));
runTest();
});
it('should merge extensions when calling marked.use with multiple extensions', () => {
marked.use(
use(
createExtension('extension1'),
createExtension('extension2')
);
@ -604,7 +604,7 @@ used extension2 walked</p>
});
it('should fall back to any extensions with the same name if the first returns false', () => {
marked.use(
use(
createExtension('extension1'),
createExtension('extension2'),
createFalseExtension('extension1'),
@ -663,7 +663,7 @@ used extension2 walked</p>
},
headerIds: false
};
marked.use(styleTags);
use(styleTags);
const html = marked('This is a *paragraph* with blue text. {blue}\n'
+ '# This is a *header* with red text {red}');
expect(html).toBe('<p style="color:blue;">This is a <em>paragraph</em> with blue text.</p>\n'
@ -679,7 +679,7 @@ used extension2 walked</p>
}
};
spyOn(extension.renderer, 'paragraph').and.callThrough();
marked.use(extension);
use(extension);
const html = marked('text');
expect(extension.renderer.paragraph).toHaveBeenCalledWith('text');
expect(html).toBe('extension');
@ -701,7 +701,7 @@ used extension2 walked</p>
}
};
spyOn(extension.tokenizer, 'paragraph').and.callThrough();
marked.use(extension);
use(extension);
const html = marked('text');
expect(extension.tokenizer.paragraph).toHaveBeenCalledWith('text');
expect(html).toBe('<p>extension</p>\n');
@ -714,7 +714,7 @@ used extension2 walked</p>
walked++;
}
};
marked.use(extension);
use(extension);
marked('text');
expect(walked).toBe(2);
});
@ -726,7 +726,7 @@ used extension2 walked</p>
walked++;
}
};
marked.use(extension);
use(extension);
marked('text', () => {
expect(walked).toBe(2);
done();
@ -737,7 +737,7 @@ used extension2 walked</p>
const extension = {
headerIds: false
};
marked.use(extension);
use(extension);
const html = marked('# heading');
expect(html).toBe('<h1>heading</h1>\n');
});
@ -758,8 +758,8 @@ used extension2 walked</p>
token.walkedOnce = true;
}
};
marked.use(extension1);
marked.use(extension2);
use(extension1);
use(extension2);
marked('text');
expect(walkedOnce).toBe(2);
expect(walkedTwice).toBe(2);
@ -783,8 +783,8 @@ used extension2 walked</p>
}
}
};
marked.use(extension1);
marked.use(extension2);
use(extension1);
use(extension2);
const html = marked(`
paragraph
@ -816,8 +816,8 @@ paragraph
}
}
};
marked.use(extension1);
marked.use(extension2);
use(extension1);
use(extension2);
const html = marked(`
paragraph
@ -832,7 +832,7 @@ original
const extension = {
renderer: {
heading: () => {
return this.options ? 'arrow options\n' : 'arrow no options\n';
return this && this.options ? 'arrow options\n' : 'arrow no options\n';
},
html: function() {
return this.options ? 'function options\n' : 'function no options\n';
@ -842,7 +842,7 @@ original
}
}
};
marked.use(extension);
use(extension);
const html = marked(`
# heading
@ -987,9 +987,9 @@ code
br
br
`;
const tokens = marked.lexer(markdown, { ...marked.getDefaults(), breaks: true });
const tokens = lexer(markdown, { ...getDefaults(), breaks: true });
const tokensSeen = [];
marked.walkTokens(tokens, (token) => {
_walkTokens(tokens, (token) => {
tokensSeen.push([token.type, (token.raw || '').replace(/\n/g, '')]);
});

test/update-specs.js vendored
View File

@ -1,13 +1,13 @@
const fetch = require('node-fetch');
const cheerio = require('cheerio');
const marked = require('../');
const htmlDiffer = require('./helpers/html-differ.js');
const fs = require('fs');
const path = require('path');
import fetch from 'node-fetch';
import { load } from 'cheerio';
import { marked } from '../';
import { isEqual } from './helpers/html-differ.js';
import { readdirSync, unlinkSync, writeFileSync } from 'fs';
import { join, resolve } from 'path';
function removeFiles(dir) {
fs.readdirSync(dir).forEach(file => {
fs.unlinkSync(path.join(dir, file));
readdirSync(dir).forEach(file => {
unlinkSync(join(dir, file));
});
}
@ -20,11 +20,11 @@ async function updateCommonmark(dir, options) {
const specs = await res2.json();
specs.forEach(spec => {
const html = marked(spec.markdown, options);
if (!htmlDiffer.isEqual(html, spec.html)) {
if (!isEqual(html, spec.html)) {
spec.shouldFail = true;
}
});
fs.writeFileSync(path.resolve(dir, `./commonmark.${version}.json`), JSON.stringify(specs, null, 2) + '\n');
writeFileSync(resolve(dir, `./commonmark.${version}.json`), JSON.stringify(specs, null, 2) + '\n');
console.log(`Saved CommonMark v${version} specs`);
} catch (ex) {
console.log(ex);
@ -35,7 +35,7 @@ async function updateGfm(dir) {
try {
const res = await fetch('https://github.github.com/gfm/');
const html = await res.text();
const $ = cheerio.load(html);
const $ = load(html);
const version = $('.version').text().match(/\d+\.\d+/)[0];
if (!version) {
throw new Error('No version found');
@ -58,19 +58,19 @@ async function updateGfm(dir) {
specs.forEach(spec => {
const html = marked(spec.markdown, { gfm: true, pedantic: false });
if (!htmlDiffer.isEqual(html, spec.html)) {
if (!isEqual(html, spec.html)) {
spec.shouldFail = true;
}
});
fs.writeFileSync(path.resolve(dir, `./gfm.${version}.json`), JSON.stringify(specs, null, 2) + '\n');
writeFileSync(resolve(dir, `./gfm.${version}.json`), JSON.stringify(specs, null, 2) + '\n');
console.log(`Saved GFM v${version} specs.`);
} catch (ex) {
console.log(ex);
}
}
const commonmarkDir = path.resolve(__dirname, './specs/commonmark');
const gfmDir = path.resolve(__dirname, './specs/gfm');
const commonmarkDir = resolve(__dirname, './specs/commonmark');
const gfmDir = resolve(__dirname, './specs/gfm');
removeFiles(commonmarkDir);
removeFiles(gfmDir);
updateCommonmark(commonmarkDir, { gfm: false, pedantic: false, headerIds: false });

test/vuln-regex.js vendored
View File

@ -1,5 +1,5 @@
const regexp = require('../src/rules.js');
const vulnRegexDetector = require('vuln-regex-detector');
import * as regexp from '../src/rules.js';
import { test, responses } from 'vuln-regex-detector';
const promises = [];
function findRegexps(name, obj) {
@ -18,12 +18,12 @@ function findRegexps(name, obj) {
async function testRegexp(name, source) {
try {
const result = await vulnRegexDetector.test(source);
const result = await test(source);
if (result === vulnRegexDetector.responses.safe) {
if (result === responses.safe) {
console.log(`${name} is safe`);
return true;
} else if (result === vulnRegexDetector.responses.vulnerable) {
} else if (result === responses.vulnerable) {
console.error(`${name} is vulnerable`);
} else {
console.error(`${name} might be vulnerable: ` + result.toString());