doc: remove old vendored marked module and replace it with 0.8.2
This change is needed because in 1.8.3 we are going to introduce Markdown tables in the documentation (#3873 and #3921), and the old marked version could not generate them. Instead of committing the marked source code here, we install it from npm on demand via the Makefile. N.B.: at the time of this change the latest marked version is 1.0.0, released a few days ago. I am updating to the version immediately before that (0.8.2), because in 1.0.0 the hyperlinks in the Table of Contents do not work (probably a bug in that version).
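For context, a minimal sketch (not part of this commit) of the feature that motivates the upgrade: marked 0.8.2 renders GFM tables out of the box, which the old vendored copy could not do. The snippet assumes only that marked 0.8.2 is installed and exposes its usual callable export.

``` js
// Sketch only: confirm that marked 0.8.2 turns a GFM table into an HTML <table>.
const marked = require('marked');

const md = [
  '| Option | Default |',
  '| ------ | ------- |',
  '| gfm    | true    |'
].join('\n');

console.log(marked(md)); // prints <table>…</table> rather than a plain paragraph
```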
parent 13dddde1d9
commit db77302883
Makefile
@@ -6,7 +6,14 @@ docassets = $(addprefix out/,$(wildcard doc/assets/*))
 VERSION = $(shell node -e "console.log( require('./src/package.json').version )")
 UNAME := $(shell uname -s)
 
-docs: $(outdoc_files) $(docassets)
+ensure_marked_is_installed:
+	set -eu; \
+	hash npm; \
+	if [ $(shell npm list --prefix bin/doc >/dev/null 2>/dev/null; echo $$?) -ne "0" ]; then \
+		npm ci --prefix=bin/doc; \
+	fi
+
+docs: ensure_marked_is_installed $(outdoc_files) $(docassets)
 
 out/doc/assets/%: doc/assets/%
 	mkdir -p $(@D)
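The `ensure_marked_is_installed` guard above only shells out to `npm ci --prefix=bin/doc` when `npm list` reports a missing or broken tree. A rough node-side equivalent of that check, shown purely as an illustration (the helper and the resolved path are hypothetical, not files in the tree):

``` js
// Hypothetical check mirroring the Makefile guard: is the pinned marked
// already present under bin/doc/node_modules?
const path = require('path');

let version = null;
try {
  version = require(path.resolve('bin/doc/node_modules/marked/package.json')).version;
} catch (e) {
  // not installed yet
}
console.log(version ? 'marked ' + version + ' present' : 'missing: run `npm ci --prefix=bin/doc`');
```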
@@ -1 +0,0 @@
../marked/bin/marked
@@ -1,2 +0,0 @@
.git*
test/
@@ -1,19 +0,0 @@
Copyright (c) 2011-2012, Christopher Jeffrey (https://github.com/chjj/)

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
@@ -1,9 +0,0 @@
all:
	@cp lib/marked.js marked.js
	@uglifyjs -o marked.min.js marked.js

clean:
	@rm marked.js
	@rm marked.min.js

.PHONY: clean all
@@ -1,135 +0,0 @@
# marked

A full-featured markdown parser and compiler.
Built for speed.

## Benchmarks

node v0.4.x

``` bash
$ node test --bench
marked completed in 12071ms.
showdown (reuse converter) completed in 27387ms.
showdown (new converter) completed in 75617ms.
markdown-js completed in 70069ms.
```

node v0.6.x

``` bash
$ node test --bench
marked completed in 6485ms.
marked (with gfm) completed in 7466ms.
discount completed in 7169ms.
showdown (reuse converter) completed in 15937ms.
showdown (new converter) completed in 18279ms.
markdown-js completed in 23572ms.
```

__Marked is now faster than Discount, which is written in C.__

For those feeling skeptical: These benchmarks run the entire markdown test suite
1000 times. The test suite tests every feature. It doesn't cater to specific
aspects.

Benchmarks for other engines to come (?).

## Install

``` bash
$ npm install marked
```

## Another javascript markdown parser

The point of marked was to create a markdown compiler where it was possible to
frequently parse huge chunks of markdown without having to worry about
caching the compiled output somehow...or blocking for an unnecesarily long time.

marked is very concise and still implements all markdown features. It is also
now fully compatible with the client-side.

marked more or less passes the official markdown test suite in its
entirety. This is important because a surprising number of markdown compilers
cannot pass more than a few tests. It was very difficult to get marked as
compliant as it is. It could have cut corners in several areas for the sake
of performance, but did not in order to be exactly what you expect in terms
of a markdown rendering. In fact, this is why marked could be considered at a
disadvantage in the benchmarks above.

Along with implementing every markdown feature, marked also implements
[GFM features](http://github.github.com/github-flavored-markdown/).

## Usage

``` js
var marked = require('marked');
console.log(marked('i am using __markdown__.'));
```

You also have direct access to the lexer and parser if you so desire.

``` js
var tokens = marked.lexer(str);
console.log(marked.parser(tokens));
```

``` bash
$ node
> require('marked').lexer('> i am using marked.')
[ { type: 'blockquote_start' },
  { type: 'text', text: ' i am using marked.' },
  { type: 'blockquote_end' },
  links: {} ]
```

## CLI

``` bash
$ marked -o hello.html
hello world
^D
$ cat hello.html
<p>hello world</p>
```

## Syntax Highlighting

Marked has an interface that allows for a syntax highlighter to highlight code
blocks before they're output.

Example implementation:

``` js
var highlight = require('my-syntax-highlighter')
  , marked_ = require('marked');

var marked = function(text) {
  var tokens = marked_.lexer(text)
    , l = tokens.length
    , i = 0
    , token;

  for (; i < l; i++) {
    token = tokens[i];
    if (token.type === 'code') {
      token.text = highlight(token.text, token.lang);
      // marked should not escape this
      token.escaped = true;
    }
  }

  text = marked_.parser(tokens);

  return text;
};

module.exports = marked;
```

## License

Copyright (c) 2011-2012, Christopher Jeffrey. (MIT License)

See LICENSE for more info.
@@ -1,115 +0,0 @@
#!/usr/bin/env node

/**
 * Marked CLI
 * Copyright (c) 2011-2012, Christopher Jeffrey (MIT License)
 */

var fs = require('fs')
  , util = require('util')
  , marked = require('../');

/**
 * Man Page
 */

var help = function() {
  var spawn = require('child_process').spawn;

  var options = {
    cwd: process.cwd(),
    env: process.env,
    setsid: false,
    customFds: [0, 1, 2]
  };

  spawn('man',
    [__dirname + '/../man/marked.1'],
    options);
};

/**
 * Main
 */

var main = function(argv) {
  var files = []
    , data = ''
    , input
    , output
    , arg
    , tokens;

  var getarg = function() {
    var arg = argv.shift();
    arg = arg.split('=');
    if (arg.length > 1) {
      argv.unshift(arg.slice(1).join('='));
    }
    return arg[0];
  };

  while (argv.length) {
    arg = getarg();
    switch (arg) {
      case '-o':
      case '--output':
        output = argv.shift();
        break;
      case '-i':
      case '--input':
        input = argv.shift();
        break;
      case '-t':
      case '--tokens':
        tokens = true;
        break;
      case '-h':
      case '--help':
        return help();
      default:
        files.push(arg);
        break;
    }
  }

  if (!input) {
    if (files.length <= 2) {
      var stdin = process.stdin;

      stdin.setEncoding('utf8');
      stdin.resume();

      stdin.on('data', function(text) {
        data += text;
      });

      stdin.on('end', write);

      return;
    }
    input = files.pop();
  }

  data = fs.readFileSync(input, 'utf8');
  write();

  function write() {
    data = tokens
      ? JSON.stringify(marked.lexer(data), null, 2)
      : marked(data);

    if (!output) {
      process.stdout.write(data + '\n');
    } else {
      fs.writeFileSync(output, data);
    }
  }
};

if (!module.parent) {
  process.title = 'marked';
  main(process.argv.slice());
} else {
  module.exports = main;
}
@@ -1 +0,0 @@
module.exports = require('./lib/marked');
@@ -1,662 +0,0 @@
/**
 * marked - A markdown parser (https://github.com/chjj/marked)
 * Copyright (c) 2011-2012, Christopher Jeffrey. (MIT Licensed)
 */

;(function() {

/**
 * Block-Level Grammar
 */

var block = {
  newline: /^\n+/,
  code: /^ {4,}[^\n]*(?:\n {4,}[^\n]*|\n)*(?:\n+|$)/,
  gfm_code: /^ *``` *(\w+)? *\n([^\0]+?)\s*``` *(?:\n+|$)/,
  hr: /^( *[\-*_]){3,} *(?:\n+|$)/,
  heading: /^ *(#{1,6}) *([^\n]+?) *#* *(?:\n+|$)/,
  lheading: /^([^\n]+)\n *(=|-){3,} *\n*/,
  blockquote: /^( *>[^\n]+(\n[^\n]+)*\n*)+/,
  list: /^( *)([*+-]|\d+\.) [^\0]+?(?:\n{2,}(?! )|\s*$)(?!\1bullet)\n*/,
  html: /^ *(?:comment|closed|closing) *(?:\n{2,}|\s*$)/,
  def: /^ *\[([^\]]+)\]: *([^\s]+)(?: +["(]([^\n]+)[")])? *(?:\n+|$)/,
  paragraph: /^([^\n]+\n?(?!body))+\n*/,
  text: /^[^\n]+/
};

block.list = (function() {
  var list = block.list.source;

  list = list
    .replace('bullet', /(?:[*+-](?!(?: *[-*]){2,})|\d+\.)/.source);

  return new RegExp(list);
})();

block.html = (function() {
  var html = block.html.source;

  html = html
    .replace('comment', /<!--[^\0]*?-->/.source)
    .replace('closed', /<(tag)[^\0]+?<\/\1>/.source)
    .replace('closing', /<tag(?!:\/|@)\b(?:"[^"]*"|'[^']*'|[^'">])*?>/.source)
    .replace(/tag/g, tag());

  return new RegExp(html);
})();

block.paragraph = (function() {
  var paragraph = block.paragraph.source
    , body = [];

  (function push(rule) {
    rule = block[rule] ? block[rule].source : rule;
    body.push(rule.replace(/(^|[^\[])\^/g, '$1'));
    return push;
  })
  ('gfm_code')
  ('hr')
  ('heading')
  ('lheading')
  ('blockquote')
  ('<' + tag())
  ('def');

  return new
    RegExp(paragraph.replace('body', body.join('|')));
})();

/**
 * Block Lexer
 */

block.lexer = function(src) {
  var tokens = [];

  tokens.links = {};

  src = src
    .replace(/\r\n|\r/g, '\n')
    .replace(/\t/g, '    ');

  return block.token(src, tokens, true);
};

block.token = function(src, tokens, top) {
  var src = src.replace(/^ +$/gm, '')
    , next
    , loose
    , cap
    , item
    , space
    , i
    , l;

  while (src) {
    // newline
    if (cap = block.newline.exec(src)) {
      src = src.substring(cap[0].length);
      if (cap[0].length > 1) {
        tokens.push({
          type: 'space'
        });
      }
    }

    // code
    if (cap = block.code.exec(src)) {
      src = src.substring(cap[0].length);
      cap = cap[0].replace(/^ {4}/gm, '');
      tokens.push({
        type: 'code',
        text: cap.replace(/\n+$/, '')
      });
      continue;
    }

    // gfm_code
    if (cap = block.gfm_code.exec(src)) {
      src = src.substring(cap[0].length);
      tokens.push({
        type: 'code',
        lang: cap[1],
        text: cap[2]
      });
      continue;
    }

    // heading
    if (cap = block.heading.exec(src)) {
      src = src.substring(cap[0].length);
      tokens.push({
        type: 'heading',
        depth: cap[1].length,
        text: cap[2]
      });
      continue;
    }

    // lheading
    if (cap = block.lheading.exec(src)) {
      src = src.substring(cap[0].length);
      tokens.push({
        type: 'heading',
        depth: cap[2] === '=' ? 1 : 2,
        text: cap[1]
      });
      continue;
    }

    // hr
    if (cap = block.hr.exec(src)) {
      src = src.substring(cap[0].length);
      tokens.push({
        type: 'hr'
      });
      continue;
    }

    // blockquote
    if (cap = block.blockquote.exec(src)) {
      src = src.substring(cap[0].length);
      tokens.push({
        type: 'blockquote_start'
      });

      cap = cap[0].replace(/^ *> ?/gm, '');

      // Pass `top` to keep the current
      // "toplevel" state. This is exactly
      // how markdown.pl works.
      block.token(cap, tokens, top);

      tokens.push({
        type: 'blockquote_end'
      });
      continue;
    }

    // list
    if (cap = block.list.exec(src)) {
      src = src.substring(cap[0].length);

      tokens.push({
        type: 'list_start',
        ordered: isFinite(cap[2])
      });

      // Get each top-level item.
      cap = cap[0].match(
        /^( *)([*+-]|\d+\.)[^\n]*(?:\n(?!\1(?:[*+-]|\d+\.))[^\n]*)*/gm
      );

      next = false;
      l = cap.length;
      i = 0;

      for (; i < l; i++) {
        item = cap[i];

        // Remove the list item's bullet
        // so it is seen as the next token.
        space = item.length;
        item = item.replace(/^ *([*+-]|\d+\.) */, '');

        // Outdent whatever the
        // list item contains. Hacky.
        if (~item.indexOf('\n ')) {
          space -= item.length;
          item = item.replace(new RegExp('^ {1,' + space + '}', 'gm'), '');
        }

        // Determine whether item is loose or not.
        // Use: /(^|\n)(?! )[^\n]+\n\n(?!\s*$)/
        // for discount behavior.
        loose = next || /\n\n(?!\s*$)/.test(item);
        if (i !== l - 1) {
          next = item[item.length-1] === '\n';
          if (!loose) loose = next;
        }

        tokens.push({
          type: loose
            ? 'loose_item_start'
            : 'list_item_start'
        });

        // Recurse.
        block.token(item, tokens);

        tokens.push({
          type: 'list_item_end'
        });
      }

      tokens.push({
        type: 'list_end'
      });

      continue;
    }

    // html
    if (cap = block.html.exec(src)) {
      src = src.substring(cap[0].length);
      tokens.push({
        type: 'html',
        text: cap[0]
      });
      continue;
    }

    // def
    if (top && (cap = block.def.exec(src))) {
      src = src.substring(cap[0].length);
      tokens.links[cap[1].toLowerCase()] = {
        href: cap[2],
        title: cap[3]
      };
      continue;
    }

    // top-level paragraph
    if (top && (cap = block.paragraph.exec(src))) {
      src = src.substring(cap[0].length);
      tokens.push({
        type: 'paragraph',
        text: cap[0]
      });
      continue;
    }

    // text
    if (cap = block.text.exec(src)) {
      // Top-level should never reach here.
      src = src.substring(cap[0].length);
      tokens.push({
        type: 'text',
        text: cap[0]
      });
      continue;
    }
  }

  return tokens;
};

/**
 * Inline Processing
 */

var inline = {
  escape: /^\\([\\`*{}\[\]()#+\-.!_>])/,
  autolink: /^<([^ >]+(@|:\/)[^ >]+)>/,
  gfm_autolink: /^(\w+:\/\/[^\s]+[^.,:;"')\]\s])/,
  tag: /^<!--[^\0]*?-->|^<\/?\w+(?:"[^"]*"|'[^']*'|[^'">])*?>/,
  link: /^!?\[((?:\[[^\]]*\]|[^\[\]]|\[|\](?=[^[\]]*\]))*)\]\(([^\)]*)\)/,
  reflink: /^!?\[((?:\[[^\]]*\]|[^\[\]]|\[|\](?=[^[\]]*\]))*)\]\s*\[([^\]]*)\]/,
  nolink: /^!?\[((?:\[[^\]]*\]|[^\[\]])*)\]/,
  strong: /^__([^\0]+?)__(?!_)|^\*\*([^\0]+?)\*\*(?!\*)/,
  em: /^\b_([^\0]+?)_\b|^\*((?:\*\*|[^\0])+?)\*(?!\*)/,
  code: /^(`+)([^\0]*?[^`])\1(?!`)/,
  br: /^ {2,}\n(?!\s*$)/,
  text: /^[^\0]+?(?=[\\<!\[_*`]|\w+:\/\/| {2,}\n|$)/
};

/**
 * Inline Lexer
 */

inline.lexer = function(src) {
  var out = ''
    , links = tokens.links
    , link
    , text
    , href
    , cap;

  while (src) {
    // escape
    if (cap = inline.escape.exec(src)) {
      src = src.substring(cap[0].length);
      out += cap[1];
      continue;
    }

    // autolink
    if (cap = inline.autolink.exec(src)) {
      src = src.substring(cap[0].length);
      if (cap[2] === '@') {
        text = cap[1][6] === ':'
          ? mangle(cap[1].substring(7))
          : mangle(cap[1]);
        href = mangle('mailto:') + text;
      } else {
        text = escape(cap[1]);
        href = text;
      }
      out += '<a href="'
        + href
        + '">'
        + text
        + '</a>';
      continue;
    }

    // gfm_autolink
    if (cap = inline.gfm_autolink.exec(src)) {
      src = src.substring(cap[0].length);
      text = escape(cap[1]);
      href = text;
      out += '<a href="'
        + href
        + '">'
        + text
        + '</a>';
      continue;
    }

    // tag
    if (cap = inline.tag.exec(src)) {
      src = src.substring(cap[0].length);
      out += cap[0];
      continue;
    }

    // link
    if (cap = inline.link.exec(src)) {
      src = src.substring(cap[0].length);
      text = /^\s*<?([^\s]*?)>?(?:\s+"([^\n]+)")?\s*$/.exec(cap[2]);
      if (!text) {
        out += cap[0][0];
        src = cap[0].substring(1) + src;
        continue;
      }
      out += outputLink(cap, {
        href: text[1],
        title: text[2]
      });
      continue;
    }

    // reflink, nolink
    if ((cap = inline.reflink.exec(src))
        || (cap = inline.nolink.exec(src))) {
      src = src.substring(cap[0].length);
      link = (cap[2] || cap[1]).replace(/\s+/g, ' ');
      link = links[link.toLowerCase()];
      if (!link || !link.href) {
        out += cap[0][0];
        src = cap[0].substring(1) + src;
        continue;
      }
      out += outputLink(cap, link);
      continue;
    }

    // strong
    if (cap = inline.strong.exec(src)) {
      src = src.substring(cap[0].length);
      out += '<strong>'
        + inline.lexer(cap[2] || cap[1])
        + '</strong>';
      continue;
    }

    // em
    if (cap = inline.em.exec(src)) {
      src = src.substring(cap[0].length);
      out += '<em>'
        + inline.lexer(cap[2] || cap[1])
        + '</em>';
      continue;
    }

    // code
    if (cap = inline.code.exec(src)) {
      src = src.substring(cap[0].length);
      out += '<code>'
        + escape(cap[2], true)
        + '</code>';
      continue;
    }

    // br
    if (cap = inline.br.exec(src)) {
      src = src.substring(cap[0].length);
      out += '<br>';
      continue;
    }

    // text
    if (cap = inline.text.exec(src)) {
      src = src.substring(cap[0].length);
      out += escape(cap[0]);
      continue;
    }
  }

  return out;
};

var outputLink = function(cap, link) {
  if (cap[0][0] !== '!') {
    return '<a href="'
      + escape(link.href)
      + '"'
      + (link.title
      ? ' title="'
      + escape(link.title)
      + '"'
      : '')
      + '>'
      + inline.lexer(cap[1])
      + '</a>';
  } else {
    return '<img src="'
      + escape(link.href)
      + '" alt="'
      + escape(cap[1])
      + '"'
      + (link.title
      ? ' title="'
      + escape(link.title)
      + '"'
      : '')
      + '>';
  }
};

/**
 * Parsing
 */

var tokens
  , token;

var next = function() {
  return token = tokens.pop();
};

var tok = function() {
  switch (token.type) {
    case 'space': {
      return '';
    }
    case 'hr': {
      return '<hr>\n';
    }
    case 'heading': {
      return '<h'
        + token.depth
        + '>'
        + inline.lexer(token.text)
        + '</h'
        + token.depth
        + '>\n';
    }
    case 'code': {
      return '<pre><code'
        + (token.lang
        ? ' class="'
        + token.lang
        + '"'
        : '')
        + '>'
        + (token.escaped
        ? token.text
        : escape(token.text, true))
        + '</code></pre>\n';
    }
    case 'blockquote_start': {
      var body = '';

      while (next().type !== 'blockquote_end') {
        body += tok();
      }

      return '<blockquote>\n'
        + body
        + '</blockquote>\n';
    }
    case 'list_start': {
      var type = token.ordered ? 'ol' : 'ul'
        , body = '';

      while (next().type !== 'list_end') {
        body += tok();
      }

      return '<'
        + type
        + '>\n'
        + body
        + '</'
        + type
        + '>\n';
    }
    case 'list_item_start': {
      var body = '';

      while (next().type !== 'list_item_end') {
        body += token.type === 'text'
          ? parseText()
          : tok();
      }

      return '<li>'
        + body
        + '</li>\n';
    }
    case 'loose_item_start': {
      var body = '';

      while (next().type !== 'list_item_end') {
        body += tok();
      }

      return '<li>'
        + body
        + '</li>\n';
    }
    case 'html': {
      return inline.lexer(token.text);
    }
    case 'paragraph': {
      return '<p>'
        + inline.lexer(token.text)
        + '</p>\n';
    }
    case 'text': {
      return '<p>'
        + parseText()
        + '</p>\n';
    }
  }
};

var parseText = function() {
  var body = token.text
    , top;

  while ((top = tokens[tokens.length-1])
      && top.type === 'text') {
    body += '\n' + next().text;
  }

  return inline.lexer(body);
};

var parse = function(src) {
  tokens = src.reverse();

  var out = '';
  while (next()) {
    out += tok();
  }

  tokens = null;
  token = null;

  return out;
};

/**
 * Helpers
 */

var escape = function(html, encode) {
  return html
    .replace(!encode ? /&(?!#?\w+;)/g : /&/g, '&amp;')
    .replace(/</g, '&lt;')
    .replace(/>/g, '&gt;')
    .replace(/"/g, '&quot;')
    .replace(/'/g, '&#39;');
};

var mangle = function(text) {
  var out = ''
    , l = text.length
    , i = 0
    , ch;

  for (; i < l; i++) {
    ch = text.charCodeAt(i);
    if (Math.random() > 0.5) {
      ch = 'x' + ch.toString(16);
    }
    out += '&#' + ch + ';';
  }

  return out;
};

function tag() {
  var tag = '(?!(?:'
    + 'a|em|strong|small|s|cite|q|dfn|abbr|data|time|code'
    + '|var|samp|kbd|sub|sup|i|b|u|mark|ruby|rt|rp|bdi|bdo'
    + '|span|br|wbr|ins|del|img)\\b)\\w+';

  return tag;
}

/**
 * Expose
 */

var marked = function(src) {
  return parse(block.lexer(src));
};

marked.parser = parse;
marked.lexer = block.lexer;

marked.parse = marked;

if (typeof module !== 'undefined') {
  module.exports = marked;
} else {
  this.marked = marked;
}

}).call(this);
@@ -1,39 +0,0 @@
.ds q \N'34'
.TH marked 1
.SH NAME
marked \- a javascript markdown parser
.SH SYNOPSIS
.nf
.B marked [\-o output] [\-i input] [\-th]
.fi
.SH DESCRIPTION
.B marked
is a full-featured javascript markdown parser, built for speed. It also includes
multiple GFM features.
.SH OPTIONS
.TP
.BI \-o,\ \-\-output\ [output]
Specify file output. If none is specified, write to stdout.
.TP
.BI \-i,\ \-\-input\ [input]
Specify file input, otherwise use last argument as input file. If no input file
is specified, read from stdin.
.TP
.BI \-t,\ \-\-tokens
Output a token stream instead of html.
.TP
.BI \-h,\ \-\-help
Display help information.
.SH EXAMPLES
.TP
cat in.md | marked > out.html
.TP
echo "hello *world*" | marked
.TP
marked -o out.html in.md
.TP
marked --output="hello world.html" -i in.md
.SH BUGS
Please report any bugs to https://github.com/chjj/marked.
.SH LICENSE
Copyright (c) 2011-2012, Christopher Jeffrey (MIT License)
@@ -1,15 +0,0 @@
{
  "name": "marked",
  "description": "A markdown parser built for speed",
  "author": "Christopher Jeffrey",
  "version": "0.1.9",
  "main": "./lib/marked.js",
  "bin": "./bin/marked",
  "man": "./man/marked.1",
  "preferGlobal": false,
  "repository": "git://github.com/chjj/marked.git",
  "homepage": "https://github.com/chjj/marked",
  "bugs": "http://github.com/chjj/marked/issues",
  "keywords": [ "markdown", "markup", "html" ],
  "tags": [ "markdown", "markup", "html" ]
}
@@ -0,0 +1,13 @@
{
  "name": "node-doc-generator",
  "version": "0.0.0",
  "lockfileVersion": 1,
  "requires": true,
  "dependencies": {
    "marked": {
      "version": "0.8.2",
      "resolved": "https://registry.npmjs.org/marked/-/marked-0.8.2.tgz",
      "integrity": "sha512-EGwzEeCcLniFX51DhTpmTom+dSA/MG/OBUDjnWtHbEnjAH180VzUeAw+oE4+Zv+CoYBWyRlYOTR0N8SO9R1PVw=="
    }
  }
}
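The `integrity` field recorded in the lockfile above is what lets `npm ci` verify the downloaded tarball. As a hedged sketch (the local tarball path is illustrative, not a file in the repository), the same value can be reproduced by hashing the archive with sha512 and base64-encoding the digest:

``` js
// Sketch: recompute the lockfile-style integrity string for a local tarball.
const crypto = require('crypto');
const fs = require('fs');

const digest = crypto.createHash('sha512')
  .update(fs.readFileSync('marked-0.8.2.tgz')) // illustrative path
  .digest('base64');
console.log('sha512-' + digest); // should equal the "integrity" value recorded above
```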
@@ -7,7 +7,7 @@
     "node": ">=0.6.10"
   },
   "dependencies": {
-    "marked": ">=0.3.6"
+    "marked": "0.8.2"
   },
   "devDependencies": {},
   "optionalDependencies": {},
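Pinning the dependency to exactly 0.8.2 (rather than >=0.3.6) also pins the behaviour the commit message relies on: working Table of Contents anchors. A quick sanity check one might run against the pinned version, assuming marked 0.8.x's default heading-id behaviour:

``` js
// Sketch: ToC links depend on headings receiving id attributes.
const assert = require('assert');
const marked = require('marked');

const html = marked('## Table of Contents test');
assert(/<h2 id="/.test(html), 'headings should carry ids for ToC anchors');
console.log(html); // e.g. <h2 id="table-of-contents-test">Table of Contents test</h2>
```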