
14 changed files with 250 additions and 214 deletions
@@ -1,42 +0,0 @@
// Class of link replacement rules
//
'use strict';


var assign = require('./common/utils').assign;
var Ruler = require('./ruler');


var _rules = [
  [ 'linkify', require('./rules_text/linkify') ]
];


function Linkifier() {
  this.options = {};

  this.ruler = new Ruler();

  for (var i = 0; i < _rules.length; i++) {
    this.ruler.push(_rules[i][0], _rules[i][1]);
  }
}


Linkifier.prototype.set = function (options) {
  assign(this.options, options);
};


Linkifier.prototype.process = function (state) {
  var i, l, rules;

  rules = this.ruler.getRules('');

  for (i = 0, l = rules.length; i < l; i++) {
    rules[i](this, state);
  }
};


module.exports = Linkifier;
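
For orientation, a minimal sketch of how this removed class was driven: rules were registered on a Ruler at construction time, and process() ran each of them against a parser state. The state stub below is an assumption for illustration, not the project's real state object.

    var Linkifier = require('./linkifier');  // path as it was before removal

    var linkifier = new Linkifier();
    linkifier.set({});                       // options are merged via assign()

    // Stub state; the real one comes from the parser core.
    var state = { tokens: [] };
    linkifier.process(state);                // runs every registered rule as rule(this, state)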
@@ -0,0 +1,6 @@
'use strict';

module.exports = function block(state) {
  var tokens = state.block.parse(state.src, state.options, state.env);
  state.tokens = state.tokens.concat(tokens);
};
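
This new core rule is a thin adapter: it runs the block parser over state.src and appends the produced tokens. A sketch of the state shape it expects, with a stub block parser standing in for the real one (the stub and path are assumptions):

    var block = require('./rules_core/block'); // hypothetical path

    var state = {
      src: '# Hello\n\nworld\n',
      options: {},
      env: {},
      tokens: [],
      block: {
        // stand-in for the real block parser
        parse: function (src, options, env) {
          return [ { type: 'inline', content: src.trim(), level: 0 } ];
        }
      }
    };

    block(state);
    // state.tokens now contains whatever state.block.parse produced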
@@ -0,0 +1,13 @@
'use strict';

module.exports = function inline(state) {
  var tokens = state.tokens, tok, i, l;

  // Parse inlines
  for (i = 0, l = tokens.length; i < l; i++) {
    tok = tokens[i];
    if (tok.type === 'inline') {
      tok.children = state.inline.parse(tok.content, state.options, state.env);
    }
  }
};
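
The inline rule only touches tokens of type 'inline', parsing each one's content into children. A sketch with a stub inline parser (again an assumption, not the project's real one):

    var inline = require('./rules_core/inline'); // hypothetical path

    var state = {
      options: {},
      env: {},
      inline: {
        // stand-in: the real inline parser returns a full token stream
        parse: function (src, options, env) {
          return [ { type: 'text', content: src, level: 0 } ];
        }
      },
      tokens: [
        { type: 'paragraph_open', level: 0 },
        { type: 'inline', content: '*hi*', level: 1, children: null },
        { type: 'paragraph_close', level: 0 }
      ]
    };

    inline(state);
    // state.tokens[1].children is now [ { type: 'text', content: '*hi*', level: 0 } ]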
@@ -0,0 +1,128 @@
// Replace link-like texts with link nodes.
//
// Currently restricted to http/https/ftp
//
'use strict';


var Autolinker = require('autolinker');


var LINK_SCAN_RE = /www|\:\/\//;

var links = [];
var autolinker = new Autolinker({
  stripPrefix: false,
  replaceFn: function (autolinker, match) {
    // Only collect matched strings but don't change anything.
    if (match.getType() === 'url') {
      links.push({ text: match.matchedText, url: match.getUrl() });
    }
    return false;
  }
});

function isLinkOpen(str) {
  return /^<a[>\s]/i.test(str);
}
function isLinkClose(str) {
  return /^<\/a\s*>/i.test(str);
}


module.exports = function linkify(state) {
  var i, j, l, tokens, token, text, nodes, ln, pos, level, htmlLinkLevel,
      blockTokens = state.tokens;

  if (!state.options.linkify) { return; }

  for (j = 0, l = blockTokens.length; j < l; j++) {
    if (blockTokens[j].type !== 'inline') { continue; }
    tokens = blockTokens[j].children;

    htmlLinkLevel = 0;

    // We scan from the end, to keep position when new tags are added.
    // Use reversed logic in links start/end match.
    for (i = tokens.length - 1; i >= 0; i--) {
      token = tokens[i];

      // Skip content of markdown links
      if (token.type === 'link_close') {
        i--;
        while (tokens[i].level !== token.level && tokens[i].type !== 'link_open') {
          i--;
        }
        continue;
      }

      // Skip content of html tag links
      if (token.type === 'htmltag') {
        if (isLinkOpen(token.content) && htmlLinkLevel > 0) {
          htmlLinkLevel--;
        }
        if (isLinkClose(token.content)) {
          htmlLinkLevel++;
        }
      }
      if (htmlLinkLevel > 0) { continue; }

      if (token.type === 'text' && LINK_SCAN_RE.test(token.content)) {

        text = token.content;
        links.length = 0;
        autolinker.link(text);

        if (!links.length) { continue; }

        // Now split string to nodes
        nodes = [];
        level = token.level;

        for (ln = 0; ln < links.length; ln++) {

          if (!state.inline.validateLink(links[ln].url)) { continue; }

          pos = text.indexOf(links[ln].text);

          if (pos === -1) { continue; }

          if (pos) {
            nodes.push({
              type: 'text',
              content: text.slice(0, pos),
              level: level
            });
          }
          nodes.push({
            type: 'link_open',
            href: links[ln].url,
            title: '',
            level: level++
          });
          nodes.push({
            type: 'text',
            content: links[ln].text,
            level: level
          });
          nodes.push({
            type: 'link_close',
            level: --level
          });
          text = text.slice(pos + links[ln].text.length);
        }
        if (text.length) {
          nodes.push({
            type: 'text',
            content: text,
            level: level
          });
        }

        // replace current node
        blockTokens[j].children = tokens = [].concat(tokens.slice(0, i), nodes, tokens.slice(i + 1));
      }
    }
  }
};
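
The splitting loop is the heart of this rule: each matched URL slices the current text into text / link_open / text / link_close nodes at the right nesting level. A self-contained sketch of just that slicing, with a hand-made match list instead of Autolinker (the input string and match are made up for the example):

    var text  = 'see https://example.com for details';
    var links = [ { text: 'https://example.com', url: 'https://example.com' } ];
    var nodes = [], level = 0, pos, ln;

    for (ln = 0; ln < links.length; ln++) {
      pos = text.indexOf(links[ln].text);
      if (pos === -1) { continue; }
      if (pos) {
        // leading plain text before the match
        nodes.push({ type: 'text', content: text.slice(0, pos), level: level });
      }
      nodes.push({ type: 'link_open', href: links[ln].url, title: '', level: level++ });
      nodes.push({ type: 'text', content: links[ln].text, level: level });
      nodes.push({ type: 'link_close', level: --level });
      text = text.slice(pos + links[ln].text.length);
    }
    if (text.length) {
      // trailing plain text after the last match
      nodes.push({ type: 'text', content: text, level: level });
    }
    // nodes: text('see '), link_open, text(url), link_close, text(' for details')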
@@ -0,0 +1,28 @@
'use strict';

var parseRef = require('../parser_ref');

module.exports = function references(state) {
  var tokens = state.tokens, i, l, content, pos;

  // Parse reference definitions out of lone paragraphs
  for (i = 1, l = tokens.length - 1; i < l; i++) {
    if (tokens[i - 1].type === 'paragraph_open' &&
        tokens[i].type === 'inline' &&
        tokens[i + 1].type === 'paragraph_close') {

      content = tokens[i].content;
      while (content.length) {
        pos = parseRef(content, state.inline, state.options, state.env);
        if (pos < 0) { break; }
        content = content.slice(pos).trim();
      }

      tokens[i].content = content;
      if (!content.length) {
        tokens[i - 1].tight = true;
        tokens[i + 1].tight = true;
      }
    }
  }
};
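
The while loop consumes reference definitions one at a time: parseRef returns the number of characters it ate, or a negative value once the remainder is not a definition. A standalone sketch with a stub parseRef (the regex is a simplification for illustration, not the real parser_ref, which also takes the inline parser, options, and env):

    // Stub: recognizes one '[label]: url ...' definition per call.
    function parseRef(str) {
      var m = /^\s*\[[^\]]+\]:\s*\S+[^\n]*(\n|$)/.exec(str);
      return m ? m[0].length : -1;
    }

    var content = '[home]: https://example.com\n[docs]: https://example.com/docs';
    var pos;

    while (content.length) {
      pos = parseRef(content);
      if (pos < 0) { break; }
      content = content.slice(pos).trim();
    }
    // content === '' -> the paragraph held only definitions, so the
    // surrounding paragraph_open/paragraph_close pair gets tight = true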
@@ -0,0 +1,14 @@
'use strict';

module.exports = function typographer(state) {
  if (!state.options.typographer) { return; }
  var tokens = state.tokens, tok, i, l;

  // Apply typographic replacements to inline tokens
  for (i = 0, l = tokens.length; i < l; i++) {
    tok = tokens[i];
    if (tok.type === 'inline') {
      state.typographer.process(tok, state);
    }
  }
};
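
The rule itself only gates on options.typographer and dispatches each inline token to state.typographer.process. A sketch with a stub replacement engine (the (c) → © substitution is an invented stand-in for whatever rules the real typographer applies):

    var typographer = require('./rules_core/typographer'); // hypothetical path

    var state = {
      options: { typographer: true },
      tokens:  [ { type: 'inline', content: '(c) 2014' } ],
      typographer: {
        process: function (tok, state) {
          tok.content = tok.content.replace(/\(c\)/gi, '\u00A9'); // stand-in rule
        }
      }
    };

    typographer(state);
    // state.tokens[0].content === '\u00A9 2014'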
@@ -1,120 +0,0 @@
// Replace link-like texts with link nodes.
//
// Currently restricted to http/https/ftp
//
'use strict';


var Autolinker = require('autolinker');


var LINK_SCAN_RE = /www|\:\/\//;

var links = [];
var autolinker = new Autolinker({
  stripPrefix: false,
  replaceFn: function (autolinker, match) {
    // Only collect matched strings but don't change anything.
    if (match.getType() === 'url') {
      links.push({ text: match.matchedText, url: match.getUrl() });
    }
    return false;
  }
});

function isLinkOpen(str) {
  return /^<a[>\s]/i.test(str);
}
function isLinkClose(str) {
  return /^<\/a\s*>/i.test(str);
}


module.exports = function linkify(t, state) {
  var i, token, text, nodes, ln, pos, level,
      htmlLinkLevel = 0,
      tokens = state.tokens;

  // We scan from the end, to keep position when new tags are added.
  // Use reversed logic in links start/end match.
  for (i = tokens.length - 1; i >= 0; i--) {
    token = tokens[i];

    // Skip content of markdown links
    if (token.type === 'link_close') {
      i--;
      while (tokens[i].level !== token.level && tokens[i].type !== 'link_open') {
        i--;
      }
      continue;
    }

    // Skip content of html tag links
    if (token.type === 'htmltag') {
      if (isLinkOpen(token.content) && htmlLinkLevel > 0) {
        htmlLinkLevel--;
      }
      if (isLinkClose(token.content)) {
        htmlLinkLevel++;
      }
    }
    if (htmlLinkLevel > 0) { continue; }

    if (token.type === 'text' && LINK_SCAN_RE.test(token.content)) {

      text = token.content;
      links.length = 0;
      autolinker.link(text);

      if (!links.length) { continue; }

      // Now split string to nodes
      nodes = [];
      level = token.level;

      for (ln = 0; ln < links.length; ln++) {

        if (!state.parser.validateLink(links[ln].url)) { continue; }

        pos = text.indexOf(links[ln].text);

        if (pos === -1) { continue; }

        if (pos) {
          nodes.push({
            type: 'text',
            content: text.slice(0, pos),
            level: level
          });
        }
        nodes.push({
          type: 'link_open',
          href: links[ln].url,
          title: '',
          level: level++
        });
        nodes.push({
          type: 'text',
          content: links[ln].text,
          level: level
        });
        nodes.push({
          type: 'link_close',
          level: --level
        });
        text = text.slice(pos + links[ln].text.length);
      }
      if (text.length) {
        nodes.push({
          type: 'text',
          content: text,
          level: level
        });
      }

      // replace current node
      state.tokens = tokens = [].concat(tokens.slice(0, i), nodes, tokens.slice(i + 1));
    }
  }
};