@@ -71,7 +71,7 @@ var rules = [];
 
 
 // code
-rules.push(function code(state, startLine, endLine) {
+rules.push(function code(state, startLine, endLine, silent) {
   var nextLine, last;
 
   if (getIndent(state, startLine, 4) === -1) { return false; }
@@ -94,6 +94,8 @@ rules.push(function code(state, startLine, endLine) {
     break;
   }
 
+  if (silent) { return true; }
+
   state.tokens.push({
     type: 'code',
     startLine: startLine,
@@ -105,8 +107,79 @@ rules.push(function code(state, startLine, endLine) {
 });
 
 
+// heading
+
+rules.push(function heading(state, startLine, endLine, silent) {
+  var ch, level,
+      pos = state.bMarks[startLine],
+      max = state.eMarks[startLine];
+
+  ch = state.src.charCodeAt(pos);
+
+  // skip leading spaces
+  while (isWhiteSpace(ch) && pos < max) {
+    ch = state.src.charCodeAt(++pos);
+  }
+
+  if (ch !== 0x23/* # */ || pos >= max) { return false; }
+
+  // count heading level
+  level = 1;
+  ch = state.src.charCodeAt(++pos);
+  while (ch === 0x23/* # */ && pos < max && level <= 6) {
+    level++;
+    ch = state.src.charCodeAt(++pos);
+  }
+
+  if (!isWhiteSpace(ch) || pos >= max || level > 6) { return false; }
+
+  // skip spaces before heading text
+  ch = state.src.charCodeAt(++pos);
+  while (isWhiteSpace(ch) && pos < max) {
+    ch = state.src.charCodeAt(++pos);
+  }
+
+  if (pos >= max) { return false; }
+
+  // Now pos contains offset of the first header char
+  // Let's cut tails like ' ### ' from the end of string
+  max--;
+  ch = state.src.charCodeAt(max);
+
+  while (isWhiteSpace(ch) && max > pos) {
+    ch = state.src.charCodeAt(--max);
+  }
+  if (ch === 0x23/* # */) {
+    while (ch === 0x23/* # */ && max > pos) {
+      ch = state.src.charCodeAt(--max);
+    }
+    if (isWhiteSpace(ch)) {
+      while (isWhiteSpace(ch) && max > pos) {
+        ch = state.src.charCodeAt(--max);
+      }
+    } else if (ch === 0x5C/* \ */) {
+      max++;
+    }
+  }
+  max++;
+
+  if (silent) { return true; }
+
+  state.tokens.push({ type: 'heading_open', level: level });
+  state.lexerInline.tokenize(state, pos, max);
+  state.tokens.push({ type: 'heading_close', level: level });
+
+  skipEmptyLines(state, ++startLine);
+  return true;
+});
+
+
 // Horizontal rule
-rules.push(function hr(state, startLine, endLine) {
+rules.push(function hr(state, startLine, endLine, silent) {
   var ch, marker,
       pos = state.bMarks[startLine],
       space_max = pos + 3,
@@ -162,6 +235,8 @@ rules.push(function hr(state, startLine, endLine) {
     }
   }
 
+  if (silent) { return true; }
+
   state.tokens.push({ type: 'hr' });
 
   skipEmptyLines(state, ++startLine);
@@ -171,10 +246,18 @@ rules.push(function hr(state, startLine, endLine) {
 
 // Paragraph
 rules.push(function paragraph(state, startLine, endLine) {
-  var nextLine = startLine + 1;
+  var nextLine = startLine + 1,
+      rules_named = state.lexerBlock.rules_named;
 
   // jump line-by-line until empty one or EOF
   while (nextLine < endLine && !isEmpty(state, nextLine)) {
+    // Force paragraph termination if next tag found
+    if (rules_named.hr(state, nextLine, endLine, true)) { break; }
+    if (rules_named.heading(state, nextLine, endLine, true)) { break; }
+    //if (rules_named.lheading(state, nextLine, endLine, true)) { break; }
+    //if (rules_named.blockquote(state, nextLine, endLine, true)) { break; }
+    //if (rules_named.tag(state, nextLine, endLine, true)) { break; }
+    //if (rules_named.def(state, nextLine, endLine, true)) { break; }
     nextLine++;
   }
 
@@ -194,10 +277,16 @@ rules.push(function paragraph(state, startLine, endLine) {
 ////////////////////////////////////////////////////////////////////////////////
 // Lexer class
 
+function functionName(fn) {
+  var ret = fn.toString();
+  ret = ret.substr('function '.length);
+  ret = ret.substr(0, ret.indexOf('('));
+  return ret;
+}
 
 function findByName(self, name) {
   for (var i = 0; i < self.rules.length; i++) {
-    if (self.rules[i].name === name) {
+    if (functionName(self.rules[i]) === name) {
       return i;
     }
   }
@@ -209,6 +298,7 @@ function findByName(self, name) {
 //
 function LexerBlock() {
   this.rules = [];
+  this.rules_named = {};
 
   for (var i = 0; i < rules.length; i++) {
     this.after(null, rules[i]);
@@ -229,6 +319,8 @@ LexerBlock.prototype.at = function (name, fn) {
   } else {
     this.rules = this.rules.slice(0, index).concat(this.rules.slice(index + 1));
   }
+
+  this.rules_named[functionName(fn)] = fn;
 };
 
 
@@ -238,6 +330,7 @@ LexerBlock.prototype.at = function (name, fn) {
 LexerBlock.prototype.before = function (name, fn) {
   if (!name) {
     this.rules.unshift(fn);
+    this.rules_named[functionName(fn)] = fn;
     return;
   }
@@ -247,6 +340,7 @@ LexerBlock.prototype.before = function (name, fn) {
   }
 
   this.rules.splice(index, 0, fn);
+  this.rules_named[functionName(fn)] = fn;
 };
 
 
@@ -256,6 +350,7 @@ LexerBlock.prototype.before = function (name, fn) {
 LexerBlock.prototype.after = function (name, fn) {
   if (!name) {
     this.rules.push(fn);
+    this.rules_named[functionName(fn)] = fn;
     return;
   }
@@ -265,6 +360,7 @@ LexerBlock.prototype.after = function (name, fn) {
   }
 
   this.rules.splice(index + 1, 0, fn);
+  this.rules_named[functionName(fn)] = fn;
 };
 
 
@@ -286,7 +382,7 @@ LexerBlock.prototype.tokenize = function (state, startLine, endLine) {
   // - return true
 
   for (i = 0; i < len; i++) {
-    ok = rules[i](state, line, endLine);
+    ok = rules[i](state, line, endLine, false);
     if (ok) { break; }
   }
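
The change above gives every block rule a fourth `silent` argument: when true, the rule only reports whether it would match at the given line and must not push tokens, which is how the paragraph rule probes `rules_named.hr` and `rules_named.heading` to decide where a paragraph ends. Below is a minimal sketch of that probe-then-emit contract, not part of the diff; `hrLike`, `miniState`, and the regex are illustrative stand-ins only.

// Probe-then-emit contract for a block rule with a `silent` flag.
// `hrLike` and `miniState` are illustrative stand-ins, not code from the diff.
function hrLike(state, startLine, endLine, silent) {
  var line = state.lines[startLine];
  if (!/^ {0,3}(?:\* *){3,}$/.test(line)) { return false; }
  if (silent) { return true; }          // probe: report the match, emit nothing
  state.tokens.push({ type: 'hr' });    // normal run: emit the token
  return true;
}

var miniState = { lines: [ '* * *', 'text' ], tokens: [] };
console.log(hrLike(miniState, 0, 2, true));   // true  (silent probe)
console.log(miniState.tokens.length);         // 0     (no token emitted)
console.log(hrLike(miniState, 0, 2, false));  // true  (real run)
console.log(miniState.tokens.length);         // 1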