How to use the marked.Lexer function in marked

To help you get started, we’ve selected a few marked.Lexer examples based on popular ways it is used in public projects.

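All of the examples below follow the same basic pattern: construct a Lexer, tokenize a Markdown string, and hand the resulting tokens to the parser. Here is a minimal sketch of that pattern, assuming the pre-1.0 marked API these projects use, where marked.Lexer and marked.Parser are exposed as constructors:

const marked = require('marked');

// Tokenize the Markdown source into a token stream.
const lexer = new marked.Lexer();
const tokens = lexer.lex('# Hello\n\nSome *Markdown* text.');

// Tokens can be inspected or rewritten here before rendering.

// Render the token stream to HTML with the default options.
const html = marked.Parser.parse(tokens);
console.log(html);

Everything the projects below do is a variation on this pattern: overriding lexer rules, rewriting tokens, or supplying a custom renderer before parsing.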

github mathigon / textbooks / translations / encode.js
function parseContent(content) {
  // Block Indentation
  content = content.split('\n').map((line) => {
    if (!line.startsWith(':::')) return line;
    return '\n\n' + encode(line) + '\n\n'
  }).join('\n');

  // Parse Markdown (but override HTML detection)
  const lexer = new marked.Lexer();
  lexer.rules.html = /^<.*[\n]{2,}/;
  const tokens = lexer.lex(content);
  let parsed = marked.Parser.parse(tokens, {renderer});

  // Parse custom element attributes
  parsed = parsed.replace(/{([^}]+)}/g, (selection, body) => {
    return body.match(/^[0-9]+$/) ? selection : encode(selection)
  });

  parsed = parsed.replace(/\u00a0/g, ' ');  // replace non-breaking spaces with regular spaces

  // Split into sections of at most ~5000 characters.
  const output = [''];
  for (const row of parsed.split(/\n\n/)) {
    if (output[output.length - 1].length + row.length < 4950) {
      output[output.length - 1] += '\n\n' + row

github linuxdeepin / deepin-manual / web / src / app / services / manual-renderer.ts
const processMarkdown = function(src) {
  // Lex
  const lexer = new marked.Lexer();
  let tokens;
  try {
    tokens = lexer.lex(src);
  } catch (err) {
    throw new Error(`Lexer Error ${err}`);
  }

  // Extract Headers
  const parsed = parseNavigationItems(tokens);
  console.log('parsed: ', parsed);

  // Pass tokens to HTML renderer
  const html = marked(src, {
    renderer: getHTMLRenderer(),
  });

github firekylin / firekylin / src / common / service / marked-with-mathjax.js
async render(content) {
    var mathLexer = new marked.Lexer();
    var tokens = mathLexer.lex(content);
  
    for (let i = 0; i < tokens.length; i++) {
      const item = tokens[i];
  
      // Handle block-level expressions
      if (item.type === 'code' && item.lang === 'math') {
        tokens[i] = {
          type: 'paragraph',
          text: await _renderMathJax(item.text),
        }
      }
  
      // Handle tables
      if (item.type === 'table') {
        // Handle the table header

github jdeniau / changelog-view / src / markdown.js
function getTokens(content) {
  const lexer = new marked.Lexer();
  const tokens = lexer.lex(content);

  return tokens;
}
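
The token array returned by lexer.lex is plain data, so it can be filtered or rewritten before rendering, as several of the projects here do. A rough sketch against the same older marked API, collecting heading texts (the getHeadings helper is just for illustration):

const marked = require('marked');

function getHeadings(content) {
  const tokens = new marked.Lexer().lex(content);
  // Keep only heading tokens and return their text.
  return tokens
    .filter((token) => token.type === 'heading')
    .map((token) => token.text);
}

console.log(getHeadings('# Title\n\n## Section\n\nBody text.'));
// -> [ 'Title', 'Section' ]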

github atom / atomdoc / src / parser.js
const parse = function (docString) {
  const lexer = new marked.Lexer()
  const tokens = lexer.lex(docString)
  const firstToken = tokens[0]

  if (!firstToken || (firstToken.type !== 'paragraph')) {
    throw new Error('Doc string must start with a paragraph!')
  }

  const doc = new Doc(docString)

  Object.assign(doc, parseSummaryAndDescription(tokens))

  while (tokens.length) {
    let args, events, examples, returnValues, titledArgs
    if ((titledArgs = parseTitledArgumentsSection(tokens))) {
      if (doc.titledArguments == null) doc.titledArguments = []
      doc.titledArguments.push(titledArgs)

github louis-tru / ngui / node_modules / ngui-tools / marked / html.js
function gen_html(text_md, title, template) {
	var lexer = new marked.Lexer();
	var tokens = lexer.lex(text_md);

	renderer.m_toc = [ ];
	renderer.m_prev_level = 0;
	template = template || get_marked_template();

	var body = marked.parser(tokens, { renderer: renderer });

	for ( var i = renderer.m_prev_level; i > 0; i-- ) {
		renderer.m_toc.push('');
		renderer.m_toc.push('');
	}
	template = template.replace(/__placeholder_toc__/g, renderer.m_toc.join('\n'));
	renderer.m_toc = null;
	renderer.m_prev_level = 0;

github twinlabs / forum / app / components / markdownInitializer.js
if (window.localStorage.getItem('forumDisableImages') === 'true') {
    markedRenderer.image = function(href, title, text) {
      return `
        <a href="${href}">
          ${href}
        </a>
      `;
    };
  }

  var lexer = new marked.Lexer();
  // Disable heading tokenization by stubbing the rule with a matcher that never matches.
  lexer.rules.heading = { exec: function() {} };

  marked.lexer = lexer;

  marked.setOptions({
    emoji: function(emoji) {
      return (
        '