const modules = {}

remark()
  .use(generator, { sections, modules })
  .use(toc, { maxDepth: 2, tight: true })
  .use(collapse, {
    test: 'Table of Contents',
    summary: () => 'Click to expand'
  })
  .use(github)
  .use(bookmarks, { modules })
  // Disable padding to lessen diff noise
  .use(stringify, { paddedTable: false, looseTable: true })
  .process(vfile.readSync(fp), (err, file) => {
    if (err) throw err
    console.error(report(file))
    vfile.writeSync(file)
  })
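// The snippet above leaves out its requires. A rough sketch of what they might
// be, assuming the usual remark ecosystem packages (`generator` and `bookmarks`
// look project-specific, and `sections`/`fp` are defined elsewhere):
const remark = require('remark')
const vfile = require('to-vfile')           // readSync/writeSync used above
const report = require('vfile-reporter')    // formats file.messages for stderr
const toc = require('remark-toc')           // supports the maxDepth/tight options
const collapse = require('remark-collapse') // supports the test/summary options
const github = require('remark-github')
const stringify = require('remark-stringify')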
;(async function () {
  // Say we are making a module that exports just enough Pi (3.14159).
  // We’re documenting it with a readme file, [`example/readme.md`][example-md]:
  console.log('markdown', exampleMd)

  // …and an example script to document it [`example/example.js`][example-js-2]:
  console.log('js', exampleJs)

  // …If we use `remark-usage`, we can generate the `Usage` section
  var path = require('path')
  var vfile = require('to-vfile')
  var remark = require('remark')
  var usage = require('.')

  var file = vfile.readSync({path: 'readme.md', cwd: 'example'})

  file = await remark()
    .use(usage)
    .process(file)

  // Now, printing `file` (the newly generated readme) yields:
  console.log('markdown', String(file))
  // remark-usage-ignore-next
}())
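// A sketch of running the same generation as a one-off build step and writing
// the regenerated readme back to disk (this assumes the published
// `remark-usage` package name and is not part of the original example):
var vfile = require('to-vfile')
var remark = require('remark')
var usage = require('remark-usage')

remark()
  .use(usage)
  .process(vfile.readSync({path: 'readme.md', cwd: 'example'}))
  .then(function (file) {
    vfile.writeSync(file)
  })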
'use strict';
var join = require( 'path' ).join;
var toVFile = require( 'to-vfile' );
var remark = require( 'remark' );
var insertURLs = require( './../lib' );
var fpath;
var vfile;
var opts;
var out;
// Load a Markdown file...
fpath = join( __dirname, 'fixtures/simple.txt' );
vfile = toVFile.readSync( fpath );
// Specify the directory containing equation images:
opts = {
	'dir': './doc/img/', // relative to Markdown file
	'prefix': '' // no prefix
};
// Insert src URLs:
out = remark().use( insertURLs, opts ).processSync( vfile );
// Output the results:
console.log( out.contents );
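// To persist the transformed Markdown, the stringified contents can be written
// back out with the standard fs module (a sketch reusing `join` and `out` from
// above; the output filename is made up):
var writeFileSync = require( 'fs' ).writeFileSync;
writeFileSync( join( __dirname, 'fixtures/simple_out.txt' ), String( out ) );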
// Extract the addon code snippets from doc/api/addons.md into test/addons/ so
// it can be verified that the C++ code compiles and the js code runs.
// Add .gyp files which will be used to compile the C++ code.
// Modify the require paths in the js code to pull from the build tree.
// Triggered from the build-addons target in the Makefile and vcbuild.bat.
const { mkdir, writeFile } = require('fs');
const { resolve } = require('path');
const vfile = require('to-vfile');
const unified = require('unified');
const remarkParse = require('remark-parse');
const rootDir = resolve(__dirname, '..', '..');
const doc = resolve(rootDir, 'doc', 'api', 'addons.md');
const verifyDir = resolve(rootDir, 'test', 'addons');
const file = vfile.readSync(doc, 'utf8');
const tree = unified().use(remarkParse).parse(file);
const addons = {};
let id = 0;
let currentHeader;
const validNames = /^\/\/\s+(.*\.(?:cc|h|js))[\r\n]/;
tree.children.forEach((node) => {
  if (node.type === 'heading') {
    currentHeader = file.contents.slice(
      node.children[0].position.start.offset,
      node.position.end.offset);
    addons[currentHeader] = { files: {} };
  } else if (node.type === 'code') {
    const match = node.value.match(validNames);
    if (match !== null) {
      addons[currentHeader].files[match[1]] = node.value;
    }
  }
});
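// For context: the code blocks in addons.md start with a comment naming the
// file they belong to (e.g. `// addon.cc`), which is what `validNames`
// captures. A quick check of that pattern against a made-up sample:
const sample = '// addon.cc\n#include <node.h>';
console.log(sample.match(validNames)[1]); // prints 'addon.cc'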
async function processMdxAsync(algoliaIndex: any, file: File, indexName: string): Promise<void> {
    const content = await read(file.path);

    await remark()
        .use(slug) // slugify heading text as ids
        .use(mdx)
        .use(() => async (tree: Node[]) => {
            await processContentTreeAsync(tree, file, algoliaIndex, indexName);
        })
        .process(content);
}
async function processMdxAsync(indexName: string, file: File): Promise<void> {
    const content = await read(file.path);

    await remark()
        .use(slug) // slugify heading text as ids
        .use(mdx)
        .use(() => (tree: Node[]) => processContentTree(tree, file, indexName))
        .process(content);
}
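// Both helpers above rely on the same unified plugin shape: `.use()` receives
// an attacher that returns a transformer, and unified awaits a transformer
// that returns a promise, so the async and non-async variants behave the same.
// A minimal JavaScript sketch of that shape (the plugin below is made up for
// illustration):
const remark = require('remark');

const logHeadingDepths = () => async (tree) => {
  tree.children
    .filter((node) => node.type === 'heading')
    .forEach((node) => console.log(node.depth));
};

remark()
  .use(logHeadingDepths)
  .process('# Hello\n\n## World\n')
  .then((file) => console.log(String(file)));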
t.test(fixture, function (st) {
  var file = vfile.readSync(path.join(fp, 'index.html'), 'utf8')
  var messages = JSON.parse(
    fs.readFileSync(path.join(fp, 'messages.json'), 'utf8')
  )

  file.dirname = ''

  rehype()
    .data('settings', {emitParseErrors: true})
    .parse(file)

  st.deepEqual(
    JSON.parse(JSON.stringify(file.messages)),
    messages,
    'should emit messages for `' + fixture + '`'
  )

  st.end()
})
t.test(fixture, function (st) {
  var input = vfile.readSync(join(base, fixture, 'input.md'))
  var treePath = join(base, fixture, 'tree.json')
  var outputPath = join(base, fixture, 'output.md')
  var output
  var actual
  var expected
  var config
  var proc

  try {
    config = JSON.parse(read(join(base, fixture, 'config.json')))
  } catch (error) {}

  proc = remark().use(frontmatter, config)
  actual = proc.parse(input)
  // The expected tree is read from the fixture, mirroring the config read above
  try {
    expected = JSON.parse(read(treePath))
  } catch (error) {}

  st.deepEqual(actual, expected, 'should parse `' + fixture + '`')
  st.end()
})
var vfile = require('to-vfile')
var report = require('vfile-reporter')
var unified = require('unified')
var parse = require('orga-unified')
var mutate = require('orga-rehype')
var stringify = require('rehype-stringify')
var doc = require('rehype-document')
unified()
  .use(parse)
  .use(mutate)
  .use(doc, {title: 'Hi!'})
  .use(stringify)
  .process(vfile.readSync('./README.org'), function (err, file) {
    console.error(report(err || file))
    console.log(String(file))
  })
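// To save the generated HTML instead of only logging it, the processed file
// can be handed back to to-vfile (a sketch reusing the requires above; the
// output filename is made up):
unified()
  .use(parse)
  .use(mutate)
  .use(doc, {title: 'Hi!'})
  .use(stringify)
  .process(vfile.readSync('./README.org'))
  .then(function (file) {
    file.path = 'readme.html'
    vfile.writeSync(file)
  })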
visit(markdownAST, 'code', async (node) => {
  const lang = (node.lang || '').toLowerCase();
  const fileContent = vfile.readSync(markdownNode.fileAbsolutePath);

  if (lang === language) {
    await remark()
      .use(mermaid)
      .process(fileContent, (err, file) => {
        if (err) throw err;

        const mermaidSrcMatch = file.messages.filter(
          (item) => item.message.source === node.value
        )[0];

        node.type = 'html';
        node.value = `<img src="${mermaidSrcMatch.message.imgSrc}">`;
      });
  }
});
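// Worth noting: unist-util-visit runs visitors synchronously and does not wait
// for the async callback above, so the transform may still be pending when the
// plugin returns. A common workaround is to collect the target nodes first and
// await the work afterwards; a sketch under that assumption (`transformNode`
// is a hypothetical async helper, not part of the snippet above):
const visit = require('unist-util-visit');

async function transformMermaidNodes(markdownAST, language, transformNode) {
  const targets = [];

  visit(markdownAST, 'code', (node) => {
    if ((node.lang || '').toLowerCase() === language) {
      targets.push(node);
    }
  });

  await Promise.all(targets.map((node) => transformNode(node)));
  return markdownAST;
}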