fs.readFile(filePath, { encoding: 'utf8' }, function (err, data) {
  t.error(err, 'read ' + basename + ' file without error')
  var formatted
  try {
    formatted = fmt(data)
  } catch (e) {
    t.error(e, 'format ' + basename + ' without error')
  }
  standard.lintText(formatted, function (err, result) {
    t.error(err, 'linting ' + basename + ' should be error free')
    t.equal(result.errorCount, 0, basename + ' error free after formatting')
    t.equal(result.warningCount, 0, basename + ' warning free after formatting')
    if (result.errorCount !== 0 || result.warningCount !== 0) {
      // If there is an issue, print the details
      console.log(inspect(result, { depth: depth || null }))
    }
    t.end()
  })
})
}
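For reference, the callback form of standard.lintText used in the test above can be run on its own. A minimal sketch, assuming an older standard release that still provides the callback API; the source string is illustrative only:

// Minimal sketch: standalone use of the callback API shown above.
// Assumptions: an older `standard` release with the callback signature,
// and an illustrative source string.
const standard = require('standard')

const source = "var unused = 1\nconsole.log('hi')\n"

standard.lintText(source, function (err, result) {
  if (err) throw err
  console.log('errors:', result.errorCount, 'warnings:', result.warningCount)
  result.results.forEach(function (file) {
    file.messages.forEach(function (m) {
      console.log(m.line + ':' + m.column + ' ' + m.message + ' (' + m.ruleId + ')')
    })
  })
})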
      raw[l] = raw[l].slice(0, -1)
    }
    raw.push('},')
    // raw.push(',')
  } else {
    raw.push(key + ': ' + result + ',')
  }
}
parseRaw(result.subs)
raw[raw.length - 1] = raw[raw.length - 1].slice(0, -1)
// raw.unshift(`const { findParent } = require('./framework')\n`)
// fs.writeFileSync(__dirname + '/jsx.transpiled.real.js', raw.join('\n'))
// will make this super nice
const str = result.init + '\n' + raw.join('\n')
standard.lintText(str, { fix: true }, (err, data) => {
  if (err) console.log('ERR!', err)
  // console.log(data.results[0])
  fs.writeFileSync(__dirname + '/jsx.transpiled.real.js', data.results[0].output) // eslint-disable-line
})
asciidoc += '// Use `node scripts/generate-docs-examples.js` to generate the docs examples\n\n'
var code = 'async function run (client) {\n// START\n'
for (var i = 0; i < source.length; i++) {
  const { api, query, params, body } = source[i]
  const apiArguments = Object.assign({}, params, query, body ? { body } : body)
  var serializedApiArguments = Object.keys(apiArguments).length > 0
    ? JSON.stringify(apiArguments, null, 2)
    : ''
  code += `const response${getResponsePostfix(i)} = await client.${api.replace(/_([a-z])/g, g => g[1].toUpperCase())}(${serializedApiArguments})
console.log(response${getResponsePostfix(i)})
\n`
}
code += '// END\n}'
const { results } = standard.lintTextSync(code, { fix: true })
code = results[0].output
code = code.slice(code.indexOf('// START\n') + 9, code.indexOf('\n\n// END'))
asciidoc += `[source, js]
----
${dedent(code)}
----
`
return asciidoc

function getResponsePostfix (i) {
  if (source.length === 1) return ''
  return String(i)
}
}
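The synchronous, auto-fixing variant used by the generator above can also be exercised in isolation. A minimal sketch, assuming a standard release that still exposes lintTextSync; the input string is illustrative:

// Minimal sketch: lintTextSync with autofix, as in the snippet above.
// Assumption: an older `standard` release that still exposes lintTextSync.
const standard = require('standard')

const input = 'const greeting = "hello";\nconsole.log(greeting)\n'
const { results } = standard.lintTextSync(input, { fix: true })

// `output` is only set when fixes were applied; otherwise keep the original text.
const fixed = results[0].output || input
console.log(fixed)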
// Backward compatibility
process.emitWarning('It is recommended to use a Web3 instance instead of Web3Provider')
contract.setProvider(web3)
} else {
throw Error('You should provide a valid web3 instance')
}
}
return contract
}
module.exports = {
${Object.keys(artifacts).map(toFactoryScript).join(',')}
}
`
// Format the generated script with StandardJS style
const lintResult = standard.lintTextSync(module, { fix: true })
const scriptToWrite = lintResult.results[0].output
// If the output file already exists, compare it with the new script and skip writing when unchanged
try {
  const legacy = fs.readFileSync(outputPath, 'utf8')
  if (legacy === scriptToWrite) {
    console.log('truffle-plugin-modularizer: No updates')
    resolve()
    return
  }
} catch (e) {}
// Make the directory if it does not exist
fs.mkdir(path.dirname(outputPath), { recursive: true }, (err) => {
  if (err) reject(err)
  else {
return Promise.all(compare.data.files.map(async file => {
  if (!whiteList.includes(file.filename)) {
    const content = await context.github.repos.getContent(context.repo({
      path: file.filename,
      ref: branch
    }));
    console.log(content);
    const text = Buffer.from(content.data.content, 'base64').toString();
    Object.assign(linterItems, { cwd: '', fix: true, filename: file.filename });
    console.log(linterItems);
    standard.lintText(text, linterItems, (err, results) => {
      if (err) {
        console.log('err: ', err);
      }
      console.log('res', results);
      return Promise.all(results.results.map(result => {
        console.log(result);
        if (result.output) {
          console.log('output', result.output);
          // Only update the file when standard produced a fixed version
          // (files on the whiteList were already filtered out above)
          context.github.repos.updateFile(context.repo({
            path: file.filename,
            message: `Fix lint errors for ${file.filename}`,
            content: Buffer.from(result.output).toString('base64'),
            sha: content.data.sha,
            branch
          }));
let file_navigation_documents_xhtml = Template["navigation-documents.xhtml"];
const navigationList = State.mangaInfo.contents.map(navPointInfo => {
  if (navPointInfo.refindex === 1)
    return '<li><a href="text/p_cover.xhtml">' + htmlToEscape(navPointInfo.text) + '</a></li>';
  return '<li><a href="text/p_' + counter(navPointInfo.refindex - 2, 4) + '.xhtml">' + htmlToEscape(navPointInfo.text) + '</a></li>';
}).join('\n');
file_navigation_documents_xhtml = file_navigation_documents_xhtml
  .replace('', navigationList)
// render standard.opf file
let file_opf = Template["standard.opf"];
const imageItemStr = State.pageInfo.list.map((blobIndex, index) => {
  const blob = BlobStore.getBlobObject(blobIndex);
  const mimetype = String(blob.type);
  if (index === 0)
    return ''
  const num = counter(index - 1, 4);
  return '';
}).join('\n');
const pageItemStr = State.pageInfo.list.map((b, index) => {
  if (index === 0)
    return '';
var run = require('./helpers/run')
var put = require('./helpers/put')
var create = require('./helpers/create')
var validate = require('./helpers/fuzzing').validate
tape('autogenerated failing fuzz test', function (t) {
var writesPerReplication = ${JSON.stringify(writeArrays)}.map(b => new Map(b))
create.many(${dbCount}, function (err, dbs, replicateByIndex) {
t.error(err)
run(${writeOps.map(op => op.toString())})
})
})`, { singleQuote: true, semi: false })
var standardized = standard.lintTextSync(source, { fix: true })
console.log(standardized.results[0].output)
console.log('\n')
}
result.results.forEach(function (result) {
  result.messages.forEach(function (message) {
    console.log(
      ' %s:%d:%d: %s%s',
      result.filePath, message.line || 0, message.column || 0, message.message,
      Args.value('verbose') ? ' (' + message.ruleId + ')' : ''
    )
  })
})
process.exitCode = result.errorCount ? 1 : 0
}
}
if (scanFile) {
  standard.lintText(fs.readFileSync(scanpath).toString(), opts, output)
} else {
  standard.lintFiles(scanpath, opts, output)
}
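standard.lintFiles, used as the fallback above, takes the same options and completion callback as lintText. A minimal sketch, assuming the older callback-based API; the glob pattern is illustrative:

// Minimal sketch: lintFiles with the same callback shape as lintText.
// Assumptions: an older callback-based `standard` release and an illustrative glob.
const standard = require('standard')

standard.lintFiles(['lib/**/*.js'], { fix: false }, function (err, result) {
  if (err) throw err
  // Mirror the CLI snippet above: non-zero exit code when errors remain.
  process.exitCode = result.errorCount ? 1 : 0
})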
async run (message, args) {
  var hrStart = process.hrtime(this.hrStart)
  var lintResult = standard.lintTextSync(`${args.code}\n`)
  var lintLatency = process.hrtime(hrStart)
  var messages = lintResult.results[0].messages.map(m => stripIndents`
    **${m.ruleId ? m.ruleId : 'Error'} - (${m.line}:${m.column})**
    ❯ ${m.message}
  `).join('\n')
  message.embed({
    author: { name: this.client.user.tag, icon_url: this.client.user.displayAvatarURL() },
    footer: { text: message.author.tag, icon_url: message.author.displayAvatarURL() },
    timestamp: new Date(),
    title: `*Linted in ${lintLatency[0] > 0 ? `${lintLatency[0]}s ` : ''}${lintLatency[1] / 1000000}ms.*`,
    description: messages ? messages.slice(0, 2000) : '✅ Lint Success!',
    color: this.client.getClientColor(message)
  })
}
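All of the snippets on this page use the callback and *Sync variants from older standard releases. Newer major versions (v16 and later) expose lintText as a Promise-based call instead; a rough sketch of what an adaptation might look like, with the exact result shape to be verified against the installed version:

// Rough sketch only: Promise-based API in newer `standard` releases.
// Assumption: lintText resolves with an array of per-file results whose
// entries carry `messages` similar to the `results` entries used above.
const standard = require('standard')

async function lint (source) {
  const results = await standard.lintText(source)
  for (const file of results) {
    for (const m of file.messages) {
      console.log(m.line + ':' + m.column + ' ' + m.message + ' (' + m.ruleId + ')')
    }
  }
}

lint('var x = 1\nconsole.log(x)\n').catch(console.error)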