// Load every *.schema.json fixture under `dir` and feed it through the
// project's schema loader before traversing the combined schema set.
async function loadschemas(dir) {
  const schemaloader = loader();
  const schemadir = path.resolve(__dirname, 'fixtures', dir);
  const schemas = await readdirp.promise(schemadir, { fileFilter: '*.schema.json' });
  return traverse(schemas
    .map(({ fullPath }) => schemaloader(
      // eslint-disable-next-line global-require, import/no-dynamic-require
      require(fullPath), fullPath,
    )));
}
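// The snippet above relies on the `fullPath` property of the entries returned
// by readdirp.promise; each entry also carries `path` (relative to the root)
// and `basename`. A minimal, self-contained sketch of that shape — the
// project-local `loader`/`traverse` helpers are deliberately not reproduced:
const path = require('path');
const readdirp = require('readdirp');

async function listSchemaFiles(dir) {
  const entries = await readdirp.promise(dir, { fileFilter: '*.schema.json' });
  return entries.map(({ path: relative, fullPath, basename }) => ({ relative, fullPath, basename }));
}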
async function readDirRecursive(dirPath) {
  const files = await readdirp.promise(dirPath, { type: 'files' });
  const dirs = await readdirp.promise(dirPath, { type: 'directories' });
  const relativePaths = [];
  // On Windows, the archive file paths need to be converted to POSIX-style paths.
  if (process.platform === 'win32') {
    files.forEach(file => relativePaths.push(file.path.split(path.sep).join('/')));
    for (const dir of dirs) {
      if (await isEmptyDir(dir.fullPath)) {
        relativePaths.push(`${dir.path.split(path.sep).join('/')}/`);
      }
    }
  } else {
    files.forEach(file => relativePaths.push(file.path));
    for (const dir of dirs) {
      if (await isEmptyDir(dir.fullPath)) {
        relativePaths.push(`${dir.path}/`);
      }
    }
  }
  return relativePaths;
}
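// `isEmptyDir` is not defined in the snippet above; a minimal sketch of what
// such a helper could look like (the name and exact behaviour are assumptions):
const fs = require('fs');

async function isEmptyDir(dirPath) {
  const entries = await fs.promises.readdir(dirPath);
  return entries.length === 0;
}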
const read = async (directory: string) => {
  const stream = readdirp(directory, { type: 'all' });
  let i = 0;
  for await (const chunk of stream) {
    // Check memory usage with this line. It should be 10MB or so.
    // Comment it out if you simply want to list files.
    await new Promise(resolve => setTimeout(resolve, 500));
    console.log(`${++i}: ${chunk.path}`);
  }
  console.log('Stream done', i);

  const entries = await readdirp.promise(directory);
  console.log('Promise done', entries.map(e => e.path));
};
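// The same stream can also be consumed through its EventEmitter interface
// instead of for-await; 'warn' carries non-fatal errors (e.g. permission
// problems) while 'error' is fatal. A minimal sketch, assuming readdirp is
// imported as in the snippet above:
readdirp('.', { type: 'all' })
  .on('data', entry => console.log(entry.path))
  .on('warn', warning => console.warn('non-fatal:', warning))
  .on('error', error => console.error('fatal:', error))
  .on('end', () => console.log('done'));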
loadFromDir(dir: string): Promise<{[string]: PlainNodeMetadata}> {
  return readdirp.promise(dir, {
    fileFilter: ['*.yaml', '*.yml'],
  })
    .then(entries => {
      const tasks = entries.map(entry => {
        return util.promisify(fs.readFile)(entry.fullPath)
      })
      // FIXME: Promise.all rejects as soon as one task fails
      return Promise.all(tasks)
    })
    .then(contents => {
      let output = {}
      contents.map(content => {
        return YAML.parse(content.toString())
      })
        .forEach(meta => {
          output = { ...output, ...meta }
        })
      return output
    })
}
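// The FIXME above points out that Promise.all rejects as soon as one read
// fails. Promise.allSettled keeps the successful reads and skips the failures
// instead; a minimal sketch, not the original project's approach:
const fs = require('fs')
const readdirp = require('readdirp')

async function readAllYaml(dir) {
  const entries = await readdirp.promise(dir, { fileFilter: ['*.yaml', '*.yml'] })
  const results = await Promise.allSettled(
    entries.map(entry => fs.promises.readFile(entry.fullPath, 'utf8')),
  )
  return results
    .filter(result => result.status === 'fulfilled')
    .map(result => result.value)
}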
const logResults = async function(FUNCTIONS_DIST) {
  const files = await readdirp.promise(FUNCTIONS_DIST)
  if (files.length === 0) {
    console.log('No functions were packaged')
    return
  }
  const paths = files.map(getLoggedPath)
  console.log(`Functions packaged in ${FUNCTIONS_DIST}
${serializeList(paths)}`)
}
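// `getLoggedPath` and `serializeList` come from the surrounding module and are
// not shown above; plausible minimal versions (implementations are assumptions):
const getLoggedPath = entry => entry.path
const serializeList = items => items.map(item => ` - ${item}`).join('\n')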
export async function getStaticMdxPaths(root) {
  const files = await readdirp.promise(root, { fileFilter: ['*.mdx'] })

  return files.map(({ path: p }) => {
    return {
      params: {
        page: p
          .replace(/\.mdx$/, '')
          .split('/')
          .filter((p) => p !== 'index'),
      },
    }
  })
}
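// `getStaticMdxPaths` returns entries in the `{ params: { page: [...] } }`
// shape that Next.js catch-all dynamic routes expect; a minimal sketch of how
// a page might consume it (the content directory and fallback setting are
// assumptions):
export async function getStaticPaths() {
  const paths = await getStaticMdxPaths('./content')
  return { paths, fallback: false }
}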
async _registerChariotEvents() {
  const directory = path.dirname(require.main.filename);
  const readFiles = await readdirp.promise(directory, { fileFilter: '*.js', directoryFilter: ['!.git', '!*modules'] });
  this.eventFiles = readFiles.map(file => file.path);

  for (const chariotEventFile of this.eventFiles) {
    const chariotEvent = require(path.join(directory, chariotEventFile));
    chariotEvent.client = this;

    if (chariotEvent instanceof Event) {
      if (!Constants.EVENTS.EVENT_NAMES.includes(chariotEvent._eventName)) {
        throw new Error(`Unknown event called "${chariotEvent._eventName}" in file "${chariotEventFile}". Event names are case sensitive! Check https://abal.moe/Eris/docs/Client for an event overview.`);
      }

      if (typeof chariotEvent.execute === 'undefined') {
        throw new Error(`Couldn't find main executor "execute" in event file "${chariotEventFile}"!`);
      }

      // ...the rest of the original method (the actual event registration) is not included in this excerpt.
    }
  }
}
const hashDir = async function(dirPath, fileStat, base) {
  const files = await readdirp.promise(dirPath, { fileFilter, alwaysStat: true })
  const dirHashInfo = getHashInfo(dirPath, fileStat, base)
  const hashInfos = await pMap(files, ({ fullPath, stats }) => getFileInfo(fullPath, stats, base), {
    concurrency: MAX_CONCURRENCY,
  })
  const hash = await computeHash([dirHashInfo, ...hashInfos])
  return hash
}
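// `getHashInfo`, `getFileInfo` and `computeHash` belong to the surrounding
// project and are not shown above; a minimal sketch of hashing the collected
// info with Node's crypto module (purely illustrative, not the project's
// actual algorithm):
const crypto = require('crypto')

const computeHash = async hashInfos => {
  const hash = crypto.createHash('sha256')
  hashInfos.forEach(info => hash.update(JSON.stringify(info)))
  return hash.digest('hex')
}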