Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
const npath = rootPath || '*'
if (pkgs[npath]) return pkgs[npath]
const findUp = require('find-up')
let obj = {}
try {
let startDir = rootPath || require('require-main-filename')(parentRequire || require)
// When called in an environment that lacks require.main.filename, such as a jest test runner,
// startDir is already process.cwd(), and should not be shortened.
// Whether or not it is _actually_ a directory (e.g., extensionless bin) is irrelevant, find-up handles it.
if (!rootPath && path.extname(startDir)) {
startDir = path.dirname(startDir)
}
const pkgJsonPath = findUp.sync('package.json', {
cwd: startDir
})
obj = JSON.parse(fs.readFileSync(pkgJsonPath))
} catch (noop) {}
pkgs[npath] = obj || {}
return pkgs[npath]
}
/* global process require */
// there seems to be a bug in yarn whereby forwarding arguments with -- like so:
// $ yarn monorun [...] -- --foo
// results in [...] being stripped away; can workaround with:
// $ yarn monorun -- [...] -- --foo
// but note that yarn warns about a future behavioral change re: yarn and --
const {dirname} = require('path');
const {spawn} = require('child_process');
const {sync: findUp} = require('find-up');
const minimist = require('minimist');
const monorepoRootPath = dirname(findUp('lerna.json'));
let cliArgs = process.argv.slice(2);
const options = minimist(
cliArgs,
{boolean: [
'include-filtered-dependents',
'include-filtered-dependencies',
'no-bail',
'no-prefix',
'no-private',
'no-progress',
'no-sort',
'parallel',
'reject-cycles',
'stream'
import { readFileSync } from 'fs';
import findUp from 'find-up';
// Optional compiler options come from the nearest package.json's
// "svelte:compiler" field; when absent, register for html/svelte files only.
let options;
const pkgPath = findUp.sync('package.json');
if (pkgPath) {
  // Read compiler options from `package.json`.
  options = JSON.parse(readFileSync(pkgPath, 'utf8'))['svelte:compiler'];
}
Plugin.registerCompiler(
  { extensions: (options && options.extensions) || ['html', 'svelte'] },
  () => new SvelteCompiler(options)
);
const parseAssetsManifest = (config: IConfig): IAssetsConfig => {
const { dev, defaultEntry, outputDir, errorHtmlPath } = config;
const assetsManifestPath = findUp.sync(ASSETS_MANIFEST, { cwd: outputDir });
if (!assetsManifestPath || !assetsManifestPath.length) {
printAndExit('> Your webpack config does not use lissom/webpack wrapping');
}
if (dev) {
deleteCache(assetsManifestPath);
}
const assetsManifest = require(assetsManifestPath);
const {
entrypoints,
HtmlWebpackPlugin,
outputPath,
modules,
chunks,
} = assetsManifest;
const routers = getRouters(entrypoints, outputPath, defaultEntry);
const parseHtml = getParseHtml(HtmlWebpackPlugin, outputPath);
input,
dirname,
filename,
stats,
outDir: 'dist',
basename: '',
scope: {},
pkg,
}, config, cli.flags)
opts.outDir = path.resolve(opts.outDir)
if (opts.config) opts.config = path.resolve(opts.config)
if (opts.webpack) {
opts.webpack = require(path.resolve(opts.webpack))
} else {
const webpackConfig = findup.sync('webpack.config.js', { cwd: dirname })
if (webpackConfig) opts.webpack = require(webpackConfig)
}
if (opts.template) {
opts.template = require(path.resolve(opts.template))
}
// Fatal-error handler: report the error, then terminate with exit code 1.
const handleError = function (err) {
  log.error(err)
  process.exit(1)
}
log(chalk.cyan('@compositor/x0'))
switch (cmd) {
case 'build':
// Ad-hoc probe: resolve known marker files upward from hard-coded local and
// network-share directories, logging whatever find-up locates (or undefined).
const findUp = require('find-up')
const locate = (name, cwd) => findUp.sync(name, { cwd })

let test = locate('package.json', 'C:\\Users\\pahmeyer\\Documents\\linter-gams\\specs')
console.log(test)
let FarmDyn = locate('exp_starter.gms', 'N:agpo\\work1\\FarmDyn_Pah\\FarmD\\gams\\coeffgen')
console.log(FarmDyn)
let capri = locate('capmod.gms', 'N:agpo\\work1\\SUSTAg\\capri\\capri3\\gams\\feed')
console.log(capri)
let remote = locate('apilib.exe', 'Y:gams-org\\testlib_ml')
console.log(remote)
// Locate and parse the nearest package.json, starting the upward search at
// `path` or, when omitted, at the main module's directory as resolved by
// require-main-filename (using `parentRequire` when available).
// Returns the parsed package object; returns {} when no package.json is
// found or it cannot be read/parsed (failures are deliberately swallowed).
// Results are memoized in the module-level `pkgs` cache.
function pkgUp (path) {
// Cache key: '*' stands for "no explicit start path".
const npath = path || '*'
if (pkgs[npath]) return pkgs[npath]
const findUp = require('find-up')
let obj = {}
try {
const pkgJsonPath = findUp.sync('package.json', {
cwd: path || require('path').dirname(require('require-main-filename')(parentRequire || require)),
normalize: false
})
// readFileSync with no encoding yields a Buffer; JSON.parse accepts it.
obj = JSON.parse(fs.readFileSync(pkgJsonPath))
} catch (noop) {}
// `obj` is always an object here; the `|| {}` is a defensive no-op.
pkgs[npath] = obj || {}
return pkgs[npath]
}
#! /usr/bin/env node
const puppeteer = require('puppeteer');
const findUp = require('find-up');
const fs = require('fs');
const { enterGiveaways, unfollowGiveaways } = require('./src/giveaways');
const signIn = require('./src/signIn');
const sqlite = require('./src/database');
const { updateDB } = require('./src/updateDB');
//look for config file
// Search upward from cwd for the nearest `.ggrc.json`; configPath is
// undefined when no config file exists anywhere above the current directory.
const configPath = findUp.sync(['.ggrc.json']);
const config = configPath ? JSON.parse(fs.readFileSync(configPath)) : undefined;
//set up CLI
// yargs setup: `gg` exposes an `init` subcommand plus flags used by the
// giveaway scripts; values from the JSON config file become flag defaults.
// NOTE(review): `.config(config)` receives undefined when no `.ggrc.json`
// was found — confirm yargs tolerates an undefined config object here.
const args = require('yargs')
.scriptName('gg')
.command(require('./src/init'))
.describe('page', 'page to start script on')
.number('page')
.describe('unfollow', 'unfollow giveaways script')
.boolean('unfollow')
.describe('config', 'path to JSON config file')
.string('config')
.config(config)
.help().argv;
if (args._[0] === 'init') {
// Return the site root: the directory containing the nearest `htdocs` entry
// found by walking upward from the current working directory, or null when
// no `htdocs` exists above it.
// NOTE(review): recent find-up versions match only files by default —
// confirm the installed version resolves an `htdocs` *directory* here.
function getCurrentSiteRootDirectory() {
  const htdocsPath = findUp.sync('htdocs');
  if (!htdocsPath) {
    return null;
  }
  // Strip only the trailing `htdocs` segment. The previous
  // `.replace('/htdocs', '')` removed the FIRST occurrence anywhere in the
  // path (wrong for e.g. `/srv/htdocs-backup/htdocs`) and never matched
  // Windows `\` separators.
  return htdocsPath.replace(/[/\\]htdocs$/, '');
}
// Find and parse the closest package.json, starting at `path` (or at the
// main module's directory when no path is given). Parsed results are
// memoized in the shared `pkgs` cache; read/parse failures yield {}.
function pkgUp (path) {
  const cacheKey = path || '*'
  const cached = pkgs[cacheKey]
  if (cached) return cached
  const findUp = require('find-up')
  let parsed = {}
  try {
    // Fall back to the main entry script's directory when no path is given.
    const startDir = path ||
      require('path').dirname(require('require-main-filename')(parentRequire || require))
    const manifestPath = findUp.sync('package.json', { cwd: startDir, normalize: false })
    parsed = JSON.parse(fs.readFileSync(manifestPath))
  } catch (noop) {}
  pkgs[cacheKey] = parsed || {}
  return pkgs[cacheKey]
}