// Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
// See the License for the specific language governing permissions and
// limitations under the License.
'use strict';
var assert = require('assert');
var cp = require('child_process');
var glob = require('glob');
var path = require('path');
var tmp = require('tmp');
// Pin the express release whose test suite this script runs.
var expressVersion = '4.15.3';
// Setup
// Shallow-clone the tagged express release into a throwaway temp directory.
var express_dir = tmp.dirSync().name;
cp.execFileSync('git', ['clone', '--branch', expressVersion,
'https://github.com/strongloop/express.git', '--depth', '1', express_dir]);
// Glob matching every test file inside the cloned repo.
var test_glob = path.join(express_dir, 'test', '*.js');
// NOTE(review): not assigned anywhere in this chunk - presumably set by
// code later in the file; confirm before removing.
var error;
process.chdir(express_dir);
// Remove name to allow for cyclic dependency
console.log('Updating express metadata');
// Rename the package from "express" to "e" so express can be installed as
// a dependency of itself without npm rejecting the self-reference.
cp.execFileSync('sed', ['-i.bak', 's/"express"/"e"/', 'package.json']);
// Install express as its own dependency
console.log('Installing express dependencies');
cp.execFileSync('npm', ['--version'], { stdio: 'inherit' });
cp.execFileSync('npm', ['install']);
cp.execFileSync('npm', ['install', 'express@' + expressVersion]);
// Reformat tests to use newly installed express
// NOTE(review): this block appears to be a corrupted splice - the method
// body is cut off mid-way and lines from an unrelated protoc script (the
// "--proto_path" return below) are fused in. Recover the original source
// before changing behavior; comments only here.
private saveAuthData(dataPath: string): ICookie[] {
// Runs an electron helper that performs interactive auth and prints the
// captured cookies to stdout. `dataPath` is unused in the visible code -
// presumably consumed in the missing tail; verify against the original.
let electronExecutable = this._authOptions.electron || 'electron';
let isWindows = (process.platform.lastIndexOf('win') === 0);
// On Windows, go through a shell so .cmd/.bat shims resolve.
let options: any = isWindows ? { shell: true } : undefined;
const output = childProcess.execFileSync(electronExecutable, [path.join(__dirname, 'electron/main.js'), '--', this._siteUrl, this._authOptions.force.toString()], options).toString();
// Cookie payload is wrapped between #{ ... }# markers in the helper output.
let cookieRegex = /#\{([\s\S]+?)\}#/gm;
let cookieData = cookieRegex.exec(output);
// NOTE(review): cookieData is null when the markers are absent - the
// visible code does not guard against that.
let cookiesJson = cookieData[1].split(';#;');
let cookies: ICookie[] = [];
cookiesJson.forEach((cookie) => {
// Strip newlines and surrounding quotes before parsing each JSON cookie.
let data: string = cookie.replace(/(\n|\r)+/g, '').replace(/^["]+|["]+$/g, '');
if (data) {
let cookieData = JSON.parse(data) as ICookie;
// Only httpOnly cookies are kept.
if (cookieData.httpOnly) {
cookies.push(cookieData);
// explicitly set 1 hour expiration for on-premise
if (isOnPremUrl(this._siteUrl)) {
// NOTE(review): spliced-in line from an unrelated file follows.
return arg.substring("--proto_path=".length);
}
});
// A file needs an extra --proto_path entry when it exists on disk but is
// not located under any of the proto paths supplied by the caller.
const notOnProtoPath = filesToCompile.filter(file => {
    const coveredByProtoPath = protoPaths.some(p => file.startsWith(p + path.sep));
    return !coveredByProtoPath && fs.existsSync(file);
});
// De-duplicate the parent directories of those stray files; each one
// becomes its own --proto_path flag.
const extraPathFlags = new Set(notOnProtoPath.map(file => "--proto_path=" + path.dirname(file)));
const additionalProtoPaths = Array.from(extraPathFlags);
protocArgs.push(...additionalProtoPaths, ...userArgs);
console.log("Compiling descriptor with command: " + protoc + " " + protocArgs.join(" "));
execFileSync(protoc, protocArgs);
// Make sure we have the latest archive.
// Shallow-fetch only the gh-pages branch tip from the upstream remote.
execFileSync("git", [
"fetch", "--depth=1",
upstream.fetch.remote, "gh-pages",
])
// Set up archive directories
// Check out just the "archive" directory from the fetched gh-pages tree
// into the working copy, without switching branches.
execFileSync("git", [
"checkout", `${upstream.fetch.remote}/gh-pages`,
"--", "archive",
])
// Move the archive under dist/ and create the slot for this version.
await fs.rename(r("archive"), r("dist/archive"))
await fs.mkdir(r(`dist/archive/v${version}`), {recursive: true})
// Tell Git to ignore our changes - it's no longer there.
// NOTE(review): after the rename, "git add archive" stages the removal of
// the checked-out directory - presumably intentional; confirm upstream.
execFileSync("git", ["add", "archive"])
return new Generator({version, guides, methods, layout})
}
/**
 * Runs `command` synchronously with `args`, echoing the command line first.
 *
 * @param command executable to run (resolved via PATH by execFileSync).
 * @param args argument vector passed to the executable.
 * @param options optional execFileSync options (env, stdio, cwd, ...).
 *   Bug fix: this parameter was previously declared nowhere and silently
 *   dropped, even though call sites pass one (the docker-compose
 *   invocations below pass `execOptions` / `{ env }`). Now forwarded.
 * @returns the command's captured stdout as a string; empty string when
 *   stdout is not captured (e.g. caller supplied `stdio: 'inherit'`,
 *   where execFileSync returns null).
 */
function exec(command, args, options) {
  console.log('> ' + [command].concat(args).join(' '));
  var output = execFileSync(command, args, options || {});
  // With inherited stdio there is nothing captured; avoid null.toString().
  return output == null ? '' : output.toString();
}
// Compose file and output directory for the dockerised docs build.
const composePath = join(rootPath, 'tasks/docker_docs/docker-compose.yml');
const htmlDocsDir = join(rootPath, 'html_docs');
// NOTE(review): Object.assign with process.env as the target mutates the
// parent environment in place - use {} as the target to avoid that.
const env = Object.assign(process.env, {
KIBANA_DOCS_CONTAINER_NAME: 'kibana_docs',
KIBANA_DOCS_CONTEXT: rootPath
});
// Inherit stdin/stdout/stderr so the container's build output streams live.
const stdio = [0, 1, 2];
const execOptions = { env, stdio };
// Build the docs inside the container (blocks until compose exits).
exec('docker-compose', [
'-f', composePath,
'up'
], execOptions);
// Resolve the docs container's id so we can copy files out of it.
const containerId = String(exec('docker-compose', [
'-f', composePath,
'ps',
'-q', env.KIBANA_DOCS_CONTAINER_NAME
], { env })).trim();
grunt.log.write('Clearing old docs ... ');
rimraf.sync(htmlDocsDir);
grunt.log.writeln('done');
grunt.log.write('Copying new docs ... ');
// Copy the rendered HTML out of the container into html_docs/.
exec('docker', [
'cp',
`${containerId}:/home/kibana/html_docs`,
htmlDocsDir
]);
grunt.log.writeln('done');
/**
 * Executes a command synchronously and returns its output as a string.
 *
 * @param command The executable to run; coerced to a safe string first.
 * @param args Arguments for the command; normalized to a string array.
 *
 * @returns The command's output, converted via asString().
 */
function runCommandSync(command: string, args: any[]): string {
    const safeCommand = vscode_helpers.toStringSafe(command);
    const safeArgs = vscode_helpers.asArray(args, false)
        .map(a => vscode_helpers.toStringSafe(a));
    const output = ChildProcess.execFileSync(safeCommand, safeArgs);
    return asString(output);
}
/**
 * Compiles the TypeScript project by invoking `tsc` with no arguments, so
 * the compiler picks up its configuration from tsconfig.json.
 *
 * @throws Error wrapping the compiler's captured output when tsc exits
 *   non-zero. Bug fix: when spawning tsc itself fails (e.g. not installed,
 *   ENOENT), the thrown error has no `output` and the old message rendered
 *   the literal string "undefined" - now falls back to the error message.
 */
function compileWithTSCConfig() {
    logger.debug("compiling typescript project with configuration from tsconfig.json")
    try {
        execFileSync("tsc", [], prepExecOpts)
    } catch (e: any) {
        // Prefer the compiler's own output; fall back to the spawn error.
        const detail = (e && e.output != null) ? e.output : ((e && e.message) || e)
        throw new Error(`error executing tsc:\n\n${detail}`)
    }
}
// First run: tag this process in NDD_DATA so the ndb backend can track it.
if (!process.env.NDD_DATA)
process.env.NDD_DATA = process.pid + '_ndbId';
// Advertise the ndb version on process.versions.
process.versions['ndb'] = '1.1.5';
// Open the inspector on an ephemeral port (0) without waiting for a client.
const inspector = require('inspector');
inspector.open(0, undefined, false);
// Snapshot of this process that gets published to the frontend over IPC.
const info = {
cwd: pathToFileURL(process.cwd()),
argv: process.argv.concat(process.execArgv),
data: process.env.NDD_DATA,
ppid: ppid,
id: String(process.pid),
inspectorUrl: inspector.url(),
scriptName: scriptName
};
// Re-run this same script as a child whose only job is to publish `info`
// over the IPC pipe (see the publisher branch below).
const {execFileSync} = require('child_process');
execFileSync(process.execPath, [__filename], {
env: {
NDD_IPC: process.env.NDD_IPC,
NDD_PUBLISH_DATA: JSON.stringify(info)
}
});
// Publisher mode: connect to the IPC socket, send the payload, and tear
// the socket down once acknowledged or after the timeout.
// NOTE(review): the `if` matching this `else` is outside the visible chunk.
} else {
const net = require('net');
const TIMEOUT = 30000;
const socket = net.createConnection(process.env.NDD_IPC, () => {
socket.write(process.env.NDD_PUBLISH_DATA);
// Give the peer 30s to reply before force-closing the connection.
const timeoutId = setTimeout(() => socket.destroy(), TIMEOUT);
socket.on('data', () => {
clearTimeout(timeoutId);
socket.destroy();
});
});