Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
async setup () {
// eslint-disable-next-line
console.log('Set-up puppeteer environment')
await super.setup()
this.global.__BROWSER__ = await puppeteer.connect({ browserWSEndpoint: 'ws://chrome:3000' })
this.global.__BROWSER__.on('disconnected', () => {
// eslint-disable-next-line
console.log('Browser disconnected.')
})
}
async _downloadBrowser (revision) {
const downloadHost = process.env.PUPPETEER_DOWNLOAD_HOST; // defaults to https://storage.googleapis.com if null
const browserFetcher = puppeteer.createBrowserFetcher({ host: downloadHost });
// same logic as puppeteer
revision = revision || process.env.PUPPETEER_CHROMIUM_REVISION || require('puppeteer-core/package.json').puppeteer.chromium_revision;
const revisionInfo = browserFetcher.revisionInfo(revision);
// do nothing if the revision is already downloaded.
if (revisionInfo.local)
return;
await browserFetcher.download(revisionInfo.revision, this._downloadProgressCallback(revision));
},
async start() {
this._browser = await pptr.launch({
headless: this._headless,
executablePath: this._executablePath,
pipe: this._pipe,
}).catch((err) => console.log(err));
this._page = await this._browser.pages()
.then((pageArr) => {
return pageArr[0];
});
this._page.goto(this._url, { waitUntil: 'networkidle0' });
return await this._page;
}
// Recursive React component scraping algorithm
// Downloads the pinned Chromium revision via puppeteer's BrowserFetcher.
// NOTE(review): this snippet is truncated — the `try` opened below is never
// closed in the visible source; the download/cleanup logic is missing here.
async function downloadChromium() {
const browserFetcher = puppeteer.createBrowserFetcher({
path: chromeTempPath, // presumably a temp dir for the Chromium binary — confirm at its declaration
host: downloadHost, // download mirror host; defined elsewhere in the file
})
// Resolve the target revision: env var, then npm config overrides, then the
// revision pinned in puppeteer-core's package.json (pptrCoreJson).
const revision =
process.env.PUPPETEER_CHROMIUM_REVISION ||
process.env.npm_config_puppeteer_chromium_revision ||
process.env.npm_package_config_puppeteer_chromium_revision ||
pptrCoreJson.puppeteer.chromium_revision
const revisionInfo = browserFetcher.revisionInfo(revision)
// If already downloaded
if (revisionInfo.local) return revisionInfo
try {
console.log(`Downloading Chromium r${revision}...`)
// Ensures the required Chromium revision exists under options.localDataDir,
// downloading it when absent and pruning older local revisions afterwards.
// NOTE(review): truncated — the `try` and the function are never closed in
// the visible source, and `cleanupOldVersions` is built but not awaited here.
async function downloadChromium(options, targetRevision) {
const browserFetcher = puppeteer.createBrowserFetcher({ path: options.localDataDir });
// Fall back to the revision pinned by puppeteer-core when none is given.
const revision = targetRevision || require('puppeteer-core/package.json').puppeteer.chromium_revision;
const revisionInfo = browserFetcher.revisionInfo(revision);
// Do nothing if the revision is already downloaded.
if (revisionInfo.local)
return revisionInfo;
// Override current environment proxy settings with npm configuration, if any.
try {
console.log(`Downloading Chromium r${revision}...`);
const newRevisionInfo = await browserFetcher.download(revisionInfo.revision);
console.log('Chromium downloaded to ' + newRevisionInfo.folderPath);
let localRevisions = await browserFetcher.localRevisions();
// Keep the freshly downloaded revision; everything else becomes a removal candidate.
localRevisions = localRevisions.filter(revision => revision !== revisionInfo.revision);
// Remove previous chromium revisions.
const cleanupOldVersions = localRevisions.map(revision => browserFetcher.remove(revision));
// Launches headless Chrome on a fixed port via chrome-launcher, attaches
// puppeteer over the DevTools browser URL, and (per its name) is meant to run
// a Lighthouse audit against `url`. NOTE(review): truncated — the lighthouse
// invocation and the end of this function are not in the visible source.
async function launchChromeAndRunLighthouse(url, opts, config) {
// eslint-disable-next-line no-unused-vars
const chrome = await chromeLauncher.launch({
port: 9222, // fixed port so the puppeteer.connect() below can find this instance
logLevel: 'silent',
chromeFlags: ['--headless', '--disable-gpu'],
});
const browser = await puppeteer.connect({
browserURL: 'http://localhost:9222',
});
// Re-apply network throttling whenever the browser's active target changes,
// so every page Lighthouse opens gets the same simulated connection.
browser.on('targetchanged', async target => {
const page = await target.page();
// NETWORK presumably maps WPT connection-profile names to CDP throttling
// settings — confirm at its definition site.
if (NETWORK[opts.connection]) {
await page
.target()
.createCDPSession()
.then(client => {
console.log(
`CDP: network conditions set to WPT ${opts.connection} profile.`,
);
return client.send('Network.emulateNetworkConditions', {
offline: NETWORK[opts.connection].offline,
// Downloads the pinned Chromium revision into chromeTempPath and removes any
// previously downloaded revisions. NOTE(review): truncated — the `try` and
// the function are never closed in the visible source (no catch/return here).
async function downloadChromium() {
const browserFetcher = puppeteer.createBrowserFetcher({
path: chromeTempPath, // presumably a temp dir for the Chromium binary — confirm at its declaration
host: downloadHost, // download mirror host; defined elsewhere in the file
})
// Resolve the target revision: env var, then npm config overrides, then the
// revision pinned in puppeteer-core's package.json (pptrCoreJson).
const revision =
process.env.PUPPETEER_CHROMIUM_REVISION ||
process.env.npm_config_puppeteer_chromium_revision ||
process.env.npm_package_config_puppeteer_chromium_revision ||
pptrCoreJson.puppeteer.chromium_revision
const revisionInfo = browserFetcher.revisionInfo(revision)
// If already downloaded
if (revisionInfo.local) return revisionInfo
try {
console.log(`Downloading Chromium r${revision}...`)
const newRevisionInfo = await browserFetcher.download(revisionInfo.revision)
console.log(`Chromium downloaded to ${newRevisionInfo.folderPath}`)
let localRevisions = await browserFetcher.localRevisions()
// Keep the freshly downloaded revision; everything else gets removed.
localRevisions = localRevisions.filter(r => r !== revisionInfo.revision)
// Remove previous revisions
const cleanupOldVersions = localRevisions.map(r => browserFetcher.remove(r))
await Promise.all(cleanupOldVersions)
// Renders pre-built receipt HTML to a PDF under
// <invoicePath>/<email>/Lyft/<year>/<month>/Receipt-<invoiceDate>.pdf using a
// locally launched Chrome. NOTE(review): truncated — the page.pdf() options
// object and the remainder of this function are not in the visible source.
export default async function (email, headers, year, month, invoiceDate, html) {
const documentDir = jetpack.cwd(store.get('invoicePath'))
const chrome = await launch(puppeteer)
store.set('processPID', chrome.pid) // Store process ID to kill when app quits
// Query the launched Chrome for its DevTools websocket URL, then attach puppeteer to it.
const resp = await util.promisify(request)(`http://localhost:${chrome.port}/json/version`)
const { webSocketDebuggerUrl } = JSON.parse(resp.body)
const browser = await puppeteer.connect({
browserWSEndpoint: webSocketDebuggerUrl
})
const page = await browser.newPage()
await page.setCacheEnabled(true)
await page.setExtraHTTPHeaders(headers)
await page.setContent(html)
// Fixed wait to let the injected HTML finish rendering before printing.
await page.waitFor(2000)
// NOTE(review): documentDir.path(...) receives an argument that already
// starts with documentDir.path() — the prefix looks doubled; confirm the
// intended directory layout before changing it.
if (!jetpack.exists(documentDir.path(`${documentDir.path()}/${email}/Lyft/${year}/${month}/`))) {
jetpack.dir(documentDir.path(`${documentDir.path()}/${email}/Lyft/${year}/${month}/`))
}
// Use print CSS so the PDF matches the receipt's print stylesheet.
await page.emulateMedia('print')
const receiptFilePath = `${documentDir.path()}/${email}/Lyft/${year}/${month}/Receipt-${invoiceDate}.pdf`
await page.pdf({
// Automates the Uber rider login flow (email → password → SMS verification)
// in a locally launched Chrome driven over the DevTools protocol.
// NOTE(review): truncated — everything after the initial page.goto() (form
// filling, the selectors' actual use, and the function's end) is missing
// from the visible source.
export default async function () {
// Selectors Needed
const EMAIL_SELECTOR = '#useridInput'
const PASSWORD_SELECTOR = '#password'
const SMS_SELECTOR = '#verificationCode'
const NEXT_BUTTON = '#app-body > div > div:nth-child(1) > form > button'
const VERIFY_BUTTON = '#app-body > div > div > form > button'
const DASHBOARD = '#root'
const documentDir = jetpack.cwd(store.get('invoicePath'))
const chrome = await launch(puppeteer)
store.set('processPID', chrome.pid) // Store process ID to kill when app quits
// Query the launched Chrome for its DevTools websocket URL, then attach puppeteer to it.
const resp = await util.promisify(request)(`http://localhost:${chrome.port}/json/version`)
const { webSocketDebuggerUrl } = JSON.parse(resp.body)
const browser = await puppeteer.connect({
browserWSEndpoint: webSocketDebuggerUrl
})
const page = await browser.newPage()
// Desktop-sized, retina-scaled viewport so the login page renders its desktop layout.
await page.setViewport({
width: 1440,
height: 990,
deviceScaleFactor: 2
})
await page.setCacheEnabled(true)
await page.setJavaScriptEnabled(true)
// Launch Page
await page.goto('https://auth.uber.com/login?next_url=https://riders.uber.com', { waitUntil: 'domcontentloaded' })
export default async function (email, headers, year, month, invoiceDate, html, rideType) {
const documentDir = jetpack.cwd(store.get('invoicePath'))
const chrome = await launch(puppeteer)
store.set('processPID', chrome.pid) // Store process ID to kill when app quits
const resp = await util.promisify(request)(`http://localhost:${chrome.port}/json/version`)
const { webSocketDebuggerUrl } = JSON.parse(resp.body)
const browser = await puppeteer.connect({
browserWSEndpoint: webSocketDebuggerUrl
})
const rideDirectory = rideType
const page = await browser.newPage()
await page.setCacheEnabled(true)
await page.setExtraHTTPHeaders(headers)
await page.setContent(html)
await page.waitFor(1000)
if (!jetpack.exists(documentDir.path(`${documentDir.path()}/${email}/${rideDirectory}/${year}/`))) {
jetpack.dir(documentDir.path(`${documentDir.path()}/${email}/${rideDirectory}/${year}/`))
}
await page.emulateMedia('print')