async fetch(path, opts = {}) {
await this._sema.v()
this._currContext.fetchesMade++;
if (this._currContext.fetchesMade >= MAX_REQUESTS_PER_CONNECTION) {
const ctx = context()
ctx.fetchesMade = 1
ctx.ongoingFetches = 0
this._contexts.push(ctx)
this._currContext = ctx
}
// If we're changing contexts, we don't want to record the ongoingFetch on the old context
// That'll cause an off-by-one error when trying to close the old socket later
this._currContext.ongoingFetches++;
const currentContext = this._currContext
if (this._debug) {
console.log('> [debug] Total requests made on socket #%d: %d', this._contexts.length, this._currContext.fetchesMade)
console.log('> [debug] Concurrent requests on socket #%d: %d', this._contexts.length, this._currContext.ongoingFetches)
}
async fetch(path, opts = {}) {
const { debug } = this._output;
await this._sema.v();
let currentContext;
this._currContext.fetchesMade++;
if (this._currContext.fetchesMade >= MAX_REQUESTS_PER_CONNECTION) {
const ctx = context();
ctx.fetchesMade = 1;
ctx.ongoingFetches = 0;
this._contexts.push(ctx);
this._currContext = ctx;
}
// If we're changing contexts, we don't want to record the ongoingFetch on the old context
// That'll cause an off-by-one error when trying to close the old socket later
this._currContext.ongoingFetches++;
currentContext = this._currContext;
debug(
`Total requests made on socket #${this._contexts.length}: ${this._currContext.fetchesMade}`
);
debug(
`Concurrent requests on socket #${this._contexts.length}: ${this._currContext.ongoingFetches}`
);
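The _sema.v() call at the top of fetch gates concurrency: the Sema(20) created in the constructor below allows at most 20 requests to hold a slot at once, and v() appears to play the acquire role (newer async-sema versions name these acquire and release). A minimal sketch of that gating with the current async-sema names, as an illustration rather than this client's exact code:

const { Sema } = require('async-sema');

// Allow at most 20 requests in flight at once; additional callers wait here.
const sema = new Sema(20);

async function limitedFetch(doFetch) {
  await sema.acquire();
  try {
    return await doFetch();
  } finally {
    sema.release();
  }
}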
constructor(url, { tls = true, debug } = {}) {
// We use multiple contexts because each context represents one connection
// With nginx, we're limited to 1000 requests before a connection is closed
// http://nginx.org/en/docs/http/ngx_http_v2_module.html#http2_max_requests
// To get around this, we keep track of requests made on a connection. When we're about to hit 1000,
// we start up a new connection and re-route all future traffic through it,
// and when the final request on the old connection resolves, we auto-close the old connection
this._contexts = [context()]
this._currContext = this._contexts[0]
this._currContext.fetchesMade = 0
this._currContext.ongoingFetches = 0
this._url = url
const parsed = parse(url)
this._protocol = parsed.protocol
this._sema = new Sema(20)
this._debug = debug
if (tls) {
this._initAgent()
}
}
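The comment above describes the whole strategy: count requests per connection, rotate to a fresh context just before nginx's http2_max_requests limit (1000 by default) is reached, and let the old connection drain. As an illustration only, here is a minimal, self-contained sketch of that count-and-rotate pattern; the plain objects stand in for real HTTP/2 contexts and the names are made up for the example:

const MAX_REQUESTS_PER_CONNECTION = 1000 // assumed to mirror nginx's default http2_max_requests

// Illustrative stand-in for context(): a real client would create an HTTP/2 context here
function makeContext(id) {
  return { id, fetchesMade: 0, ongoingFetches: 0 }
}

const contexts = [makeContext(1)]

function currentContext() {
  let ctx = contexts[contexts.length - 1]
  if (ctx.fetchesMade >= MAX_REQUESTS_PER_CONNECTION) {
    // About to hit the per-connection cap: route new requests through a fresh context
    ctx = makeContext(contexts.length + 1)
    contexts.push(ctx)
  }
  ctx.fetchesMade++
  return ctx
}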
constructor(url, { tls = true, debug } = {}) {
// We use multiple contexts because each context represents one connection
// With nginx, we're limited to 1000 requests before a connection is closed
// http://nginx.org/en/docs/http/ngx_http_v2_module.html#http2_max_requests
// To get around this, we keep track of requests made on a connection. When we're about to hit 1000,
// we start up a new connection and re-route all future traffic through it,
// and when the final request on the old connection resolves, we auto-close the old connection
this._contexts = [context()];
this._currContext = this._contexts[0];
this._currContext.fetchesMade = 0;
this._currContext.ongoingFetches = 0;
this._url = url;
const parsed = parse(url);
this._protocol = parsed.protocol;
this._sema = new Sema(20);
this._output = createOutput({ debug });
if (tls) {
this._initAgent();
}
}
const { body } = opts;
if (this._agent) {
opts.agent = this._agent;
}
if (body && typeof body === 'object' && typeof body.pipe !== 'function') {
opts.headers['Content-Type'] = 'application/json';
opts.body = new JsonBody(body);
}
if (
body &&
typeof body === 'object' &&
typeof body.pipe === 'function'
) {
opts.body = new StreamBody(body);
}
const handleCompleted = async res => {
currentContext.ongoingFetches--;
if (
currentContext !== this._currContext &&
currentContext.ongoingFetches <= 0
) {
// We've completely moved on to a new socket
// close the old one
// TODO: Fix race condition:
// If the response is a stream, and the server is still streaming data
// we should check if the stream has closed before disconnecting
// handleCompleted CAN technically be called before the res body stream is closed
debug('Closing old socket');
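Stepping back to the body handling at the top of this method: a caller picks the encoding purely by what it passes as body. A plain object gets wrapped in JsonBody and sent with Content-Type: application/json, while anything exposing a pipe method (a Node readable stream) gets wrapped in StreamBody. A hypothetical usage sketch, assuming client is an instance of this class; the paths, file name and payload are invented for illustration:

const fs = require('fs');

// `client` is assumed to be an instance of the class shown above.
async function examples(client) {
  // Plain object: wrapped in JsonBody, sent as application/json
  await client.fetch('/v2/example', {
    method: 'POST',
    headers: {},
    body: { name: 'demo' }
  });

  // Readable stream (has a .pipe method): wrapped in StreamBody and streamed
  await client.fetch('/v2/example-upload', {
    method: 'POST',
    headers: {},
    body: fs.createReadStream('./archive.tgz')
  });
}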
const { body } = opts
if (this._agent) {
opts.agent = this._agent
}
if (body && typeof body === 'object' && typeof body.pipe !== 'function') {
opts.headers['Content-Type'] = 'application/json'
if (shouldUseHttp2(this)) {
opts.body = new JsonBody(body)
} else {
opts.body = JSON.stringify(body)
}
}
if (shouldUseHttp2(this) && body && typeof body === 'object' && typeof body.pipe === 'function') {
opts.body = new StreamBody(body)
}
if (!shouldUseHttp2(this) && opts.body && typeof body.pipe !== 'function') {
opts.headers['Content-Length'] = Buffer.byteLength(opts.body)
}
const handleCompleted = async (res) => {
currentContext.ongoingFetches--;
if (currentContext !== this._currContext && currentContext.ongoingFetches <= 0) {
// We've completely moved on to a new socket
// close the old one
// TODO: Fix race condition:
// If the response is a stream, and the server is still streaming data
// we should check if the stream has closed before disconnecting
// handleCompleted CAN technically be called before the res body stream is closed
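What follows a check like this is tearing down the drained connection once its last in-flight request settles. A sketch of that close step, assuming the context object exposes a disconnectAll() method (as fetch-h2 contexts do); the helper name is illustrative, not this client's actual code:

// Illustrative: a context that has been rotated out and has no requests left
// in flight can safely have its socket(s) closed.
async function maybeCloseOldContext(oldContext, currContext) {
  if (oldContext !== currContext && oldContext.ongoingFetches <= 0) {
    // Assumes the context exposes disconnectAll(), as fetch-h2 contexts do
    await oldContext.disconnectAll()
  }
}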
async function download(opts) {
try {
if (typeof opts === 'string') opts = { url: opts }
const response = await fetch(opts.url, {})
let data = await response.text()
const file =
opts.file || pathUtil.basename(urlUtil.parse(opts.url).pathname)
if (await exists(file)) {
if (opts.overwrite === false) {
return Promise.resolve()
}
const localData = (await read(file)).toString()
const localLines = localData.split('\n')
const localCustomIndex = localLines.findIndex(line =>
/^# CUSTOM/i.test(line)
)
if (localCustomIndex !== -1) {
const remoteLines = data.split('\n')
const remoteCustomIndex = remoteLines.findIndex(line =>
/^# CUSTOM/i.test(line)
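Once the # CUSTOM marker has been located in both copies, a natural next step is to keep the freshly downloaded content above the marker and preserve the user's local additions from the marker down. That behaviour is an assumption here rather than something shown in the code; a sketch under that assumption:

// Assumption: remote content wins above "# CUSTOM", local content wins from the marker down
function mergeCustomSection(remoteLines, remoteCustomIndex, localLines, localCustomIndex) {
  return remoteLines
    .slice(0, remoteCustomIndex)
    .concat(localLines.slice(localCustomIndex))
    .join('\n')
}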
async function getGithubCommit(slug, fallback = 'master') {
const url = `${ghapi}/repos/${slug}/commits?${githubQueryString}`
try {
const response = await fetch(url, {
headers: {
Accept: 'application/vnd.github.v3+json'
}
})
if (response.status < 200 || response.status >= 300) {
throw await response.text()
}
const result = await response.json()
if (result.message) {
throw new Error(result.message + '\n' + url)
}
if (!result[0] || !result[0].sha) {
return fatal(
new Error(`${url} did not return the expected result`),
result
)
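If the function goes on to resolve with result[0].sha, the newest commit on the queried branch, calling it would look roughly like this; the repository slug is only an example:

// Hypothetical usage: fetch the latest commit SHA for a repository's default branch
async function example() {
  const sha = await getGithubCommit('nodejs/node')
  console.log('latest commit:', sha)
}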
if (!this._agent) {
if (this._debug) {
console.log('> [debug] re-initializing agent')
}
this._initAgent()
}
async function getNodeLTSVersions() {
const url =
'https://raw.githubusercontent.com/nodejs/Release/master/schedule.json'
try {
const response = await fetch(url)
const json = await response.json()
const lts = Object.entries(json)
return lts
} catch (err) {
throw new Errlop(`failed to fetch node.js LTS releases from ${url}`, err)
}
}
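Despite its name, getNodeLTSVersions returns every entry from the release schedule, so presumably getMinimumNodeLTSVersion below does the actual filtering. Each entry pairs a version key such as 'v18' with an object that carries start and end dates, plus an lts date on LTS lines, so a filter for currently supported LTS releases could look roughly like this (a sketch, not the project's actual helper):

// Sketch: keep only LTS lines that are inside their support window right now.
// Assumes the schedule.json shape: { "v18": { start, lts, maintenance, end, codename }, ... }
function filterActiveLTS(entries, now = new Date()) {
  return entries.filter(
    ([, meta]) => meta.lts && new Date(meta.start) <= now && now < new Date(meta.end)
  )
}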
async function getMinimumNodeLTSVersion() {