Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
// Fragment of HTTP-client redirect handling (enclosing response handler is
// outside this view). Follows a redirect only if the user-supplied
// allowRedirect policy approves it.
if (redirectTo && self.allowRedirect.call(self, response)) {
debug('redirect to', redirectTo)
// ignore any potential response body. it cannot possibly be useful
// to us at this point.
if (self._paused) response.resume()
// Guard against redirect loops: give up once maxRedirects is exceeded.
if (self._redirectsFollowed >= self.maxRedirects) {
self.emit('error', new Error("Exceeded maxRedirects. Probably stuck in a redirect loop "+self.uri.href))
return
}
self._redirectsFollowed += 1
// A relative Location header is resolved against the current request URI.
if (!isUrl.test(redirectTo)) {
redirectTo = url.resolve(self.uri.href, redirectTo)
}
var uriPrev = self.uri
self.uri = url.parse(redirectTo)
// handle the case where we change protocol from https to http or vice versa
if (self.uri.protocol !== uriPrev.protocol) {
self._updateProtocol()
}
// Record the hop so callers can inspect the full redirect chain.
self.redirects.push(
{ statusCode : response.statusCode
, redirectUri: redirectTo
}
)
// Per common browser behavior, downgrade to GET on redirect — except for
// 401 (auth retry) and 307 (method must be preserved by spec).
if (self.followAllRedirects && response.statusCode != 401 && response.statusCode != 307) self.method = 'GET'
// Fragment of a per-element scraper callback (enclosing iteration is outside
// this view). Extracts a candidate URL from the element, resolves it against
// the document URL, and pushes it to `result` unless excluded by `filter`.
var srcUrl = null;
// Anchors and <link> elements carry their URL in href; scripts in src.
if (is.a || is.link) {
srcUrl = $elem.attr('href');
} else if (is.script) {
srcUrl = $elem.attr('src');
} else {
// For img elements, check the attributes in the priority order given by srcAttrs
// (first non-empty attribute wins).
for (var i = 0; i < srcAttrs.length; i++) {
srcUrl = $elem.attr(srcAttrs[i]);
if (srcUrl) {
break;
}
}
}
// Resolve relative URLs against the page URL; keep falsy srcUrl as-is.
var absUrl = (srcUrl) ? urlParser.resolve(doc.url, srcUrl) : srcUrl;
// Exclusion check: apply the filter settings for absolute / relative /
// invalid URLs before collecting.
if (valUrl.isWebUri(absUrl)) {
var isAbsoluteLink = /^[a-z]+:\/\//i.test(srcUrl);
if (isAbsoluteLink && ! filter.absolute) {
return;
}
if (! isAbsoluteLink && ! filter.relative) {
return;
}
} else if (! filter.invalid) {
return;
}
result.push(absUrl);
});
import registryUrl from 'registry-url';
import fetch from 'node-fetch';
import url from 'url';
import localPackageJson from '../../package.json';

// Registry endpoint for this package's metadata document
// (e.g. https://registry.npmjs.org/<name>).
const remotePackageUrl = url.resolve(registryUrl(), localPackageJson.name);

/**
 * Get the package version currently published to the npm registry
 * (the `latest` dist-tag of this package's registry document).
 *
 * @returns {Promise<string>} the published semver string
 * @throws if the request fails, the response body is not valid JSON,
 *   or the document has no `dist-tags` (e.g. never published).
 */
async function fetchPublishedVersion() {
  // async/await instead of a .then() chain inside an async function.
  const res = await fetch(remotePackageUrl);
  const pkg = await res.json();
  return pkg['dist-tags'].latest;
}

export default fetchPublishedVersion;
// Fragment of a URL expander (enclosing function opens before this view).
// Issues a HEAD request without auto-redirects so each hop can be inspected
// and recursively expanded via exports.expand.
request.head({url:args.url, headers:headers, followRedirect:false}, function(err, res){
if(err) { args.err = err; return callback(args); }
// process a redirect
// NOTE(review): 308 (Permanent Redirect) is not handled here — confirm
// whether that is intentional.
if (res.statusCode === 301 || res.statusCode === 302 || res.statusCode === 307)
{
// re-basing like a browser would, yes sam, this happens
// NOTE(review): urllib.resolve is given a *parsed* Location object rather
// than a string — relies on implicit stringification; verify this works on
// the Node version in use.
var newup = urllib.parse(urllib.resolve(args.urlp,urllib.parse(res.headers.location)));
// if we're redirected to a login page, bail, kinda lame heuristic here but it works pretty well!
if(newup.pathname.indexOf("login") > 0 && newup.pathname.indexOf("login") < 10) return callback(args);
args.url = urllib.format(newup);
args.headers = res.headers; // convenience for callback
// Recurse to follow the next hop in the chain.
return exports.expand(args, callback);
}
args.headers = res.headers; // convenience for callback
// everything else, we're done done!
return callback(args);
});
}
// Jest test (enclosing describe opens before this view): an object with a
// missing url/path in a nested folder should still be served at its derived
// route with a 200 and the expected payload.
it('should work with missing url/path in a nested folder', async () => {
// Guard: both expectations below must run (catches silently-skipped awaits).
expect.assertions(2);
const req = await fetch(resolve(url, '/api/v1/object-missing-url'));
const res = await req.json();
expect(req.status).toStrictEqual(200);
expect(res).toMatchObject({ hello: 'missing nested url world' });
});
});
/**
 * Gatsby `onPostBuild` hook: renders a robots.txt file into the build
 * output directory.
 *
 * Merges user plugin options over the defaults; when `host` or `sitemap`
 * was not supplied, derives both from the site's `siteUrl` metadata via
 * the configured GraphQL query.
 *
 * @param {{ graphql: Function }} api - Gatsby node API helpers.
 * @param {object} pluginOptions - raw options from gatsby-config.
 * @returns {Promise<void>} resolves when the file has been written.
 */
export async function onPostBuild({ graphql }, pluginOptions) {
  const userOptions = getOptions(pluginOptions);
  const mergedOptions = { ...defaultOptions, ...userOptions };

  if (
    !Object.prototype.hasOwnProperty.call(mergedOptions, 'host') ||
    !Object.prototype.hasOwnProperty.call(mergedOptions, 'sitemap')
  ) {
    // NOTE(review): when only ONE of host/sitemap is missing, both are
    // overwritten here, clobbering a user-supplied value — confirm whether
    // they should instead be assigned individually.
    const {
      site: {
        siteMetadata: { siteUrl }
      }
    } = await runQuery(graphql, mergedOptions.query);
    mergedOptions.host = siteUrl;
    mergedOptions.sitemap = url.resolve(siteUrl, 'sitemap.xml');
  }

  const { policy, sitemap, host, output, configFile } = mergedOptions;

  const content = await robotsTxt({
    policy,
    sitemap,
    host,
    configFile
  });

  const filename = path.join(publicPath, output);
  // `return await` is redundant outside try/catch; return the promise directly.
  return writeFile(path.resolve(filename), content);
}
// NOTE(review): this function's body appears CORRUPTED by a bad merge/paste —
// partway through the request callback (at `var cb = next(...)`) it switches
// to an unrelated image-preloading fragment (`images[i]`, `node.image`,
// `he.decode`, `new Image()`), and the original `else` branch never calls
// `callback`. Recover the original body from version control before relying
// on this code.
//
// Intended purpose (first half): fetch `resource` relative to `baseURL` as a
// binary buffer and hand back a { resource: dataURI } diff via `callback`.
function makeDiff(request, baseURL, resource, callback) {
var resourceURL = urllib.resolve(baseURL, resource);
// encoding:null makes `request` return the body as a raw Buffer.
var options = { url: resourceURL, encoding: null };
request(options, function (err, response, body) {
if (err) {
logger.error('------ERROR------\n' + err.message);
logger.info('URL: ' + resourceURL);
// Treat malformed URIs/protocols as "no diff" rather than a hard error.
if (err.message.substring(0, 11) === 'Invalid URI' || err.message.substring(0, 16) === 'Invalid protocol') {
callback(null, response, {});
} else {
callback(err, response, {});
}
} else if (response === null) {
callback(null, null, {});
} else {
var datauri = dataURI(response, resourceURL, body);
var diff = {};
diff[resource] = datauri;
// ---- NOTE(review): splice point — everything below belongs to a different
// file (an HTML-to-image preloader), not to makeDiff. ----
var cb = next(function(err, image) {
image = image || empty(src);
images[i] = node.image = {
width: image.width,
height: image.height,
src: image.src,
complete: image.complete,
data: image
};
});
if(!src) return cb();
// Decode HTML entities, then resolve the src against the document base.
src = he.decode(src);
src = url.resolve(base, src);
var image = new Image();
image.onload = function() {
cb(null, image);
};
image.onerror = function() {
cb();
};
image.src = src;
});
};
/**
 * Build a GitHub API URL from path segments, appending auth and pagination
 * query parameters. Variadic: ghApi('repos', owner, name) joins the
 * segments into a single path.
 *
 * @param {...string} segments - path parts joined with the module's `join`.
 * @returns {string} absolute https://api.github.com URL with per_page=100.
 */
function ghApi(...segments) {
  // Rest parameters + spread instead of the legacy `arguments` object with
  // Function.prototype.apply. `auth` (module scope) is expected to be '' or
  // a leading '?...' query string, hence the '&' vs '?' separator choice.
  return resolve('https://api.github.com', join(...segments)) + auth + (auth ? '&' : '?') + 'per_page=100'
}
// Downloads a FeedHenry form-submission file group to a local file.
// (Function is truncated in this view — the response/stream handling
// continues past the last visible line.)
//
// fileGroupId   - id of the submission file group to fetch.
// outputFileName - local path the downloaded file is written to.
// cb            - node-style callback; called with an error string on a
//                 non-200 response.
function doGetSubmissionFile(fileGroupId, outputFileName, cb) {
//TODO the main part of this function should be moved into utils/request.js
var fhUrl = url.resolve(fhreq.getFeedHenryUrl(), "api/v2/forms/submission/file/" + fileGroupId);
var uri = url.parse(fhUrl);
// Pick the matching Node transport module for the URL scheme.
var proto = uri.protocol === 'https:' ? https : http;
var headers = {};
// Attach the stored session cookie, when one exists, for authentication.
var cookie = fhc.config.get("cookie");
if (cookie !== undefined) {
headers.cookie = "feedhenry=" + cookie + ";";
}
// http.get accepts headers via the (parsed-URL) options object.
uri.headers = headers;
var req = proto.get(uri, function(res){
if(res.statusCode !== 200) return cb("Unexpected response code for file download: " + res.statusCode + " message: " + res.body);
// Stream the response body straight to disk.
var stream = fs.createWriteStream(outputFileName);