Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
// NOTE(review): fragment — the enclosing fileRequestHandler begins before
// this excerpt (the range-request branch referenced below is not visible).
// If the client aborts mid-transfer, roll back the download counter.
req.on("aborted", () => {
logger.debug(
`HTTPService.fileRequestHandler: ${path} requested aborted.`
);
// Decrement download. No change since it was incremented before
// But we have to ensure that downInc has been resolved
// Chaining onto downUpdate serializes counter updates so the decrement
// only runs after the earlier increment promise has settled.
downUpdate = downUpdate.then(() =>
this.metaData.updateDownload(path, -1)
);
});
// Serve entire file if no range specified
// If range was specified then previous if block would have been
// run
res.writeHead(200, {
"Content-Type": Mime.lookup(path) || "application/octet-stream",
"Content-Length": `${data.size}`,
// encodeURIComponent guards against header injection via the file name.
"Content-Disposition": `inline; filename="${encodeURIComponent(
data.name
)}"`
});
// Stream the file to the response; pipe() handles backpressure.
let fileStream = Fs.createReadStream(path);
fileStream.pipe(res);
}
}
}
// NOTE(review): fragment of a Koa-style proxy middleware — the enclosing
// async function and the remainder of the action switch are outside this
// excerpt.
// Skip rule processing entirely when the user has rules disabled.
if (!profileService.enableRule(ctx.userID)) {
await next();
return;
}
const { userID } = ctx;
const { req } = ctx;
const { method, url } = req;
const urlObj = URL.parse(url);
// Find the first configured rule matching this user/method/URL.
const processRule = ruleService.getProcessRule(userID, method, urlObj);
if (!processRule) {
await next();
return;
}
// Expose which rule matched so clients can debug proxy behavior.
ctx.res.setHeader('zan-proxy-rule-match', processRule.match);
// Pre-set Content-Type from the URL extension when one can be guessed.
if (urlObj.pathname && mime.lookup(urlObj.pathname)) {
ctx.res.setHeader('Content-Type', mime.lookup(urlObj.pathname));
}
// The rule's response headers are buffered here first (applied later,
// presumably after the action loop — not visible in this excerpt).
const resHeaders = {};
for (const action of processRule.actionList) {
const { data } = action;
switch (action.type) {
// Replace the response body with stored mock data.
case 'mockData':
const { dataId } = data;
const content = await mockDataService.getDataFileContent(userID, dataId);
const contentType = await mockDataService.getDataFileContentType(userID, dataId);
ctx.res.body = content;
ctx.res.setHeader('Content-Type', contentType);
break;
// Inject an extra header into the outgoing (proxied) request.
case 'addRequestHeader':
ctx.req.headers[data.headerKey] = data.headerValue;
// NOTE(review): fragment — this arrow function is a custom-protocol
// static-file handler (Electron-style, note the TS `as any` cast); the
// registration call and the end of the catch's switch are not visible.
(request, callback) => {
const urlPath = url.parse(request.url).pathname;
// decodeURI restores percent-encoded characters in the path.
// NOTE(review): no traversal check — encoded ".." segments in
// rootlessPath could escape fileRoot; confirm input is sanitized upstream.
const decodedPath = decodeURI(urlPath);
const rootlessPath = decodedPath.replace(/^\//, "");
const filePath = join(fileRoot, rootlessPath);
try {
// statSync throws (caught below) when the file is missing/unreadable.
var stats = statSync(filePath);
var stream = createReadStream(filePath);
callback({
headers: {
server: "itch",
"content-type": mime.lookup(filePath),
"content-length": stats.size,
// Allow any origin to fetch these assets.
"access-control-allow-origin": "*",
},
statusCode: 200,
data: stream as any, // *sigh*
});
} catch (e) {
logger.warn(`while serving ${request.url}, got ${e.stack}`);
// Map common fs error codes onto HTTP status codes; default 400.
let statusCode = 400;
switch (e.code) {
case "ENOENT":
statusCode = 404;
break;
case "EPERM":
statusCode = 401;
break;
async function uploadFile(s3Client, hostingBucketName, filePath, file) {
const fileStream = createReadStream(filePath);
const contentType = mime.lookup(filePath);
const uploadParams = {
Bucket: hostingBucketName,
Key: file,
Body: fileStream,
ContentType: contentType || 'text/plain',
ACL: 'public-read',
};
const data = await s3Client
.upload(uploadParams)
.promise()
.catch(e => {
console.log('e', e);
});
return data;
/**
 * Read a file from disk and populate (or refresh) its cache entry in `files`.
 *
 * @param {string} filepath - path of the file to load
 * @param {object} options - cacheControl, maxAge, and whether to retain the buffer
 * @param {Map} files - cache keyed by file path
 * @returns {object} the populated cache entry (same object stored in `files`)
 */
function loadFile(filepath, options, files) {
  // Reuse an existing cache entry when present; otherwise create one.
  if (!files.get(filepath)) {
    files.set(filepath, {});
  }
  const entry = files.get(filepath);
  entry.path = filepath;
  const stat = fs.statSync(filepath);
  const contents = fs.readFileSync(filepath);
  entry.cacheControl = options.cacheControl;
  // A previously-set (truthy) maxAge wins; otherwise fall back to the
  // option, defaulting to 0.
  entry.maxAge = entry.maxAge ? entry.maxAge : options.maxAge || 0;
  // mime.lookup returns false for unknown extensions, hence the fallback.
  const detected = mime.lookup(filepath);
  entry.mime = detected || 'application/octet-stream';
  entry.type = entry.mime;
  entry.mtime = stat.mtime;
  entry.length = stat.size;
  // Content hash used for ETag-style validation.
  entry.md5 = crypto.createHash('md5').update(contents).digest('base64');
  // Retain the raw contents only when the caller opted in.
  if (options.buffer) {
    entry.buffer = contents;
  }
  return entry;
}
/**
 * Persist a built file either to the local stage directory (--stage mode)
 * or to the configured S3 bucket.
 * NOTE(review): fragment — the s3.putObject callback body is cut off at
 * the end of this excerpt.
 * @param {string} filePath - destination key / relative path
 * @param {Buffer|string} fileData - file contents to write
 */
module.exports = function saveFile( filePath, fileData ) {
const params = {
Bucket: S3_BUCKET,
Key: filePath,
Body: fileData,
CacheControl: getCache( filePath ),
// NOTE(review): mime.lookup returns false for unknown extensions, so S3
// could receive ContentType: false here — confirm that is intended.
ContentType: mime.lookup( filePath ),
};
if ( argv.stage ) {
const fullPath = path.join( STAGE_PATH, filePath );
// Ensure the parent directory exists before writing the file.
fs.mkdir( path.parse( fullPath ).dir, {
recursive: true,
}, ( writeError ) => {
// NOTE(review): despite the name, this is the mkdir error.
if ( writeError ) {
throw writeError;
}
fs.writeFileSync( fullPath, fileData );
} );
} else {
s3.putObject( params, ( uploadError, data ) => {
// Build an OPF manifest entry string for one file, or null for the OPF
// package document itself.
// NOTE(review): the template literal two lines below `res = [` was
// garbled/truncated in this excerpt — it should contain an `<item …>`
// OPF tag; no comments are placed past it to avoid landing inside the
// broken literal.
const manifestItem = (file) => {
const props = Props.testHTML(file)
let res = null
// Skip the package document; every other file gets a manifest entry.
if (mime.lookup(file.rootPath) !== 'application/oebps-package+xml') {
res = [
`',
]
.filter(Boolean)
.join(' ')
}
return res
}
// NOTE(review): fragment — the enclosing async serializer begins before
// this excerpt; `path`, `fileNames`, `directory` and `fileValue` are
// declared above it.
const files = await Promise.all(
fileNames.sort().map(async fileName => {
const filePath = join(path, fileName);
// Content-derived ID so unchanged files keep a stable docID.
const docID = await checksumPath(filePath);
return { docID, fileName };
}),
);
return {
...directory,
type: "Directory",
files,
};
} else {
const file = await fs.readFile(path);
// Heuristic binary sniff decides between base64 and text encodings.
const isBinary = await isBinaryFile(file, file.length);
const contentType = mime.lookup(path);
if (isBinary) {
fileValue = {
type: "Buffer",
value: file.toString("base64"),
contentType,
};
} else {
// Prefer structured JSON; fall back to a plain string payload when the
// contents do not parse.
try {
fileValue = JSON.parse(file.toString());
} catch (e) {
fileValue = { type: "String", value: file.toString(), contentType };
}
}
}
return fileValue;
}
// NOTE(review): fragment of a router/static-file method — the beginning
// (where `stats` and `staticPath` are computed) is outside this excerpt.
return this.dispatchToRoute();
}
}
// Fall through to normal routing when the path is missing or a directory.
if (!stats || stats?.isDirectory()) {
return this.dispatchToRoute();
}
const response = new Response();
response.setHeader('Content-Length', stats.size);
// Only set caching headers the request did not already carry.
if (!this.request.getHeader('Last-Modified')) response.setHeader('Last-Modified', stats.mtime.toUTCString());
if (!this.request.getHeader('Cache-Control')) {
// maxage is in milliseconds; `| 0` truncates the seconds to an integer.
const directives = [`max-age=${this.publicOptions.maxage / 1000 | 0}`];
response.setHeader('Cache-Control', directives.join(','));
}
// NOTE(review): passing a bare extension works with the mime-types
// package's lookup(); verify which mime module is actually imported.
response.setHeader('Content-Type', mime.lookup(path.extname(staticPath)));
// Hand the read stream to the Response; presumably piped to the socket.
response.setData(fs.createReadStream(staticPath));
return response;
}
}
// NOTE(review): fragment of a Twitter→Matrix bridge handler — `muser`,
// `roomid`, `time`, `content` and `tweet` are defined above this excerpt,
// and the upload/send logic continues past it.
const newmsg = {
userId: muser,
roomId: roomid,
time: time,
type: "m.room.message",
content: content,
};
const media_promises = [];
// Mirror attached photos only when media downloads are enabled in config.
if(tweet.entities.hasOwnProperty("media") && this.media_cfg.enable_download) {
for(const media of tweet.entities.media) {
// Only photos are mirrored; animated_gif/video entries are skipped.
if(media.type !== 'photo') {
continue;
}
const mimetype = mime.lookup(media.media_url_https);
const media_info = {
w: media.sizes.large.w,
h: media.sizes.large.h,
mimetype,
size: 0 // filled in once the upload below resolves
}
media_promises.push(
util.uploadContentFromUrl(
this._bridge,
media.media_url_https
).then( (obj) => {
media_info.size = obj.size;
return {
userId: muser,
roomId: roomid,