Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
/**
 * Loads all the metadata for a bucket, gathering the individual S3 calls
 * in parallel (bounded by config.numParallelOps).
 *
 * The getters are wrapped with async.reflectAll so that an individual
 * failure doesn't abort the batch: a bucket with no tags, for example,
 * errors on the tagging call, and we still want every other result.
 *
 * @param path  path identifying the bucket (analyzed via this.path)
 * @param cb    invoked with the accumulated BucketMetadata
 */
loadBucketMetadata(path: string,
                   cb: (metadata: BucketMetadata) => void): void {
  const { bucket } = this.path.analyze(path);
  const params = { Bucket: bucket };
  const funcs = async.reflectAll({
    acceleration: async.apply(this.getBucketAcceleration.bind(this), params),
    acl: async.apply(this.getBucketAcl.bind(this), params),
    encryption: async.apply(this.getBucketEncryption.bind(this), params),
    logging: async.apply(this.getBucketLogging.bind(this), params),
    tagging: async.apply(this.getBucketTagging.bind(this), params),
    versioning: async.apply(this.getBucketVersioning.bind(this), params),
    website: async.apply(this.getBucketWebsite.bind(this), params)
  });
  // now load them all in parallel
  // NOTE: because of reflectAll, 'data' is a dictionary keyed by task name
  // whose entries are { error?, value? } wrappers -- the previous annotation
  // '{ value }' (a single implicit-any property) mistyped it
  async.parallelLimit(funcs, config.numParallelOps,
    (err, data: { [key: string]: { error?: any; value?: any } }) => {
      // NOTE: we are ignoring errors and only recording metadata actually found
      // reason: a bucket with no tags for example errors on the tagging call
      // TODO: while developing, log this nicely
      console.group(`%cloadBucketMetadata('${bucket}')`, `color: #004d40`);
      const metadata = Object.keys(funcs).reduce((acc, key) => {
        // fall back to an empty object when the individual call errored
        acc[key] = data[key].value || { };
        console.log(`%c${key} %c${JSON.stringify(acc[key])}`, 'color: black', 'color: grey');
        return acc;
      }, { } as BucketMetadata);
      console.groupEnd();
      cb(metadata);
    });
}
/**
 * Loads all the metadata for a file (S3 object), gathering the individual
 * S3 calls in parallel (bounded by config.numParallelOps).
 *
 * The getters are wrapped with async.reflectAll so that an individual
 * failure doesn't abort the batch: a file with no tags, for example,
 * errors on the tagging call, and we still want every other result.
 *
 * @param path  path identifying the object; analyzed into bucket/key/version
 * @param cb    invoked with the accumulated FileMetadata
 */
loadFileMetadata(path: string,
                 cb: (metadata: FileMetadata) => void): void {
  const { bucket, prefix, version } = this.path.analyze(path);
  const params = {
    Bucket: bucket,
    Key: prefix,
    VersionId: version
  };
  const funcs = async.reflectAll({
    acl: async.apply(this.getObjectAcl.bind(this), params),
    head: async.apply(this.getObjectHead.bind(this), params),
    tagging: async.apply(this.getObjectTagging.bind(this), params)
  });
  // now load them all in parallel
  // NOTE: because of reflectAll, 'data' is a dictionary keyed by task name
  // whose entries are { error?, value? } wrappers -- the previous annotation
  // '{ value }' (a single implicit-any property) mistyped it
  async.parallelLimit(funcs, config.numParallelOps,
    (err, data: { [key: string]: { error?: any; value?: any } }) => {
      // NOTE: we are ignoring errors and only recording metadata actually found
      // reason: a file with no tags for example errors on the tagging call
      // TODO: while developing, log this nicely
      console.group(`%cloadFileMetadata('${path}')`, `color: #006064`);
      const metadata = Object.keys(funcs).reduce((acc, key) => {
        // fall back to an empty object when the individual call errored
        acc[key] = data[key].value || { };
        console.log(`%c${key} %c${JSON.stringify(acc[key])}`, 'color: black', 'color: grey');
        return acc;
      }, { } as FileMetadata);
      console.groupEnd();
      cb(metadata);
    });
}
/**
 * Pushes updated file (S3 object) metadata back to S3, issuing the
 * individual put calls in parallel (bounded by config.numParallelOps).
 *
 * @param path      path identifying the object; analyzed into bucket/key/version
 * @param metadata  the acl/head/tagging sections to write
 * @param cb        optional; invoked only when every put succeeded
 */
updateFileMetadata(path: string,
                   metadata: FileMetadata,
                   cb?: () => void): void {
  const { bucket, prefix, version } = this.path.analyze(path);
  const params = {
    Bucket: bucket,
    Key: prefix,
    VersionId: version
  };
  const funcs = [
    async.apply(this.putObjectAcl.bind(this), params, metadata.acl),
    async.apply(this.putObjectHead.bind(this), params, path, metadata.head),
    async.apply(this.putObjectTagging.bind(this), params, metadata.tagging)
  ];
  // now update them all in parallel
  async.parallelLimit(funcs, config.numParallelOps, (err, results: any) => {
    // TODO: we'd like the watcher to see this automagically
    // NOTE: touched even on error, so the UI refreshes to actual state
    this.watcher.touch(path);
    if (err)
      this.store.dispatch(new Message({ level: 'error', text: err.toString() }));
    else {
      // TODO: while developing, log this nicely
      // FIX: log the full path (as loadFileMetadata does) -- the bucket
      // alone doesn't identify which file was updated
      console.group(`%cupdateFileMetadata('${path}')`, `color: #1b5e20`);
      Object.keys(metadata).forEach(key => {
        console.log(`%c${key} %c${JSON.stringify(metadata[key])}`, 'color: black', 'color: grey');
      });
      console.groupEnd();
      if (cb) cb();
    }
  });
}
/**
 * Pushes updated bucket metadata back to S3, issuing the individual put
 * calls in parallel (bounded by config.numParallelOps).
 *
 * @param path      path identifying the bucket (analyzed via this.path)
 * @param metadata  the metadata sections to write
 * @param cb        optional; invoked only when every put succeeded
 */
updateBucketMetadata(path: string,
                     metadata: BucketMetadata,
                     cb?: () => void): void {
  const { bucket } = this.path.analyze(path);
  const params = { Bucket: bucket };
  // table of putter/payload pairs, turned into bound thunks below
  const ops: [any, any][] = [
    [this.putBucketAcceleration, metadata.acceleration],
    [this.putBucketAcl, metadata.acl],
    [this.putBucketEncryption, metadata.encryption],
    [this.putBucketLogging, metadata.logging],
    [this.putBucketTagging, metadata.tagging],
    [this.putBucketVersioning, metadata.versioning],
    [this.putBucketWebsite, metadata.website]
  ];
  const funcs = ops.map(([putter, section]) =>
    async.apply(putter.bind(this), params, section));
  // now update them all in parallel
  async.parallelLimit(funcs, config.numParallelOps, (err, results: any) => {
    // TODO: we'd like the watcher to see this automagically
    this.watcher.touch(path);
    if (err) {
      this.store.dispatch(new Message({ level: 'error', text: err.toString() }));
      return;
    }
    // TODO: while developing, log this nicely
    console.group(`%cupdateBucketMetadata('${bucket}')`, `color: #0d47a1`);
    for (const key of Object.keys(metadata))
      console.log(`%c${key} %c${JSON.stringify(metadata[key])}`, 'color: black', 'color: grey');
    console.groupEnd();
    if (cb) cb();
  });
}
const funcs = paths
.map(path => this.path.analyze(path))
.filter((info: PathInfo) => info.isFile)
.map((info: PathInfo) => {
return {
Bucket: info.bucket,
Key: info.prefix,
VersionId: info.version
} as S3.DeleteObjectRequest;
})
.reduce((acc, params) => {
acc.push(async.apply(this.s3.deleteObject, params));
return acc;
}, []);
// now delete them all in parallel
async.parallelLimit(funcs, config.numParallelOps, (err, data: S3.DeleteObjectOutput) => {
this.trace('deleteObjects', paths, err, data);
// TODO: we'd like the watcher to see this automagically
paths.forEach(path => {
const { parent } = this.path.analyze(path);
this.watcher.touch(parent);
});
if (err)
this.store.dispatch(new Message({ level: 'error', text: err.toString() }));
else if (cb)
cb();
});
}
truncated: S3.IsTruncated,
token: S3.Token,
versioning: boolean) => void): void {
const { bucket, prefix } = this.path.analyze(path);
const funcs = {
objects: async.apply(this.s3.listObjectsV2, {
Bucket: bucket,
Delimiter: config.s3.delimiter,
FetchOwner: true,
MaxKeys: config.s3.maxKeys,
Prefix: prefix
} as S3.ListObjectsV2Request),
versioning: async.apply(this.s3.getBucketVersioning, { Bucket: bucket })
};
// now load them all in parallel
async.parallelLimit(funcs, config.numParallelOps, (err, results: any) => {
if (err)
this.store.dispatch(new Message({ level: 'error', text: err.toString() }));
else {
const versioning: S3.GetBucketVersioningOutput = results.versioning;
const data: S3.ListObjectsV2Output = results.objects;
// NOTE: versioning once set can never be turned off
cb(data.Name, data.CommonPrefixes, this.filter(bucket, data.Contents, s3filter), data.IsTruncated, data.NextContinuationToken, !!versioning.Status);
}
});
}
this.s3.listBuckets((err, data: S3.ListBucketsOutput) => {
if (err)
this.store.dispatch(new Message({ level: 'error', text: err.toString()}));
else {
const funcs = data.Buckets.map(bucket => {
const params: S3.GetBucketLocationRequest = { Bucket: bucket.Name };
return async.apply(this.s3.getBucketLocation, params);
});
// now run them all in parallel
async.parallelLimit(funcs, config.numParallelOps, (err, results: S3.GetBucketLocationOutput[]) => {
if (err)
this.store.dispatch(new Message({ level: 'error', text: err.toString() }));
else {
const locations: string[] = results.map(result => {
// an empty string means us-east-1
// @see https://docs.aws.amazon.com/AmazonS3/
// latest/API/RESTBucketGETlocation.html
return result.LocationConstraint || 'us-east-1';
});
cb(data.Buckets, data.Owner, locations);
}
});
}
});
}