How to use the archiver function in the archiver package

To help you get started, we’ve selected a few archiver examples, based on popular ways it is used in public projects.

Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.

github jaydp17 / serverless-plugin-ncc / src / zipper.ts View on Github external
// Creates a zip file at `zipPath` with maximum zlib compression (level 9).
// Resolves with { totalBytes } once the write stream's file descriptor closes.
// NOTE(review): snippet is truncated — `zipContents` is destructured but never
// used in the visible lines, and the end of the Promise body (including the
// archive.finalize() call this pattern normally requires) is not shown.
export default async function createZip({ zipPath, zipContents }: CreateZipOptions) {
  const zipStream = fs.createWriteStream(zipPath);
  // level 9 = maximum (slowest) zlib compression
  const archive = archiver('zip', { zlib: { level: 9 } });

  return new Promise((resolve, reject) => {
    // listen for all archive data to be written
    // 'close' event is fired only when a file descriptor is involved
    zipStream.on('close', () => {
      // pointer() reports the total number of bytes the archiver has emitted
      const totalBytes = archive.pointer();
      console.log(`${totalBytes} total bytes`);
      console.log('archiver has been finalized and the output file descriptor has closed.');
      resolve({ totalBytes });
    });

    // This event is fired when the data source is drained no matter what was the data source.
    // It is not part of this library but rather from the NodeJS Stream API.
    // @see: https://nodejs.org/api/stream.html#stream_event_end
    zipStream.on('end', () => {
      console.log('Data has been drained');
github AlexSapoznikov / node-alexa-starter-kit / src / scripts / deployToLambda.js View on Github external
// Compresses the project (config, node_modules, public, package.json, index.js)
// into a zip at `compressedProjectLocation`, resolving when the output stream
// closes. NOTE(review): snippet is truncated, and a `throw` inside an event
// callback will NOT reject this Promise — it surfaces as an uncaught
// exception. Verify error handling against the full source.
return new Promise((resolve) => {
    console.log('1. Compressing...');  // eslint-disable-line
    const output = createWriteStream(compressedProjectLocation);
    const archive = archiver('zip');

    archive.pipe(output);
    archive
      .directory('config', 'config')
      .directory('node_modules', 'node_modules')
      .directory('public', 'public')
      .file('package.json')
      .file('index.js')
      .finalize();

    // Resolve only once all archive bytes have been flushed to disk.
    output.on('close', () => {
      console.log(' - Compressing was successful');  // eslint-disable-line
      resolve();
    });
    archive.on('error', (err) => {
      throw new Error('Could not compress: ' + err);
github vivekratnavel / omniboard / server / app.js View on Github external
// Builds the list of files to stream into a zip: either the experiment's
// source files or its artifacts, depending on `fileType`.
// NOTE(review): snippet is truncated mid-handler. Also, the 500 response in
// the else branch is not followed by a `return`, so execution appears to fall
// through with `files` unset — confirm against the full source.
if (fileType === FILE_TYPE.SOURCE_FILES) {
        if (result && result.experiment && result.experiment.sources) {
          // sources are stored as [name, file_id] pairs
          files = result.experiment.sources.map(source => {
            return {
              name: source[0],
              file_id: source[1]
            }
          });
        } else {
          res.status(500).json({message: 'Error: Unable to fetch source files for runId: ' + runId});
        }
      } else {
        // fileType: artifacts
        files = result.artifacts;
      }
      const archive = archiver('zip', {
        zlib: { level: 5 } // Sets the compression level.
      });
      const fileName = `${fileType}-${runId}.zip`; // ex: source-files-1.zip
      const dirName = `${fileType}-${runId}`; // ex: source-files-1
      archive.on('error', function(err) {
        /* eslint-disable no-console */
        console.error('An error occurred: ', err);
        res.status(500);
        next(err);
      });
      files.forEach(function(file) {
        // stream each stored file out of GridFS by its id
        const readStream = gfs.createReadStream({
          _id: file.file_id
        });
        //error handling, e.g. file does not exist
        readStream.on('error', function (err) {
github EvHaus / rocketry / src / utils / local.js View on Github external
// Zips `sources` (files and directories, paths made relative to `dir`) into
// the path returned by getZipFilePath(program), resolving with that path once
// the output stream closes. Shows an ora spinner unless debug mode is on.
// NOTE(review): the annotated Promise-executor parameters are Flow syntax,
// not standard TypeScript. Snippet is truncated at the non-directory branch.
return new Promise((
		resolve: (zipPath: any) => void,
		reject: (err: Error) => void
	) => {
		const spinner = ora('Creating deployment archive...');
		if (!program.debug) spinner.start();

		const outputPath = getZipFilePath(program);
		const output = fs.createWriteStream(outputPath);
		const archive = archiver('zip');

		// Archive is fully on disk once the output stream closes.
		output.on('close', () => {
			spinner.succeed('Deployment package created.');
			resolve(outputPath);
		});

		archive.on('error', reject);
		archive.pipe(output);

		// Add the files and directories to the ZIP archive
		for (let i = 0, l = sources.length; i < l; i++) {
			if (fs.lstatSync(sources[i]).isDirectory()) {
				// store directories under their path relative to `dir`
				const dirName = sources[i].replace(dir, '');
				archive.directory(sources[i], dirName);
			} else {
				const fileName = sources[i].replace(dir, '');
github happo / happo.io / src / createStaticPackage.js View on Github external
// Zips content into an in-memory Buffer (no file on disk): a bare Writable
// collects the archiver's chunks and the Promise resolves with the assembled
// Buffer on 'finish'.
// NOTE(review): `data.push(...chunk)` spreads each Buffer chunk byte-by-byte
// into a plain array — for large chunks this risks the engine's argument
// count limit; collecting whole chunks and using Buffer.concat is the usual
// pattern. Snippet is truncated inside the publicFolders loop.
return new Promise((resolve, reject) => {
    const archive = new Archiver('zip');

    const stream = new Writable();
    const data = [];

    // eslint-disable-next-line no-underscore-dangle
    stream._write = (chunk, enc, done) => {
      data.push(...chunk);
      done();
    };
    stream.on('finish', () => {
      const buffer = Buffer.from(data);
      resolve(buffer);
    });
    archive.pipe(stream);

    publicFolders.forEach((folder) => {
github React-Proto / react-proto / src / utils / createWorkspaceFile.util.js View on Github external
// After the metadata stream finishes, zips the workspace (with an archive
// comment and maximum compression) to workspaceFilePath, deletes the
// temporary JSON file, and resolves with the archive path.
// NOTE(review): in the unlink callback, `resolve` is still reached after
// `reject(unlinkErr)` because there is no `return`/`else` — harmless (a
// settled Promise ignores later calls) but confusing. Snippet is truncated.
outputStream.on('finish', () => {
      const rprotoOutput = fs.createWriteStream(slimWorkspaceData.workspaceFilePath);
      const archive = archiver(
        'zip',
        {
          comment: 'React-Proto Workspace archive',
          zlib: { level: 9 }, // Maximum compression level
        },
      );

      // archiver has been finalized and the output file
      // descriptor has closed.
      rprotoOutput.on('close', () => {
        // Clean-up by deleting .json file
        fs.unlink(metaJSONFile, (unlinkErr) => {
          if (unlinkErr) reject(unlinkErr);
          resolve(slimWorkspaceData.workspaceFilePath);
        });
      });
github zodern / mup-aws-beanstalk / src / prepare-bundle.js View on Github external
// Archives the built bundle into a gzipped zip at `bundlePath`, resolving
// when the output stream closes and rejecting on the first archiver error.
// NOTE(review): snippet is truncated inside the 'progress' handler;
// `sourceDir` is computed but its use is not visible here.
return new Promise((resolve, reject) => {
    logStep('=> Archiving Bundle');
    const sourceDir = api.resolvePath(buildLocation, 'bundle');

    const output = fs.createWriteStream(bundlePath);
    const archive = archiver('zip', {
      gzip: true,
      gzipOptions: {
        level: 9
      }
    });

    archive.pipe(output);
    // 'close' fires once the zip is fully written to disk.
    output.once('close', resolve);

    archive.once('error', (err) => {
      logStep('=> Archiving failed:', err.message);
      reject(err);
    });

    // Throttles progress logging to every 10% of entries.
    let nextProgress = 0.1;
    archive.on('progress', ({ entries }) => {
github deskfiler / deskfiler / example-plugins / zipPw / index.js View on Github external
// Zips the given files into `filePath`, optionally password-protecting the
// archive via the 'zip-encryptable' format when a password is supplied.
// `fs` and `path` are injected by the plugin host.
// Resolves once the archive has been fully written; rejects on stream or
// archiver errors.
const archive = ({ filePaths, password, fs, path, filePath }) => new Promise((resolve, reject) => {
  const output = fs.createWriteStream(filePath);

  const compressed = archiver(password ? 'zip-encryptable' : 'zip', {
    zlib: { level: 9 }, // maximum compression
    forceLocalTime: true,
    password,
  });

  // BUG FIX: the original never used `reject` and resolved synchronously,
  // before a single byte had been flushed — callers could read a partial or
  // empty zip, and failures left the promise settled as success.
  compressed.on('error', reject);
  output.on('error', reject);

  compressed.pipe(output);
  filePaths.forEach((fp) => {
    const fileBuffer = fs.readFileSync(fp);
    const { name, ext } = path.parse(fp);
    // re-create each entry at the archive root under its base name
    compressed.append(fileBuffer, { name: `${name}${ext}` });
  });

  // Resolve only once the output stream has closed, i.e. the archive is
  // completely on disk.
  output.on('close', resolve);

  compressed.finalize();
});
github microsoft / BotFramework-Composer / Composer / packages / server / src / models / connector / csharpBotConnector.ts View on Github external
// Zips the `src` directory into `dest`, resolving with the archiver instance
// once the output stream closes. Passing `false` as the second argument to
// directory() places src's contents at the root of the archive.
// NOTE(review): listeners are registered after finalize() but in the same
// synchronous tick, so no events can be missed.
return new Promise((resolve, reject) => {
      const archive = archiver('zip');
      const output = fs.createWriteStream(dest);

      archive.pipe(output);
      archive.directory(src, false);
      archive.finalize();

      output.on('close', () => resolve(archive));
      archive.on('error', err => reject(err));
    });
  };
github nrkno / core-icons / bin / build.js View on Github external
// Builds distributable icon assets: collects the SVG sources, prepares zip
// archivers for SVG and PDF bundles, generates CJS/ESM/JSX/TypeScript modules
// via svgtojs, then writes the RSS feed and bundled scripts into lib/.
// NOTE(review): snippet is truncated — svgFiles, docFiles, svgZipper and
// pdfZipper are created but their use is not visible here. Also both
// core-icons.min.js and core-icons.js receive the same icons.iife output;
// confirm whether a minified variant was intended for .min.js.
function build () {
  const svgFiles = fs.readdirSync('lib').filter((file) => file.endsWith('.svg'))
  const docFiles = ['readme.md', 'lib/readme.md']
  const svgZipper = archiver('zip')
  const pdfZipper = archiver('zip')
  const icons = svgtojs({
    input: 'lib/',
    banner: `@nrk/core-icons v${pkg.version}`,
    scale: 16,
    cjs: 'core-icons.js',
    esm: 'core-icons.mjs',
    dts: 'core-icons.d.ts',
    cjsx: 'jsx/core-icons.js',
    esmx: 'jsx/core-icons.mjs',
    dtsx: 'jsx/core-icons.d.ts'
  })
  
  fs.writeFileSync('lib/core-icons.rss', rss())
  fs.writeFileSync('lib/core-icons.min.js', icons.iife)
  fs.writeFileSync('lib/core-icons.js', icons.iife)

archiver

a streaming interface for archive generation

MIT
Latest version published 2 months ago

Package Health Score

88 / 100
Full package analysis