How to use the hasha.fromFileSync function in hasha

To help you get started, we've selected a few hasha.fromFileSync examples based on popular ways it is used in public projects.

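As a quick reference before the project examples: hasha.fromFileSync(filePath, options) reads a file synchronously and returns its digest as a string (hex-encoded, sha512 by default in current hasha versions). A minimal sketch, hashing the running script itself:

const hasha = require('hasha')

// Hash a file synchronously; with no options this yields a hex-encoded sha512 digest.
const defaultHash = hasha.fromFileSync(__filename)

// Any algorithm supported by Node's crypto module can be requested instead.
const md5 = hasha.fromFileSync(__filename, { algorithm: 'md5' })

console.log(defaultHash, md5)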

github cypress-io / github-action / index.js
const core = require('@actions/core')
const hasha = require('hasha')
const {
  restoreCache,
  saveCache
} = require('cache/lib/index')
const fs = require('fs')
const os = require('os')
const path = require('path')
const quote = require('quote')
const cliParser = require('argument-vector')()

const homeDirectory = os.homedir()

const useYarn = fs.existsSync('yarn.lock')
const lockFilename = useYarn
  ? 'yarn.lock'
  : 'package-lock.json'
const lockHash = hasha.fromFileSync(lockFilename)
const platformAndArch = `${process.platform}-${process.arch}`

// enforce the same NPM cache folder across different operating systems
const NPM_CACHE_FOLDER = path.join(homeDirectory, '.npm')
const NPM_CACHE = (() => {
  const o = {}
  let key = core.getInput('cache-key')

  if (!key) {
    if (useYarn) {
      key = `yarn-${platformAndArch}-${lockHash}`
    } else {
      key = `npm-${platformAndArch}-${lockHash}`
    }
  } else {
    console.log('using custom cache key "%s"', key)
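
The excerpt cuts off before the key is used, but the pattern is: hash the lockfile with hasha.fromFileSync and fold the digest into a platform-specific cache key, so the npm or yarn cache is invalidated exactly when dependencies change. A minimal standalone sketch of that pattern (the key prefixes are illustrative, and it assumes it runs from a project root that contains a lockfile):

const fs = require('fs')
const hasha = require('hasha')

// Hash whichever lockfile the project uses; the digest only changes when
// the dependency tree changes.
const useYarn = fs.existsSync('yarn.lock')
const lockFilename = useYarn ? 'yarn.lock' : 'package-lock.json'
const lockHash = hasha.fromFileSync(lockFilename)

const platformAndArch = `${process.platform}-${process.arch}`
const prefix = useYarn ? 'yarn' : 'npm'

// Exact key for an exact dependency set, plus a broader prefix for partial restores.
const primaryKey = `${prefix}-${platformAndArch}-${lockHash}`
const restoreKeys = `${prefix}-${platformAndArch}-`

console.log({ primaryKey, restoreKeys })
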
github Yoctol / bottender / packages / bottender / src / cli / providers / messenger / attachment.js
success: [],
      error: [],
      unchanged: [],
    };

    print(`Trying to upload ${files.length} files...`);

    for (let i = 0; i < files.length; i++) {
      const _uploadedFiles = jsonfile.readFileSync(pathOfMappingFile);
      const uploadedFiles = _uploadedFiles.messenger || {};

      const name = files[i];
      const basename = path.basename(name);

      const fileMeta = uploadedFiles[basename];
      const checksum = hasha.fromFileSync(name);

      let pageId;
      if (force || !fileMeta || checksum !== fileMeta.checksum) {
        try {
          if (!pageId) {
            // eslint-disable-next-line no-await-in-loop
            const pageInfo = await client.getPageInfo();
            pageId = pageInfo.id;
          }
          // eslint-disable-next-line no-await-in-loop
          const data = await client.uploadAttachment(
            getFileType(name),
            fs.createReadStream(name),
            {
              is_reusable: true,
            }
github Yoctol / bottender / src / cli / actions / uploadImages.js
: path.resolve('uploaded-images.json');

  if (!fs.existsSync(pathOfMappingFile)) {
    jsonfile.writeFileSync(pathOfMappingFile, {});

    print(`initialize ${bold('uploaded-images.json')} for you`);
  }

  for (let i = 0; i < filenames.length; i++) {
    const uploadedImages = jsonfile.readFileSync(pathOfMappingFile);

    const name = filenames[i];
    const basename = path.basename(name);
    const imageMeta = uploadedImages[basename];

    const checksum = hasha.fromFileSync(name);
    if (!imageMeta || checksum !== imageMeta.checksum) {
      // overwrite
      const file = fs.readFileSync(name);
      try {
        const data = await manager.pushFile(
          container,
          shortid.generate(),
          file,
          {
            contentType: fileType(file).mime,
          }
        );
        jsonfile.writeFileSync(
          pathOfMappingFile,
          {
            ...uploadedImages,
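
Both bottender snippets above follow the same pattern: hash each file with hasha.fromFileSync, compare the result against a checksum recorded on a previous run, and only upload files whose contents changed. A minimal standalone sketch of that idea, using a hypothetical manifest file:

const fs = require('fs')
const hasha = require('hasha')

// 'uploaded.json' is a hypothetical manifest mapping file names to checksums.
const manifestPath = 'uploaded.json'
const manifest = fs.existsSync(manifestPath)
  ? JSON.parse(fs.readFileSync(manifestPath, 'utf8'))
  : {}

// A file needs uploading when it has no recorded checksum or its checksum changed.
function needsUpload(filePath) {
  const checksum = hasha.fromFileSync(filePath)
  const previous = manifest[filePath]
  return !previous || previous.checksum !== checksum
}

// After a successful upload, record the new checksum for the next run.
function markUploaded(filePath) {
  manifest[filePath] = { checksum: hasha.fromFileSync(filePath) }
  fs.writeFileSync(manifestPath, JSON.stringify(manifest, null, 2))
}
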
github paazmaya / image-duplicate-remover / lib / read-image.js
}
  catch (error) {
    return false;
  }

  /*
  let meta = identifyImage(filepath);
  if (!meta) {
    // Identifying via GM failed...
    meta = {};
  }
  */

  const meta = {};
  //const color = getPixelColor(filepath);
  const sha256 = hasha.fromFileSync(filepath, {
    algorithm: 'sha256'
  });
  const size = imageSize(filepath);

  const data = Object.assign({
    filepath: filepath,
    hash: sha256,
    filesize: size
  }, meta);

  return data;
};
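
The read-image.js example stores a sha256 digest with each image's metadata so duplicate files can later be found by comparing hashes instead of pixel data. A minimal sketch of that comparison, with placeholder paths:

const hasha = require('hasha')

const sha256Of = filepath => hasha.fromFileSync(filepath, { algorithm: 'sha256' })

// Files with byte-identical contents produce identical digests, whatever
// their names or locations.
const isDuplicate = (fileA, fileB) => sha256Of(fileA) === sha256Of(fileB)

// e.g. isDuplicate('photo.jpg', 'photo-copy.jpg') is true when the copies match byte for byte
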
github bahmutov / npm-install / index.js
// @ts-check
const core = require('@actions/core')
const exec = require('@actions/exec')
const io = require('@actions/io')
const hasha = require('hasha')
const { restoreCache, saveCache } = require('cache/lib/index')
const fs = require('fs')
const os = require('os')
const path = require('path')
const quote = require('quote')

const homeDirectory = os.homedir()

const useYarn = fs.existsSync('yarn.lock')
const lockFilename = useYarn ? 'yarn.lock' : 'package-lock.json'
const lockHash = hasha.fromFileSync(lockFilename)
const platformAndArch = `${process.platform}-${process.arch}`

// enforce the same NPM cache folder across different operating systems
const NPM_CACHE_FOLDER = path.join(homeDirectory, '.npm')
const NPM_CACHE = (() => {
  const o = {}
  if (useYarn) {
    o.inputPath = path.join(homeDirectory, '.cache', 'yarn')
    o.restoreKeys = `yarn-${platformAndArch}-`
  } else {
    o.inputPath = NPM_CACHE_FOLDER
    o.restoreKeys = `npm-${platformAndArch}-`
  }
  o.primaryKey = o.restoreKeys + lockHash
  return o
})()
github pento / testpress / src / services / node-downloader / index.js
}

	if ( ! existsSync( checksumFilename ) ) {
		debug( 'Downloading latest checksum file' );
		const writeFile = createWriteStream( checksumFilename );

		const downloadedChecksums = await fetchWrite( NODE_URL + 'SHASUMS256.txt', writeFile );

		if ( ! downloadedChecksums ) {
			debug( 'Unable to download checksum file' );
			return false;
		}
	}

	debug( 'Checking checksum of the local archive against checksum file' );
	const localSum = hasha.fromFileSync( archiveFilename, { algorithm: 'sha256' } );
	const checksums = readFileSync( checksumFilename ).toString();

	const passed = checksums.split( '\n' ).reduce( ( allowed, line ) => {
		const [ checksum, checkname ] = line.split( /\s+/ ).map( ( value ) => value.trim() );
		if ( checkname === filename && checksum === localSum ) {
			return true;
		}
		return allowed;
	}, false );

	if ( passed ) {
		debug( 'Checksum passed' );
	} else {
		debug( 'Checksum failed' );
	}
github fluidtrends / chunky / cli / executors / package / web.js View on Github external
files.map(file => {
        const f = path.relative(dir, file)
        fingerprint.files[f] = {
          hash: hasha.fromFileSync(file, { algorithm: 'md5' })
        }

        coreutils.logger.ok(`${f}`)
      })
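
Here every packaged file is keyed by its path relative to the build directory and fingerprinted with an md5 digest. A minimal sketch of building such a fingerprint map (the directory and file list are placeholders):

const path = require('path')
const hasha = require('hasha')

// Hypothetical inputs: a base directory and a list of absolute file paths.
const dir = '/tmp/build'
const files = ['/tmp/build/index.html', '/tmp/build/app.js']

const fingerprint = { files: {} }

files.forEach(file => {
  const relative = path.relative(dir, file)
  fingerprint.files[relative] = {
    hash: hasha.fromFileSync(file, { algorithm: 'md5' })
  }
})

console.log(fingerprint)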

hasha

Hashing made simple. Get the hash of a buffer/string/stream/file.

License: MIT
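
Besides fromFileSync, hasha exposes string/buffer and asynchronous entry points; a small sketch of the commonly used ones (defaults such as the sha512 algorithm and hex encoding may differ between major versions):

const hasha = require('hasha')

// Hash an in-memory string or buffer.
const fromString = hasha('unicorn', { algorithm: 'sha256' })

// Hash a file asynchronously (returns a Promise) instead of blocking.
hasha.fromFile(__filename, { algorithm: 'sha256' }).then(hash => {
  console.log('async:', hash)
})

// The synchronous variant used throughout the examples above.
console.log('sync:', hasha.fromFileSync(__filename, { algorithm: 'sha256' }))
console.log('string:', fromString)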