How to use the spark-md5.ArrayBuffer function in spark-md5

To help you get started, we've selected a few spark-md5 examples based on popular ways it is used in public projects. Each snippet shows SparkMD5.ArrayBuffer computing an MD5 digest incrementally over file data.

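Before diving into the project examples, here is the core API in isolation: create a SparkMD5.ArrayBuffer instance, append() one or more ArrayBuffer chunks, and call end() to get the hex digest. A minimal, self-contained sketch (the TextEncoder input is just a stand-in for real file data):

import SparkMD5 from 'spark-md5';

// Incremental MD5 over ArrayBuffer chunks: append() as data arrives,
// end() returns the hex digest of everything appended so far.
const buffer = new TextEncoder().encode('hello world').buffer;

const spark = new SparkMD5.ArrayBuffer();
spark.append(buffer);
console.log(spark.end()); // "5eb63bbbe01eeed093cb22bb8f5acdc3"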

github CalderaWP / Caldera-Forms / clients / render / util.js
export const hashFile = (file, callback) => {
	let blobSlice = File.prototype.slice || File.prototype.mozSlice || File.prototype.webkitSlice,
		chunkSize = 2097152,                             // Read in chunks of 2MB
		chunks = Math.ceil(file.size / chunkSize),
		currentChunk = 0,
		spark = new SparkMD5.ArrayBuffer(),
		fileReader = new FileReader();

	fileReader.onload = function (e) {
		spark.append(e.target.result);                   // Append array buffer
		currentChunk++;

		if (currentChunk < chunks) {
			loadNext();
		} else {
			callback(spark.end());
		}
	};

	fileReader.onerror = function () {
		console.warn('oops, something went wrong.');
	};

	function loadNext() {
		let start = currentChunk * chunkSize,
			end = ((start + chunkSize) >= file.size) ? file.size : start + chunkSize;

		fileReader.readAsArrayBuffer(blobSlice.call(file, start, end));
	}

	loadNext();
};
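All of the browser examples on this page follow the same shape: slice the File into fixed-size chunks, read each chunk as an ArrayBuffer, append() it to the hasher, and call end() only after the last chunk has been consumed. Chunking keeps memory usage flat even for very large files.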
github FE-Kits / fractal-components / code / src / showcase / dragbox / chooser / chooser.js
function md5SingleFile(file: object, cb): string {
  const blobSlice =
    File.prototype.slice ||
    File.prototype.mozSlice ||
    File.prototype.webkitSlice;
  const chunkSize = 2097152; // Read in chunks of 2MB
  const chunks = Math.ceil(file.size / chunkSize);
  let currentChunk = 0;
  const spark = new SparkMD5.ArrayBuffer();
  const fileReader = new FileReader();

  function loadNext() {
    const start = currentChunk * chunkSize;
    const end = start + chunkSize >= file.size ? file.size : start + chunkSize;

    fileReader.readAsArrayBuffer(blobSlice.call(file, start, end));
  }

  fileReader.onload = event => {
    // This value tracks the progress of the MD5 computation for the file
    const percent = parseFloat((currentChunk / chunks * 100).toFixed(1));

    spark.append(event.target.result); // Append array buffer

    currentChunk++;

    if (currentChunk < chunks) {
      loadNext();
    } else {
      cb(spark.end());
    }
  };

  loadNext();
}
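What this variant adds is the percent value: because hashing proceeds chunk by chunk, currentChunk / chunks gives a natural progress metric to surface in the UI while a large file is fingerprinted.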
github windsome / windpress / src / utils / upload.js
return new Promise(function(resolve, reject) {
        var totalSize = src.size;
        var chunkSize = opts && opts.chunkSize || DEFAULT_CHUNK_SIZE;
        var file = src;
        var count = Math.ceil (totalSize/chunkSize);
        var current = 0;

        var blobSlice = File.prototype.slice || File.prototype.mozSlice || File.prototype.webkitSlice;
        var startTime = new Date().getTime();
        var spark = new SparkMD5.ArrayBuffer();
        var reader = new FileReader();
        reader.onerror = function (e) {
            reject (new Error("file read error!"));
        }
        reader.onload = function (e) {
            spark.append( e.target.result);                 // append array buffer
            //xdebug ("e:",e);
            var percent = Math.floor(100 * (current * chunkSize + e.loaded) / src.size);
            if (opts && opts.onProgress) opts.onProgress (percent);

            current += 1;
            if (current < count) {
                loadNext();
            } else {
                var hash = spark.end().toUpperCase();
                var endTime = new Date().getTime();
                resolve(hash);
            }
        };

        function loadNext() {
            var start = current * chunkSize;
            var end = (start + chunkSize) >= totalSize ? totalSize : (start + chunkSize);
            reader.readAsArrayBuffer(blobSlice.call(file, start, end));
        }

        loadNext();
});
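One detail worth noting: spark.end() returns a lowercase hex string, so the toUpperCase() call exists only to match whatever format the receiving server compares against.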
github learningequality / studio / contentcuration / contentcuration / frontend / channelEdit / vuex / file / utils.js
return new Promise((resolve, reject) => {
    let fileReader = new FileReader();
    let spark = new SparkMD5.ArrayBuffer();
    let currentChunk = 0;
    let chunks = Math.ceil(file.size / CHUNK_SIZE);
    fileReader.onload = function(e) {
      spark.append(e.target.result);
      currentChunk++;

      if (currentChunk < chunks) {
        loadNext();
      } else {
        resolve(spark.end());
      }
    };
    fileReader.onerror = reject;

    function loadNext() {
      var start = currentChunk * CHUNK_SIZE,
        end = start + CHUNK_SIZE >= file.size ? file.size : start + CHUNK_SIZE;

      fileReader.readAsArrayBuffer(file.slice(start, end));
    }

    loadNext();
});
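Passing reject directly as the onerror handler and resolving with spark.end() maps the FileReader's callback interface cleanly onto a Promise, so callers can simply await the digest.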
github pouchdb-community / pouchdb-load / lib / md5.js
module.exports = function (data, callback) {
  if (!process.browser) {
    var base64 = crypto.createHash('md5').update(data).digest('base64');
    callback(null, base64);
    return;
  }
  var inputIsString = typeof data === 'string';
  var len = inputIsString ? data.length : data.byteLength;
  var chunkSize = Math.min(MD5_CHUNK_SIZE, len);
  var chunks = Math.ceil(len / chunkSize);
  var currentChunk = 0;
  var buffer = inputIsString ? new Md5() : new Md5.ArrayBuffer();

  function append(buffer, data, start, end) {
    if (inputIsString) {
      buffer.appendBinary(data.substring(start, end));
    } else {
      buffer.append(sliceShim(data, start, end));
    }
  }

  function loadNextChunk() {
    var start = currentChunk * chunkSize;
    var end = start + chunkSize;
    if ((start + chunkSize) >= data.size) {
      end = data.size;
    }
    currentChunk++;
    append(buffer, data, start, end);
    if (currentChunk < chunks) {
      loadNextChunk();
    } else {
      callback(null, btoa(buffer.end(true)));
    }
  }

  loadNextChunk();
};
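This module is notable for covering both runtimes: in Node it hashes with crypto.createHash('md5') directly, while in the browser it chooses the string-oriented Md5 (via appendBinary()) or Md5.ArrayBuffer (via append()) based on the input type. Passing true to end() makes spark-md5 return the raw binary digest instead of hex, which btoa() can then base64-encode to match the Node branch's output.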
github rails / rails / activestorage / app / javascript / activestorage / file_checksum.js
create(callback) {
    this.callback = callback
    this.md5Buffer = new SparkMD5.ArrayBuffer
    this.fileReader = new FileReader
    this.fileReader.addEventListener("load", event => this.fileReaderDidLoad(event))
    this.fileReader.addEventListener("error", event => this.fileReaderDidError(event))
    this.readNextChunk()
  }
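This is the checksum class Active Storage uses for direct uploads: it produces the base64-encoded MD5 digest the storage service expects, and readNextChunk(), defined elsewhere in the class, drives the same slice-and-append loop shown in the earlier examples.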
github danielmahon / opencrud-admin / src / components / ui / forms / FormikFileField.js
return new Promise((resolve, reject) => {
      const spark = new SparkMD5.ArrayBuffer();
      const reader = new ChunkedFileReader();
      reader.subscribe('chunk', e => {
        spark.append(e.chunk);
      });
      reader.subscribe('end', e => {
        const rawHash = spark.end();
        resolve(rawHash);
      });
      reader.readChunks(file);
    });
  };
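The example above hands the chunking loop to a ChunkedFileReader helper and only wires spark-md5 to its chunk and end events. Going the other way, when a file is known to be small you can skip chunking entirely with spark-md5's one-shot static helper. A minimal sketch (md5Blob is a hypothetical name, and Blob.prototype.arrayBuffer() requires a reasonably modern browser):

import SparkMD5 from 'spark-md5';

// Hash a small Blob in a single read: no chunking, no FileReader plumbing.
// Prefer the chunked patterns above once files get large.
async function md5Blob(blob) {
  const buffer = await blob.arrayBuffer();   // read the whole Blob into memory
  return SparkMD5.ArrayBuffer.hash(buffer);  // one-shot hex digest
}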

spark-md5

Lightning fast normal and incremental md5 for javascript

License: WTFPL