How to use yarn - 8 common examples

To help you get started, we’ve selected a few yarn examples, based on popular ways it is used in public projects.


github greenkeeperio / greenkeeper / test / lib / get-files.js
const fileList = [
    'package.json',
    'backend/package.json'
  ]

  const files = await getFiles({ installationId: '123', fullName: 'owner/repo', files: fileList, log })
  // returns an Object with the 5 file types
  expect(Object.keys(files)).toHaveLength(5)
  // The Object has 2 files at the `package.json` key
  expect(files['package.json']).toHaveLength(2)
  expect(files['package.json'][0].path).toEqual('package.json')
  expect(files['package.json'][0].content).toEqual('eyJuYW1lIjoidGVzdCJ9')
  expect(files['package.json'][1].path).toEqual('backend/package.json')
  expect(files['package.json'][1].content).toEqual('eyJuYW1lIjoidGVzdCJ9')
  expect(files['yarn.lock']).toHaveLength(2)
})
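
In this test, getFiles resolves to an object keyed by file type, where each key holds an array of { path, content } entries and content is base64-encoded. A minimal sketch of reading one of those entries back out (decodePkg is a hypothetical helper, not part of greenkeeper):

const decodePkg = (file) => JSON.parse(Buffer.from(file.content, 'base64').toString('utf8'))

// 'eyJuYW1lIjoidGVzdCJ9' is base64 for {"name":"test"}, matching the fixture above
const pkg = decodePkg({ path: 'package.json', content: 'eyJuYW1lIjoidGVzdCJ9' })
console.log(pkg.name) // 'test'
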
github patternfly / patternfly-react / scripts / incrementalBuild.js
async function getInvalidPackages() {
  const packages = (await new Project(__dirname).getPackages())
    .filter(p => p.scripts.build) // Only packages that have a build target
    .filter(p => (isPf3 ? p.location.indexOf('patternfly-3') > 0 || commonPackages.indexOf(p.name) >= 0 : true)) // Based off argv
    .filter(p => (isPf4 ? p.location.indexOf('patternfly-4') > 0 || commonPackages.indexOf(p.name) >= 0 : true)); // Based off argv

  for (let p of packages) {
    p.hash = hashPackageSrc(p.location, p.name);
    p.valid = cache && cache[p.name] === p.hash;
    if (p.valid) {
      console.info('Skipping', p.name, '(already built).');
    }
  }

  // Invalidate everything if any deps change.
  if (cache['yarn.lock'] !== yarnLockHash) {
    return packages;
  }

  return packages.filter(p => !p.valid);
}
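
getInvalidPackages treats a changed yarn.lock as a reason to rebuild everything: it compares yarnLockHash (computed elsewhere in the script) with the value stored in the cache. A minimal sketch of how such a lockfile hash could be computed with Node's crypto module (an assumption, not the script's actual implementation):

const crypto = require('crypto');
const fs = require('fs');

// Hash yarn.lock so that any dependency change invalidates every cached build
function hashFile(filePath) {
  return crypto
    .createHash('sha256')
    .update(fs.readFileSync(filePath))
    .digest('hex');
}

const yarnLockHash = hashFile('yarn.lock');
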
github brodybits / prettierx / scripts / build / cache.js
Cache.prototype.load = async function() {
  // This should never throw; if it does, let it fail the build
  const lockfile = await readFile("yarn.lock", "utf-8");
  const lockfileHash = hashString(lockfile);
  this.updated.checksums["yarn.lock"] = lockfileHash;

  try {
    const manifest = await readFile(this.manifest, "utf-8");
    const { version, checksums, files } = JSON.parse(manifest);

    // Ignore the cache if the version changed
    assert.equal(this.version, version);

    assert.ok(typeof checksums === "object");
    // If yarn.lock changed, rebuild everything
    assert.equal(lockfileHash, checksums["yarn.lock"]);
    this.checksums = checksums;

    assert.ok(typeof files === "object");
    this.files = files;

    for (const files of Object.values(this.files)) {
      assert.ok(Array.isArray(files));
    }
  } catch (err) {
    this.checksums = {};
    this.files = {};
  }
};
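
Cache.prototype.load expects the manifest to be JSON of the shape { version, checksums, files }; if the version or the yarn.lock checksum no longer matches, the assertions throw and the catch block resets the cache to empty. A sketch of a manifest that would pass those assertions (the file names, hashes, and the meaning of the files map are illustrative only):

// Illustrative manifest contents; real hashes come from hashString
const manifest = {
  version: "1.0",
  checksums: {
    "yarn.lock": "1b9a3c…",   // must equal the freshly computed lockfileHash
    "src/index.js": "77ee02…"
  },
  files: {
    // every value must be an array, as checked in the loop above
    "dist/index.js": ["src/index.js"]
  }
};
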
github greenkeeperio / greenkeeper / lib / create-branch.js
const createLockfileCommits = async ({ commits, repoDoc, installationId, commitMessageTemplates, transforms, owner, repoName, branch }, log) => {
  const ghqueue = githubQueue(installationId)
  const lockfileCommits = []

  // we need to iterate over every changed package file, not every package file commit
  // we reverse because we want the most recent commit with all the changes to the file (the last one)
  // we clone because we don’t actually want to do the commits backwards
  const dedupedCommits = _.uniqBy(_.clone(commits).reverse(), commit => commit.path)
  // For yarn workspaces, we have to send the updated packages object (after applying all the update commits).
  // So we need to iterate over all commits, replace all updated packages in the packages object,
  // send all of them (old and updated together) to the exec server, tell it in which directory to run
  // yarn install, and get the old yarn lock from that dir as well
  let updatedPackages = _.clone(repoDoc.packages)
  let workspaceRootsToUpdate = []
  let packageJsonPathsWithWorkspaceDefinitions = []
  const isYarn = repoDoc.files['yarn.lock'].length > 0
  if (isYarn) {
    packageJsonPathsWithWorkspaceDefinitions = Object.keys(repoDoc.packages).filter(path => {
      const packageJson = repoDoc.packages[path]
      const workspaceDefinition = packageJson.workspaces
      // either it has a simple workspace definition…
      if (workspaceDefinition && workspaceDefinition.length > 0) {
        return path
      }
      // or a complex definition
      if (workspaceDefinition && workspaceDefinition.packages && workspaceDefinition.packages.length > 0) {
        return path
      }
    })
  }
  const execTokens = await getExecTokens({
    installationId,
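
The filter above accepts a package.json when its workspaces field is either a plain array of globs or an object with a packages array, the two forms yarn understands for workspace definitions. The same check as a standalone sketch (hasWorkspaceDefinition is a hypothetical helper, not greenkeeper code):

const hasWorkspaceDefinition = (packageJson) => {
  const ws = packageJson.workspaces
  // simple definition: "workspaces": ["packages/*"]
  if (Array.isArray(ws) && ws.length > 0) return true
  // complex definition: "workspaces": { "packages": ["packages/*"] }
  return Boolean(ws && Array.isArray(ws.packages) && ws.packages.length > 0)
}

hasWorkspaceDefinition({ workspaces: ['packages/*'] })               // true
hasWorkspaceDefinition({ workspaces: { packages: ['packages/*'] } }) // true
hasWorkspaceDefinition({ name: 'no-workspaces' })                    // false
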
github apache / ambari / ambari-web / app / utils / configs / defaults_providers / hive_defaults_provider.js
getDefaults: function (localDB) {
    var configs = this._super(localDB);
    if (configs['yarn.scheduler.maximum-allocation-mb'] != null && configs['mapreduce.map.memory.mb'] != null
      && configs['mapreduce.reduce.memory.mb'] != null) {
      var containerSize = configs['mapreduce.map.memory.mb'] > 2048 ? configs['mapreduce.map.memory.mb'] : configs['mapreduce.reduce.memory.mb'];
      containerSize = Math.min(configs['yarn.scheduler.maximum-allocation-mb'], containerSize);
      configs['hive.auto.convert.join.noconditionaltask.size'] = Math.round(containerSize / 3) * 1048576; // MB to Bytes
      configs['hive.tez.java.opts'] = "-server -Xmx" + Math.round(0.8 * containerSize) + "m -Djava.net.preferIPv4Stack=true -XX:NewRatio=8 -XX:+UseNUMA -XX:+UseParallelGC";
      configs['hive.tez.container.size'] = containerSize;
    } else {
      jQuery.extend(configs, this.get('configsTemplate'));
    }
    return configs;
  }
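
The sizing logic is easier to follow with concrete numbers. A worked sketch with hypothetical values (these are sample inputs, not Ambari defaults):

// Sample inputs, all in MB
var configs = {
  'yarn.scheduler.maximum-allocation-mb': 8192,
  'mapreduce.map.memory.mb': 4096,
  'mapreduce.reduce.memory.mb': 2048
};

var containerSize = configs['mapreduce.map.memory.mb'] > 2048
  ? configs['mapreduce.map.memory.mb']        // map memory wins: 4096
  : configs['mapreduce.reduce.memory.mb'];
containerSize = Math.min(configs['yarn.scheduler.maximum-allocation-mb'], containerSize); // still 4096

Math.round(containerSize / 3) * 1048576; // hive.auto.convert.join.noconditionaltask.size = 1431306240 bytes
Math.round(0.8 * containerSize);         // -Xmx3277m inside hive.tez.java.opts
containerSize;                           // hive.tez.container.size = 4096
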
github apache / ambari / contrib / views / tez / src / main / resources / ui / ambari-scripts / init-view.js
"//" +
      window.location.hostname +
      (window.location.port ? ':' + window.location.port: ''),
      urlParts = getViewInfoFromPathname(),
      // .replace() call is necessary to work properly behind the proxy
      resourcesPrefix = '/api/v1/views/%@/versions/%@/instances/%@/resources/'.replace(/^\//, '').fmt(
        urlParts[0],
        urlParts[1],
        urlParts[2]
      );

  parameters = parameters || {};

  return {
    host: host,
    yarnProtocol: parameters["yarn.protocol"],
    resourcesPrefix: resourcesPrefix,
    namespaces: {
      webService: {
        timeline: '%@atsproxy/ws/v1/timeline'.fmt(resourcesPrefix),
        appHistory: '%@atsproxy/ws/v1/applicationhistory'.fmt(resourcesPrefix),
        rm: '%@rmproxy/ws/v1/cluster'.fmt(resourcesPrefix),
        am: '%@rmproxy/proxy/{app_id}/ws/v{version:2}/tez'.fmt(resourcesPrefix)
      },
      web: {
        rm: '%@rmredirect/cluster'.fmt(resourcesPrefix)
      },
    }
  };
}
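
fmt() here is Ember's string-format helper, which substitutes each %@ placeholder in order, so every namespace ends up nested under the view instance's resources endpoint. With hypothetical URL parts (the view name, version, and instance are made up), the result looks like this:

// Assuming urlParts = ['TEZ', '0.7.0', 'TEZ_CLUSTER_INSTANCE'], the prefix resolves to:
var resourcesPrefix = 'api/v1/views/TEZ/versions/0.7.0/instances/TEZ_CLUSTER_INSTANCE/resources/';

// ...and the namespaces become, for example:
// webService.timeline -> resourcesPrefix + 'atsproxy/ws/v1/timeline'
// web.rm              -> resourcesPrefix + 'rmredirect/cluster'
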
github greenkeeperio / greenkeeper / test / jobs / create-initial-branch.js
.reply(200, {
        path: 'package-lock.json',
        name: 'package-lock.json',
        content: encodePkg({ who: 'cares' })
      })
      .get('/repos/finnp/test')
      .reply(200, {
        default_branch: 'custom'
      })

    const newJob = await createInitialBranch({ repositoryId: 44 })
    expect(newJob).toBeFalsy()
    const repodoc = await repositories.get('44')
    expect(repodoc.files['package.json']).not.toHaveLength(0)
    expect(repodoc.files['package-lock.json']).not.toHaveLength(0)
    expect(repodoc.files['yarn.lock']).toHaveLength(0)
    expect(repodoc.enabled).toBeTruthy()
  })
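
encodePkg in the mocked replies presumably mirrors how the GitHub contents API delivers files, i.e. as base64-encoded JSON; that matches the 'eyJuYW1lIjoidGVzdCJ9' fixture in the get-files example above, which decodes to {"name":"test"}. A minimal sketch under that assumption (not necessarily the test suite's actual helper):

const encodePkg = (pkg) => Buffer.from(JSON.stringify(pkg)).toString('base64')

encodePkg({ name: 'test' }) // 'eyJuYW1lIjoidGVzdCJ9'
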
github apache / ambari / contrib / views / tez / src / main / resources / ui / scripts / init-ambari-view.js
(window.location.port ? ':' + window.location.port: ''),
      urlParts = location.pathname.split('/'),
      resourcesPrefix = 'api/v1/views/%@/versions/%@/instances/%@/resources/'.fmt(
        urlParts[2],
        urlParts[3],
        urlParts[4]
      );

  parameters = parameters || {};

  $.extend(true, App.Configs, {
    envDefaults: {
      isStandalone: false,
      timelineBaseUrl: host,
      RMWebUrl: host,
      yarnProtocol: parameters["yarn.protocol"]
    },
    restNamespace: {
      timeline: '%@atsproxy/ws/v1/timeline'.fmt(resourcesPrefix),
      applicationHistory: '%@atsproxy/ws/v1/applicationhistory'.fmt(resourcesPrefix),

      aminfo: '%@rmproxy/proxy/__app_id__/ws/v1/tez'.fmt(resourcesPrefix),
      aminfoV2: '%@rmproxy/proxy/__app_id__/ws/v2/tez'.fmt(resourcesPrefix),
      cluster: '%@rmproxy/ws/v1/cluster'.fmt(resourcesPrefix)
    },
    otherNamespace: {
      cluster: '%@rmredirect/cluster'.fmt(resourcesPrefix)
    }
  });

  App.TimelineRESTAdapter.reopen({
    namespace: App.Configs.restNamespace.timeline