How to use the twitter.common.dirutil.safe_mkdir function in twitter

To help you get started, we’ve selected a few twitter examples based on popular ways safe_mkdir is used in public projects.

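Before the project examples, here is a minimal sketch of the basic call, assuming the conventional twitter.common.dirutil signature safe_mkdir(directory, clean=False): the directory and any missing parents are created, and an already existing directory is not an error. The path below is hypothetical.

from twitter.common.dirutil import safe_mkdir

# Hypothetical working directory used purely for illustration.
workdir = '/tmp/example-workdir/classes'

# Creates the directory and any missing parents; a no-op if it already exists.
safe_mkdir(workdir)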

github apache/incubator-retired-cotton/mysos/testing/mysos_test_client.py (view on GitHub)
def validate_common_options(options):
    if not options.api_host:
      app.error("--api_host is required")

    if not options.api_port:
      app.error("--api_port is required")

    if not options.cluster_name:
      app.error("--cluster is required")

    if not options.password_file:
      app.error("--password_file is required")
    log.info("Using --password_file=%s" % options.password_file)
    safe_mkdir(os.path.dirname(options.password_file))
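
A pattern that recurs throughout these examples is ensuring a file's parent directory exists before the file is written, by passing os.path.dirname(path) to safe_mkdir. A minimal sketch of that pattern, using a hypothetical path:

import os

from twitter.common.dirutil import safe_mkdir

state_path = '/tmp/example-app/state/scheduler.state'  # hypothetical path
safe_mkdir(os.path.dirname(state_path))  # make sure /tmp/example-app/state exists
with open(state_path, 'wb') as f:
  f.write(b'...')  # placeholder payload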
github twitter-archive/commons/src/python/twitter/pants/tasks/protobuf_gen.py (view on GitHub)
def genlang(self, lang, targets):
self.protobuf_binary = select_binary(
      self.protoc_supportdir,
      self.protoc_version,
      'protoc',
      self.context.config
    )

    bases, sources = self._calculate_sources(targets)

    if lang == 'java':
      safe_mkdir(self.java_out)
      gen = '--java_out=%s' % self.java_out
    elif lang == 'python':
      safe_mkdir(self.py_out)
      gen = '--python_out=%s' % self.py_out
    else:
      raise TaskError('Unrecognized protobuf gen lang: %s' % lang)

    args = [self.protobuf_binary, gen]

    for base in bases:
      args.append('--proto_path=%s' % base)

    args.extend(sources)
    log.debug('Executing: %s' % ' '.join(args))
    process = subprocess.Popen(args)
    result = process.wait()
github apache/aurora/src/main/python/twitter/thermos/core/helper.py (view on GitHub)
def finalize_task(cls, spec):
    active_task = spec.given(state='active').getpath('task_path')
    finished_task = spec.given(state='finished').getpath('task_path')
    is_active, is_finished = os.path.exists(active_task), os.path.exists(finished_task)
    if not is_active:
      raise cls.Error('Cannot finalize task with no "active" record!')
    elif is_finished:
      raise cls.Error('Cannot finalize task with "finished" record!')
    safe_mkdir(os.path.dirname(finished_task))
    os.rename(active_task, finished_task)
    os.utime(finished_task, None)
github pantsbuild/pants/src/python/twitter/pants/tasks/jvm_compile.py (view on GitHub)
def __init__(self, context, workdir):
    NailgunTask.__init__(self, context, workdir=workdir)
    concrete_class = self.__class__
    config_section = concrete_class._config_section

    def get_lang_specific_option(opt):
      full_opt_name = self.language() + '_' + opt
      return getattr(context.options, full_opt_name, None)

    # Various working directories.
    workdir = context.config.get(config_section, 'workdir')
    self._classes_dir = os.path.join(workdir, 'classes')
    self._resources_dir = os.path.join(workdir, 'resources')
    self._analysis_dir = os.path.join(workdir, 'analysis')

    safe_mkdir(self._classes_dir)
    safe_mkdir(self._analysis_dir)

    # A temporary, but well-known, dir to munge analysis/dependency files in before caching.
    # It must be well-known so we know where to find the files when we retrieve them from the cache.
    self._analysis_tmpdir = os.path.join(self._analysis_dir, 'artifact_cache_tmpdir')

    # Compiler options.
    self._args = context.config.getlist(config_section, 'args')
    if get_lang_specific_option('compile_warnings'):
      self._args.extend(context.config.getlist(config_section, 'warning_args'))
    else:
      self._args.extend(context.config.getlist(config_section, 'no_warning_args'))

    # The rough number of source files to build in each compiler pass.
    self._partition_size_hint = get_lang_specific_option('partition_size_hint')
    if self._partition_size_hint == -1:
github pantsbuild/pants/src/python/twitter/pants/tasks/jvmdoc_gen.py (view on GitHub)
def _generate_combined(self, classpath, targets, create_jvmdoc_command):
    gendir = os.path.join(self._output_dir, 'combined')
    if targets:
      safe_mkdir(gendir, clean=True)
      command = create_jvmdoc_command(classpath, gendir, *targets)
      if command:
        create_jvmdoc(command, gendir)
    if self.open:
      binary_util.ui_open(os.path.join(gendir, 'index.html'))
github apache/aurora/src/main/python/apache/thermos/core/process.py (view on GitHub)
def _prepare_fork(self):
    user, current_user = self._getpwuid()
    if self._user:
      if user != current_user and os.geteuid() != 0:
        raise self.PermissionError('Must be root to run processes as other users!')
    self._fork_time = self._platform.clock().time()
    self._setup_ckpt()
    # Since the forked process is responsible for creating log files, it needs to own the log dir.
    safe_mkdir(self.process_logdir())
    os.chown(self.process_logdir(), user.pw_uid, user.pw_gid)
github twitter-archive/commons/src/python/twitter/pants/tasks/jvmdoc_gen.py (view on GitHub)
def _generate_combined(self, classpath, targets, create_jvmdoc_command):
    gendir = os.path.join(self._output_dir, 'combined')
    if targets:
      safe_mkdir(gendir, clean=True)
      command = create_jvmdoc_command(classpath, gendir, *targets)
      if command:
        create_jvmdoc(command, gendir)
    if self.open:
      binary_util.ui_open(os.path.join(gendir, 'index.html'))
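
The two jvmdoc_gen snippets above (and the ide_gen example further down) pass clean=True. Assuming the conventional twitter.common.dirutil behavior, this removes the directory and its contents before recreating it, so each run starts from an empty output directory. A minimal sketch with a hypothetical output path:

import os

from twitter.common.dirutil import safe_mkdir

gendir = os.path.join('/tmp/example-output', 'combined')  # hypothetical path
safe_mkdir(gendir, clean=True)  # wipe any previous contents, then recreate the directory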
github pantsbuild/pants/src/python/twitter/pants/base/artifact_cache.py (view on GitHub)
def try_insert(self, cache_key, build_artifacts):
    cache_dir = self._cache_dir_for_key(cache_key)
    safe_rmtree(cache_dir)
    for artifact in build_artifacts or ():
      rel_path = os.path.relpath(artifact, self.artifact_root)
      assert not rel_path.startswith('..'), \
        'Artifact %s is not under artifact root %s' % (artifact, self.artifact_root)
      artifact_dest = os.path.join(cache_dir, rel_path)
      safe_mkdir(os.path.dirname(artifact_dest))
      if os.path.isdir(artifact):
        shutil.copytree(artifact, artifact_dest)
      else:
        shutil.copy(artifact, artifact_dest)
github apache/incubator-retired-cotton/mysos/scheduler/state.py (view on GitHub)
def dump_scheduler_state(self, state):
    if not isinstance(state, Scheduler):
      raise TypeError("'state' should be an instance of Scheduler")
    path = self._get_scheduler_state_path()
    safe_mkdir(os.path.dirname(path))

    try:
      with open(path, 'wb') as f:
        cPickle.dump(state, f)
    except PickleError as e:
      raise self.Error('Failed to persist Scheduler: %s' % e)
github pantsbuild/pants/src/python/pants/tasks/ide_gen.py (view on GitHub)
def map_internal_jars(self, targets):
    internal_jar_dir = os.path.join(self.gen_project_workdir, 'internal-libs')
    safe_mkdir(internal_jar_dir, clean=True)

    internal_source_jar_dir = os.path.join(self.gen_project_workdir, 'internal-libsources')
    safe_mkdir(internal_source_jar_dir, clean=True)

    internal_jars = self.context.products.get('jars')
    internal_source_jars = self.context.products.get('source_jars')
    for target in targets:
      mappings = internal_jars.get(target)
      if mappings:
        for base, jars in mappings.items():
          if len(jars) != 1:
            raise TaskError('Unexpected mapping, multiple jars for %s: %s' % (target, jars))

          jar = jars[0]
          cp_jar = os.path.join(internal_jar_dir, jar)
          shutil.copy(os.path.join(base, jar), cp_jar)

          cp_source_jar = None
          mappings = internal_source_jars.get(target)