How to use the deprecated.production.util module in Deprecated

To help you get started, we’ve selected a few Deprecated examples based on popular ways it is used in public projects. All of the snippets below come from the deprecated/production package of constantinpape/cluster_tools.

github constantinpape / cluster_tools / deprecated / production / components / threshold_components.py
        self._prepare_jobs(n_jobs, n_blocks, config)

        # submit the jobs
        if self.run_local:
            # this only works in python 3 ?!
            with futures.ProcessPoolExecutor(n_jobs) as tp:
                tasks = [tp.submit(self._submit_job, job_id)
                         for job_id in range(n_jobs)]
                [t.result() for t in tasks]
        else:
            for job_id in range(n_jobs):
                self._submit_job(job_id)

        # wait till all jobs are finished
        if not self.run_local:
            util.wait_for_jobs('papec')

        # check the job outputs
        processed_blocks, n_components, times = self._collect_outputs(n_blocks)
        assert len(processed_blocks) == len(n_components) == len(times)
        success = len(processed_blocks) == n_blocks

        # write output file if we succeed, otherwise write partial
        # success to different file and raise exception
        if success:
            out = self.output()
            # TODO does 'out' support with block?
            fres = out.open('w')
            json.dump({'n_components': n_components,
                       'times': times}, fres)
            fres.close()
        else:
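All of these tasks eventually block on util.wait_for_jobs('papec') until the cluster jobs of the given user have finished. The helper's implementation isn't shown in these excerpts; below is a minimal sketch of what such a function could look like on an LSF cluster. The bjobs polling is an assumption for illustration, not the library's actual code.

import subprocess
import time


def wait_for_jobs(user, poll_interval=10):
    # Hypothetical sketch of deprecated.production.util.wait_for_jobs;
    # assumes an LSF cluster where `bjobs -u <user>` lists active jobs.
    while True:
        out = subprocess.check_output(['bjobs', '-u', user]).decode()
        # with no unfinished jobs left, bjobs prints nothing to stdout
        if not out.strip():
            break
        time.sleep(poll_interval)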
github constantinpape / cluster_tools / deprecated / production / watershed / filling_watershed.py
def _submit_jobs(self, n_jobs, prefix):
        from .. import util
        if self.run_local:
            # this only works in python 3 ?!
            with futures.ProcessPoolExecutor(n_jobs) as tp:
                tasks = [tp.submit(self._submit_job, job_id, prefix)
                         for job_id in range(n_jobs)]
                [t.result() for t in tasks]
        else:
            for job_id in range(n_jobs):
                self._submit_job(job_id, prefix)

        # wait till all jobs are finished
        if not self.run_local:
            util.wait_for_jobs('papec')
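The local-versus-cluster branch above recurs throughout these tasks: locally, the per-job function runs in a ProcessPoolExecutor, where calling t.result() blocks and re-raises any exception from a worker; on the cluster, each _submit_job call issues its own submission. The same pattern as a standalone helper, for illustration only:

from concurrent import futures


def run_jobs(submit_job, n_jobs, run_local):
    # Generic version of the dispatch pattern used by these tasks
    # (illustrative, not part of the library).
    if run_local:
        with futures.ProcessPoolExecutor(n_jobs) as tp:
            tasks = [tp.submit(submit_job, job_id)
                     for job_id in range(n_jobs)]
            # result() blocks and re-raises any exception from a worker
            for t in tasks:
                t.result()
    else:
        # on the cluster, each submit_job issues its own bsub call
        for job_id in range(n_jobs):
            submit_job(job_id)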
github constantinpape / cluster_tools / deprecated / production / stitching / compute_merge_votes.py
def run(self):
        from .. import util

        # copy the script to the temp folder and replace the shebang
        file_dir = os.path.dirname(os.path.abspath(__file__))
        util.copy_and_replace(os.path.join(file_dir, 'compute_merge_votes.py'),
                              os.path.join(self.tmp_folder, 'compute_merge_votes.py'))

        with open(self.config_path) as f:
            config = json.load(f)
            block_shape = config['block_shape2']
            # chunks = tuple(config['chunks'])
            # we need to pop the block shift from the config
            # because the first blocking is without block shift !
            block_shift = config.pop('block_shift')
            roi = config.get('roi', None)

        # find the shape and number of blocks
        f5 = z5py.File(self.path)
        shape = f5[self.ws_key].shape

        # for debugging
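util.copy_and_replace copies the worker script into the temporary folder and, as the comment says, replaces its shebang so the copy runs with the cluster's Python. A hedged sketch of such a helper; the default shebang here is an assumed parameter, not the library's actual value:

def copy_and_replace(src, dst, shebang='#! /usr/bin/env python'):
    # Hypothetical sketch of util.copy_and_replace; the real helper in
    # deprecated.production.util may behave differently.
    with open(src) as f:
        lines = f.read().split('\n')
    # swap the shebang (first line) for the one we want on the cluster
    lines[0] = shebang
    with open(dst, 'w') as f:
        f.write('\n'.join(lines))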
github constantinpape / cluster_tools / deprecated / production / evaluation / skeleton_evaluation.py
self.seg_key,
                                                                                  ' '.join(self.skeleton_keys),
                                                                                  self.n_threads,
                                                                                  self.tmp_folder)
        log_file = os.path.join(self.tmp_folder, 'logs', 'log_skeleton_eval.log')
        err_file = os.path.join(self.tmp_folder, 'error_logs', 'err_skeleton_eval.err')
        bsub_command = 'bsub -n %i -J compute_skeleton_eval -We %i -o %s -e %s \'%s\'' % (self.n_threads,
                                                                                          self.time_estimate,
                                                                                          log_file, err_file, command)

        # submit the job
        if self.run_local:
            subprocess.call([command], shell=True)
        else:
            subprocess.call([bsub_command], shell=True)
            util.wait_for_jobs('papec')

        # load and check the output
        out_path = self.output().path
        try:
            with open(out_path) as f:
                evaluation = json.load(f)
            for key, eva in evaluation.items():
                print("Skeleton evaliation for %s:" % key)
                print("Correct:     ", eva['correct'])
                print("Split:       ", eva['split'])
                print("Merge:       ", eva['merge'])
                print("Merge Points:", eva['n_merges'])
        except Exception:
            raise RuntimeError("SkeletonEvaluationTask failed")
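The bsub wrapper above is built with %-formatting and single quotes around the inner command, which breaks as soon as that command itself contains a quote. If you adapt this pattern, shlex.quote is a safer way to embed the command; a sketch:

import shlex


def make_bsub_command(command, job_name, n_threads, time_estimate,
                      log_file, err_file):
    # Illustrative variant of the string formatting above; shlex.quote
    # keeps the wrapped command intact even if it contains quotes.
    return ('bsub -n %i -J %s -We %i -o %s -e %s %s'
            % (n_threads, job_name, time_estimate,
               log_file, err_file, shlex.quote(command)))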
github constantinpape / cluster_tools / deprecated / production / components / node_assignment.py
def run(self):
        from .. import util

        # copy the script to the temp folder and replace the shebang
        file_dir = os.path.dirname(os.path.abspath(__file__))
        script_path = os.path.join(self.tmp_folder, 'node_assignment.py')
        util.copy_and_replace(os.path.join(file_dir, 'node_assignment.py'), script_path)

        # find the number of blocks
        with open(self.config_path) as f:
            config = json.load(f)
            block_shape = config['block_shape']
            n_threads = config['n_threads']

        f = z5py.File(self.path)
        ds = f[self.out_key]
        shape = ds.shape
        blocking = nifty.tools.blocking([0, 0, 0], shape, block_shape)
        n_blocks = blocking.numberOfBlocks

        n_jobs = min(n_blocks, self.max_jobs)

        # prepare the job
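nifty.tools.blocking tiles the dataset shape with block_shape and reports the block count, which then caps the number of jobs at max_jobs. The count is just a per-axis ceiling division, so you can sanity-check it without nifty:

import math


def number_of_blocks(shape, block_shape):
    # Blocks needed to tile `shape` with `block_shape` (ceil per axis);
    # should match nifty.tools.blocking's numberOfBlocks for origin [0, 0, 0].
    n_blocks = 1
    for dim_len, block_len in zip(shape, block_shape):
        n_blocks *= math.ceil(dim_len / block_len)
    return n_blocks


# e.g. a (100, 1024, 1024) volume in (50, 512, 512) blocks -> 2 * 2 * 2 = 8
assert number_of_blocks((100, 1024, 1024), (50, 512, 512)) == 8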
github constantinpape / cluster_tools / deprecated / production / multicut / multicut.py
            json.dump(config, f)

        command = '%s %s %s %s %s %s %s' % (script_path, self.path, self.aff_key, self.ws_key,
                                            self.out_key, self.tmp_folder, config_path)
        log_file = os.path.join(self.tmp_folder, 'logs', 'log_multicut')
        err_file = os.path.join(self.tmp_folder, 'error_logs', 'err_multicut.err')
        bsub_command = 'bsub -J multicut -We %i -o %s -e %s \'%s\'' % (self.time_estimate,
                                                                       log_file, err_file, command)
        if self.run_local:
            subprocess.call([command], shell=True)
        else:
            subprocess.call([bsub_command], shell=True)

        # wait till all jobs are finished
        if not self.run_local:
            util.wait_for_jobs('papec')
        t = self._collect_outputs()
        out_path = self.output().path
        # the task succeeded only if a result was collected
        # and the output file exists
        success = t is not None and os.path.exists(out_path)

        if not success:
            raise RuntimeError("MulticutTask failed")
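_collect_outputs is not shown in these excerpts; here it evidently returns a result on success and None otherwise. A hypothetical sketch of such a collector, assuming each job writes a per-job JSON file into the temp folder. Both the file-name pattern and the 't' key are guesses, not the actual scheme:

import json
import os


def collect_outputs(tmp_folder, n_jobs):
    # Hypothetical collector for illustration: assumes each job wrote
    # `multicut_job_<id>.json` with a runtime under the key 't'.
    times = []
    for job_id in range(n_jobs):
        res_file = os.path.join(tmp_folder, 'multicut_job_%i.json' % job_id)
        if not os.path.exists(res_file):
            return None  # at least one job failed to write its result
        with open(res_file) as f:
            times.append(json.load(f)['t'])
    return times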
github constantinpape / cluster_tools / deprecated / production / multicut / multicut.py
def run(self):
        from .. import util

        # copy the script to the temp folder and replace the shebang
        script_path = os.path.join(self.tmp_folder, 'multicut.py')
        file_dir = os.path.dirname(os.path.abspath(__file__))
        util.copy_and_replace(os.path.join(file_dir, 'multicut.py'), script_path)

        with open(self.config_path) as f:
            config = json.load(f)
            have_roi = 'roi' in config

        # this task requires a roi in the config
        assert have_roi

        config_path = os.path.join(self.tmp_folder, 'multicut_config.json')
        with open(config_path, 'w') as f:
            json.dump(config, f)

        command = '%s %s %s %s %s %s %s' % (script_path, self.path, self.aff_key, self.ws_key,
                                            self.out_key, self.tmp_folder, config_path)
        log_file = os.path.join(self.tmp_folder, 'logs', 'log_multicut')
        err_file = os.path.join(self.tmp_folder, 'error_logs', 'err_multicut.err')
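These run methods belong to Luigi tasks: self.output() returns a target whose existence marks the task as done, and attributes like self.path and self.time_estimate are task parameters. A minimal skeleton of how such a task fits together; the class and parameter names are illustrative, not the repository's actual definitions:

import os

import luigi


class MulticutTask(luigi.Task):
    # Skeleton of a task in this style (illustrative only).
    path = luigi.Parameter()            # input container
    tmp_folder = luigi.Parameter()      # scratch space for scripts and logs
    config_path = luigi.Parameter()
    time_estimate = luigi.IntParameter()
    run_local = luigi.BoolParameter(default=False)

    def output(self):
        # existence of this target marks the task as complete
        return luigi.LocalTarget(os.path.join(self.tmp_folder,
                                              'multicut.log'))

    def run(self):
        # copy the script, build the command, submit, wait, check outputs
        pass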
github constantinpape / cluster_tools / deprecated / production / components / compute_block_offsets.py
def run(self):
        from .. import util

        # get input and output path
        in_path = self.input().path
        out_path = self.output().path

        # copy the script to the temp folder and replace the shebang
        file_dir = os.path.dirname(os.path.abspath(__file__))
        util.copy_and_replace(os.path.join(file_dir, 'compute_block_offsets.py'),
                              os.path.join(self.tmp_folder, 'compute_block_offsets.py'))

        # assemble the commands
        script_path = os.path.join(self.tmp_folder, 'compute_block_offsets.py')
        command = '%s %s %s' % (script_path, in_path, out_path)
        log_file = os.path.join(self.tmp_folder, 'logs', 'log_compute_offsets.log')
        err_file = os.path.join(self.tmp_folder, 'error_logs', 'err_compute_offsets.err')
        bsub_command = 'bsub -J compute_offsets -We %i -o %s -e %s \'%s\'' % (self.time_estimate,
                                                                              log_file, err_file, command)

        # submit job
        if self.run_local:
            subprocess.call([command], shell=True)
        else:
            subprocess.call([bsub_command], shell=True)
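One caveat when adapting the submission branch: subprocess.call([command], shell=True) passes a one-element list, which works on POSIX only because the single string becomes the shell command. Passing the string directly, and using subprocess.check_call so a failed submission raises instead of passing silently, is cleaner; a sketch:

import subprocess


def submit(command, bsub_command, run_local):
    # Illustrative variant of the submission branch above.
    if run_local:
        # check_call raises CalledProcessError on a non-zero exit code
        subprocess.check_call(command, shell=True)
    else:
        subprocess.check_call(bsub_command, shell=True)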
github constantinpape / cluster_tools / deprecated / production / relabel / find_labeling.py
        with open(self.config_path) as f:
            config = json.load(f)
            n_threads = config['n_threads']
            # block_shape = config['block_shape']
            # TODO support computation with roi
            # if 'roi' in config:
            #     have_roi = True

        # self._submit_job(block_shape)
        # TODO find proper n_jobs
        n_jobs = self.max_jobs
        self._submit_job(n_jobs, n_threads)
        # wait till all jobs are finished
        if not self.run_local:
            util.wait_for_jobs('papec')

        try:
            out_path = self.output().path
            success = os.path.exists(out_path)
        except Exception:
            success = False

        if not success:
            raise RuntimeError("FindLabelingTask failed")
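On a shared cluster filesystem the output file can appear with some delay after the jobs finish, so a single os.path.exists check can fail spuriously. A hedged sketch of a more forgiving check; the timeout values are arbitrary choices, not from the repository:

import os
import time


def output_exists(out_path, timeout=60, poll_interval=5):
    # Poll for `out_path` for up to `timeout` seconds (illustrative only;
    # the tasks above check existence exactly once).
    t_start = time.time()
    while time.time() - t_start < timeout:
        if os.path.exists(out_path):
            return True
        time.sleep(poll_interval)
    return False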
github constantinpape / cluster_tools / deprecated / production / relabel / find_labeling.py
def run(self):
        from .. import util

        # copy the script to the temp folder and replace the shebang
        file_dir = os.path.dirname(os.path.abspath(__file__))
        util.copy_and_replace(os.path.join(file_dir, 'find_labeling.py'),
                              os.path.join(self.tmp_folder, 'find_labeling.py'))

        with open(self.config_path) as f:
            config = json.load(f)
            n_threads = config['n_threads']
            # block_shape = config['block_shape']
            # TODO support computation with roi
            # if 'roi' in config:
            #     have_roi = True

        # self._submit_job(block_shape)
        # TODO find proper n_jobs
        n_jobs = self.max_jobs
        self._submit_job(n_jobs, n_threads)
        # wait till all jobs are finished
        if not self.run_local: