How to use opentuner - common examples

To help you get started, we’ve selected a few opentuner examples drawn from popular ways the library is used in public projects.
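
All of the snippets below are variations on one basic shape: an OpenTuner program subclasses MeasurementInterface, describes its search space in manipulator(), and measures one candidate configuration in run(). The following minimal, self-contained sketch shows that skeleton (the parameter name and compile command are illustrative, not taken from the projects quoted below):

import opentuner
from opentuner import ConfigurationManipulator, IntegerParameter
from opentuner import MeasurementInterface, Result

class FlagsTuner(MeasurementInterface):
    def manipulator(self):
        # Define the search space: a single integer parameter in [0, 3].
        m = ConfigurationManipulator()
        m.add_parameter(IntegerParameter('opt_level', 0, 3))
        return m

    def run(self, desired_result, input, limit):
        # Build one candidate configuration and report its cost.
        cfg = desired_result.configuration.data
        res = self.call_program('g++ -O%d test.cpp -o ./test' % cfg['opt_level'])
        return Result(time=res['time'])

if __name__ == '__main__':
    FlagsTuner.main(opentuner.default_argparser().parse_args())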

github jansel / opentuner / tests / test_manipulator.py
def setUp(self):
        """
        Set up a few configurations. The values of the PermutationParameter are:
        config1 - 0 1 2 3 4 5 6 7 8 9
        config2 - 4 3 2 1 0 9 8 7 6 5
        config3 - 1 0 4 2 7 9 5 3 6 8

        """
        self.manipulator = manipulator.ConfigurationManipulator()
        self.param1 = manipulator.PermutationParameter("param1", [0,1,2,3,4,5,6,7,8,9])
        self.manipulator.add_parameter(self.param1)

        self.cfg = self.manipulator.seed_config()
        self.config1 = self.manipulator.seed_config()
        self.config2 = self.manipulator.seed_config()
        self.config3 = self.manipulator.seed_config()

        # repeating values
        self.config4 = self.manipulator.seed_config()
        self.config5 = self.manipulator.seed_config()


        self.param1.set_value(self.config1, [0,1,2,3,4,5,6,7,8,9])
        self.param1.set_value(self.config2, [4,3,2,1,0,9,8,7,6,5])
        self.param1.set_value(self.config3, [1,0,4,2,7,9,5,3,6,8])
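
Reading a permutation back out mirrors set_value(); a short sketch of a follow-on assertion, assuming PermutationParameter's get_value() accessor:

        # e.g., inside a test method running after the setUp() above
        self.assertEqual(self.param1.get_value(self.config2),
                         [4, 3, 2, 1, 0, 9, 8, 7, 6, 5])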

github jansel / opentuner / tests / test_technique.py
def test_get_default_operator(self):
    default = self.technique.get_default_operator(manipulator.PermutationParameter)
    self.assertDictEqual(default, {'op_name': 'op1_nop', 'args': [], 'kwargs': {}})

github jansel / opentuner / tests / test_technique.py
def setUp(self):
    self.operator_map = {}
    ComposableEvolutionaryTechnique.add_to_map(self.operator_map,
                                  manipulator.PermutationParameter,
                                  "op3_cross", xchoice='op3_cross_CX')
    ComposableEvolutionaryTechnique.add_to_map(self.operator_map,
                                  "FloatArray",
                                  "op3_cross", strength=0.4)
    self.technique = EmptyComposableEvolutionaryTechnique(operator_map = self.operator_map)
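
EmptyComposableEvolutionaryTechnique is a test helper, but the operator_map mechanism is the same for any ComposableEvolutionaryTechnique subclass. One plausible way to use such a technique outside the tests (a sketch; the instance name here is made up) is to register it so it can be selected with --technique on the command line:

from opentuner.search import technique

technique.register(EmptyComposableEvolutionaryTechnique(
    operator_map=operator_map, name='composed-ea'))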

github jbosboom / streamjit / lib / opentuner / streamjit / onlinetuner.py
		print(self.trycount)
		cfg = desired_result.configuration.data
		#self.niceprint(cfg)
		self.sdk.sendmsg("%s\n"%cfg)
		msg = self.sdk.recvmsg()
		if (msg == "exit\n"):
			#data = raw_input ( "exit cmd received. Press Keyboard to exit..." )
			self.sdk.close()
			sys.exit(1)
		exetime = float(msg)
		if exetime < 0:
			print("Error in execution")
			return opentuner.resultsdb.models.Result(state='ERROR', time=float('inf'))
		else:
			print("Execution time is %f" % exetime)
			return opentuner.resultsdb.models.Result(time=exetime)

github jansel / opentuner / opentuner / utils / stats_matplotlib.py
    q = (db.query(resultsdb.models.TuningRun)
            .filter_by(state='COMPLETE')
            .order_by('name'))
    if labels:
      q = q.filter(resultsdb.models.TuningRun.name.in_(labels))
    for tr in q:
      dir_label_runs[run_label(tr)][run_label(tr)].append((tr, db))
  all_run_ids = list()
  returned_values = {}
  for d, label_runs in list(dir_label_runs.items()):
    all_run_ids = list(map(_[0].id, itertools.chain(*list(label_runs.values()))))
    session = list(label_runs.values())[0][0][1]
    objective = list(label_runs.values())[0][0][0].objective

    q = (session.query(resultsdb.models.Result)
         .filter(resultsdb.models.Result.tuning_run_id.in_(all_run_ids))
         .filter(resultsdb.models.Result.time < float('inf'))
         .filter_by(was_new_best=True, state='OK'))
    total = q.count()
    q = objective.filter_acceptable(q)
    acceptable = q.count()
    q = q.order_by(*objective.result_order_by_terms())
    best = q.limit(1).one()
    worst = q.offset(acceptable - 1).limit(1).one()

    for label, runs in sorted(label_runs.items()):
      (mean_values, percentile_values) = combined_stats_over_time(label, runs, objective, worst, best)
      returned_values[label] = (mean_values, percentile_values)
      final_scores = list()
      for run, session in runs:
        try:
          final = (session.query(resultsdb.models.Result)

github GraphIt-DSL / graphit / autotune / graphit_autotuner.py
        finally:
            self.call_program('rm test')
            self.call_program('rm test.cpp')

        if run_result['timeout'] == True:
            val = self.args.runtime_limit
        else:
            val = self.parse_running_time()
        
        self.call_program('rm test.out')
        print ("run result: " + str(run_result))
        print ("running time: " + str(val))

        if run_result['timeout'] == True:
            print ("Timed out after " + str(self.args.runtime_limit) + " seconds")
            return opentuner.resultsdb.models.Result(time=val)
        elif run_result['returncode'] != 0:
            if self.args.killed_process_report_runtime_limit == 1 and run_result['stderr'] == 'Killed\n':
                print ("process killed " + str(run_result))
                return opentuner.resultsdb.models.Result(time=self.args.runtime_limit)
            else:
                print (str(run_result))
                exit()
        else:
            return opentuner.resultsdb.models.Result(time=val)
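
This tuner is built around MeasurementInterface.call_program(), which runs a shell command and reports how it went. A condensed sketch of the same compile-then-run structure (the command strings are placeholders, and Result abbreviates opentuner.resultsdb.models.Result):

        compiled = self.call_program('g++ test.cpp -o test')
        if compiled['returncode'] != 0:
            return Result(state='ERROR', time=float('inf'))
        # call_program() returns a dict with 'time', 'timeout', 'returncode',
        # 'stdout' and 'stderr' entries, which is what this example inspects.
        ran = self.call_program('./test', limit=self.args.runtime_limit)
        if ran['timeout']:
            return Result(time=self.args.runtime_limit)
        return Result(time=ran['time'])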

github jansel / opentuner / examples / petabricks / import_old_result.py
  args.database = 'sqlite:///' + args.database
  engine, Session = opentuner.resultsdb.connect(args.database)
  session = Session()

  program_settings = json.load(open(args.program + '.settings'))
  args.n = program_settings['n']
  args.technique = ['Imported']
  objective = ThresholdAccuracyMinimizeTime(program_settings['accuracy'])

  tuningrun = resultsdb.models.TuningRun(
    uuid=uuid.uuid4().hex,
    name='import',
    args=args,
    start_date=datetime.now(),
    objective=objective,
    program_version=resultsdb.models.ProgramVersion.get(
      session, 'PetaBricksInterface', args.program, 'imported'),
    state='COMPLETE',
  )
  session.add(tuningrun)

  for gen, line in enumerate(open(args.candidatelog)):
    if line[0] != '#':
      line = re.split('\t', line)
      date = tuningrun.start_date + timedelta(seconds=float(line[0]))
      cfg = os.path.normpath(
        os.path.join(os.path.dirname(args.candidatelog), '..', line[5]))
      result = run(args, cfg)
      result.was_new_best = True
      result.tuning_run = tuningrun
      result.collection_date = date
      session.add(result)
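
One detail to keep in mind when adapting this import pattern: a SQLAlchemy session only buffers added objects, so the results reach the database once the session is committed, which the full script presumably does after the loop:

  session.commit()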

github jbosboom / streamjit / lib / opentuner / streamjit / tuner3.py
	freqInlineSize = jvmIntegerParameter("freqInlineSize", 100, 10000, 325, "-XX:FreqInlineSize=%d")
	inlineSmallCode = jvmIntegerParameter("inlineSmallCode", 500, 10000, 1000, "-XX:InlineSmallCode=%d")
	maxInlineSize = jvmIntegerParameter("maxInlineSize", 20, 1000, 35, "-XX:MaxInlineSize=%d")
	maxInlineLevel = jvmIntegerParameter("maxInlineLevel", 5, 20, 9, "-XX:MaxInlineLevel=%d")

	eliminateArrays = jvmIntegerParameter("eliminateAllocationArraySizeLimit", 64, 2048, 64, "-XX:EliminateAllocationArraySizeLimit=%d")
	useNuma = jvmFlag("useNuma", "-XX:+UseNUMA")
	bindGCTaskThreadsToCPUs = jvmFlag("bindGCTaskThreadsToCPUs", "-XX:+BindGCTaskThreadsToCPUs")

	enabledJvmOptions = [aggressiveOpts, compileThreshold, clipInlining, freqInlineSize,
		maxInlineSize, maxInlineLevel, eliminateArrays, useNuma, bindGCTaskThreadsToCPUs]
	return {x.name:x for x in enabledJvmOptions}

if __name__ == '__main__':
	logging.basicConfig(level=logging.INFO)
	parser = argparse.ArgumentParser(parents=opentuner.argparsers())
	parser.add_argument('--program', help='StreamJIT benchmark to tune (with first input)')
	parser.add_argument('--timestamp', help='timestamp to use for final config/errors',
		default=time.strftime('%Y%m%d-%H%M%S'))
	args = parser.parse_args()
	(cfg_json, error_str) = call_java([], "edu.mit.streamjit.tuner.ConfigGenerator2",
		["edu.mit.streamjit.impl.compiler2.Compiler2BlobFactory", args.program])
	if len(error_str) > 0:
		sys.exit("Getting config JSON: "+error_str)
	cfg = configuration.getConfiguration(cfg_json)
	jvm_options = make_jvm_options()

	manipulator = StreamJITConfigurationManipulator(cfg)
	for p in list(cfg.getAllParameters().values()) + list(jvm_options.values()):
		manipulator.add_parameter(p)

	# create seed configurations
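
The excerpt breaks off at seeding. In stock OpenTuner, the usual hook is MeasurementInterface.seed_configurations(), which returns a list of configuration dicts for the search to try first; a sketch of that hook (whether tuner3.py seeds this way is not shown in the excerpt):

	def seed_configurations(self):
		# Start the search from the manipulator's default configuration.
		return [self.manipulator().seed_config()]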

github jansel / opentuner / examples / halide / halidetuner.py
from opentuner.search.manipulator import ConfigurationManipulator
from opentuner.search.manipulator import PowerOfTwoParameter
from opentuner.search.manipulator import PermutationParameter
from opentuner.search.manipulator import BooleanParameter
from opentuner.search.manipulator import ScheduleParameter


COMPILE_CMD = (
  '{args.cxx} "{cpp}" -o "{bin}" -I "{args.halide_dir}/include" '
  '"{args.halide_dir}/bin/$BUILD_PREFIX/libHalide.a" -ldl -lcurses -lpthread {args.cxxflags} '
  '-DAUTOTUNE_N="{args.input_size}" -DAUTOTUNE_TRIALS={args.trials} '
  '-DAUTOTUNE_LIMIT={limit} -fno-rtti')

log = logging.getLogger('halide')

parser = argparse.ArgumentParser(parents=opentuner.argparsers())
parser.add_argument('source', help='Halide source file annotated with '
                                   'AUTOTUNE_HOOK')
parser.add_argument('--halide-dir', default=os.path.expanduser('~/Halide'),
                    help='Installation directory for Halide')
parser.add_argument('--input-size',
                    help='Input size to test with')
parser.add_argument('--trials', default=3, type=int,
                    help='Number of times to test each schedule')
parser.add_argument('--nesting', default=2, type=int,
                    help='Maximum depth for generated loops')
parser.add_argument('--max-split-factor', default=8, type=int,
                    help='The largest value a single split() can add')
parser.add_argument('--compile-command', default=COMPILE_CMD,
                    help='How to compile generated C++ code')
parser.add_argument('--cxx', default='c++',
                    help='C++ compiler to use (e.g., g++ or clang++)')
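
For context on how COMPILE_CMD is consumed: it is a str.format() template, so past the end of this excerpt it gets expanded with the parsed arguments plus per-run values, roughly like this (the file names and limit are placeholders):

cmd = args.compile_command.format(args=args, cpp='tmp.cpp', bin='./tmp.bin', limit=30)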