How to use the temci.utils.util module in temci

To help you get started, we’ve selected a few temci examples, based on popular ways it is used in public projects.

Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.

github parttimenerd / temci / temci / report / testers.py View on Github external
"""
Contains the tester base class and several simple implementations
that simplify the work with statistical hypothesis tests.
"""

import warnings
import temci.utils.util as util
import typing as t
if util.can_import("scipy"):
    import scipy.stats as st
    import scipy.optimize as opti
    import numpy as np
from temci.utils.typecheck import *
from temci.utils.registry import AbstractRegistry, register
import logging

Number = t.Union[int, float]

class TesterRegistry(AbstractRegistry):
    """
    Registry of the available statistical hypothesis tester implementations.

    Configured below the ``stats`` settings key via the ``tester`` sub key;
    only a single tester can be selected at a time, defaulting to ``"t"``.
    """

    settings_key_path = "stats"  # settings sub tree this registry is configured under
    use_key = "tester"           # settings key that selects the tester to use
    use_list = False             # only one tester may be active, not a list of them
    default = "t"                # name of the default tester (presumably the t-test — confirm)
    registry = {}                # maps tester name -> registered tester class
github parttimenerd / temci / test / util / test_util.py View on Github external
def test_recursive_exec_for_leafs(self):
    """
    Check that util.recursive_exec_for_leafs calls the passed function once per
    leaf of a nested dict, handing it the full key path and the leaf value.
    """
    paths = []
    values = []
    # renamed from `map` to avoid shadowing the builtin of the same name
    tree = {"a": {"b": "3", "c": "4"}, "ds": "3", "z": {"b": "3", "c": {"b": "3", "c": "4"}}}
    exp_paths = [["a", "b"], ["a", "c"], ["ds"], ["z", "b"], ["z", "c", "b"], ["z", "c", "c"]]
    exp_values = ["3", "4", "3", "3", "3", "4"]

    def func(key, path, value):
        paths.append(path)
        values.append(value)

    util.recursive_exec_for_leafs(tree, func)
    # assertEqual instead of assertTrue(a == b): failures then show a diff.
    # Sorting, because the leaf visiting order is not part of the contract.
    self.assertEqual(sorted(paths), sorted(exp_paths))
    self.assertEqual(sorted(values), sorted(exp_values))
github parttimenerd / temci / temci / report / report.py View on Github external
base_mean = baseline.get_single_properties()[prop].mean()
            base_std = baseline.get_single_properties()[prop].std()
            mean_str = str(FNumber(mean / base_mean, abs_deviation=std / base_mean, is_percent=True))
            dev = "{:>5.5%}".format(std / mean) if mean != 0 else "{:>5.5}".format(std)
            print_func("\t {{prop:<{}}} mean = {{mean:>15s}}, confidence = {{conf:>5.0%}}, dev = {{dev:>11s}}, "
                       "{{dbase:>11s}}".format(descr_size)
                .format(
                    prop=prop,
                    mean=mean_str,
                    dev=dev,
                    conf=tester.test(block[prop], baseline[prop]),
                    dbase="{:>5.5%}".format(base_std / base_mean) if base_mean != 0 else "{:>5.5}".format(base_std)))
        rels = [(block.get_single_properties()[prop].mean() / baseline.get_single_properties()[prop].mean())
                            for prop in combined_props]
        gmean = stats.gmean(rels)
        gstd = util.geom_std(rels)
        print_func("geometric mean of relative mean = {:>15}, std dev = {:>15}"
                   .format(FNumber(gmean, is_percent=True).format(), FNumber(gstd, is_percent=True).format()))
github parttimenerd / temci / temci / report / rundata.py View on Github external
def get_description_clusters(self) -> t.Dict[str, t.List['RunData']]:
    """
    Group the runs by their description,
    call RunDataStatsHelper.make_descriptions_distinct first.

    :return: insertion ordered mapping of description to the runs that carry it
    """
    by_description = util.InsertionTimeOrderedDict()
    for run in self.runs:
        attrs = run.attributes
        if "description" in attrs:
            # prefer the distinct "__description" attribute if it was set
            key = "__description" if "__description" in attrs else "description"
            descr = attrs[key]
        else:
            descr = ""
        if descr not in by_description:
            by_description[descr] = []
        by_description[descr].append(run)
    return by_description
github parttimenerd / temci / temci / misc / game.py View on Github external
from temci.report.rundata import RunData

from temci.report.stats import SingleProperty, Single, SinglesProperty
from temci.utils.typecheck import *
import os, shutil, copy
from pprint import pprint
from temci.report import report
import scipy as sp
import  scipy.stats as stats

from temci.utils.util import InsertionTimeOrderedDict, geom_std

itod_from_list = InsertionTimeOrderedDict.from_list

if util.can_import("scipy"):
    import scipy.stats as stats
    #import ruamel.yaml as yaml
import yaml

from temci.report.report import HTMLReporter2, html_escape_property
from temci.utils.settings import Settings
Settings().load_files()

USABLE_WITH_SERVER = True
FIG_WIDTH = 15
FIG_HEIGHT_PER_ELEMENT = 1.5

class Mode(Enum):
    geom_mean_rel_to_best = 1
    """ calculate all mean scores as "mean / best mean" and use the geometric mean for summaries"""
    mean_rel_to_first = 2
github parttimenerd / temci / temci / misc / game.py View on Github external
"""
Benchmarks game inspired comparison of different implementations for a given language.

It doesn't really belong directly to the temci tool, but uses big parts of it.
It's currently in a pre alpha state as it's a part of the evaluation for my bachelor thesis
that I'm currently doing,
"""

import temci.utils.util as util

if __name__ == "__main__":
    util.allow_all_imports = True

import logging, time
import typing as t
import inspect

import multiprocessing

import zlib
from collections import defaultdict
from enum import Enum

from temci.report.testers import Tester, TTester, TesterRegistry

START_TIME = time.time()

import subprocess
github parttimenerd / temci / temci / report / stats.py View on Github external
:param img: store as png image
        :return: dictionary mapping each used format to the resulting files name
        """
        if fig_height is None:
            fig_height = self._height_for_width(fig_width)
        #filename = # self._get_new_file_name(dir)
        ret_dict = {}
        if img:
            ret_dict["img"] = self._store_as_image(filename + self.img_filename_ending, fig_width, fig_height)
        if tex:
            ret_dict["tex"] = self._store_as_tex(filename + ".tex", fig_width, fig_height, standalone=False)
        if pdf:
            if util.has_pdflatex():
                ret_dict["pdf"] = self._store_as_pdf(filename + ".pdf", fig_width, fig_height)
            else:
                util.warn_for_pdflatex_non_existence_once()
        if tex_standalone:
            ret_dict["tex_standalone"] = self._store_as_tex(filename + "____standalone.tex", fig_width,
                                                            fig_height, standalone=True)
        if self._fig is not None:
            plt.close('all')
        return ret_dict
github parttimenerd / temci / temci / report / stats.py View on Github external
import logging
import os
from collections import defaultdict
from enum import Enum

import itertools

import math
from temci.report.rundata import RunData
from temci.report.testers import Tester, TesterRegistry
from temci.utils.settings import Settings
import typing as t
import temci.utils.util as util
from temci.utils.sudo_utils import chown

if util.can_import("scipy"):
    import numpy as np
    import scipy as sp
    import scipy.stats as st
    import pandas as pd
    import matplotlib as mpl
    mpl.use("agg")
from temci.utils.typecheck import *


Number = t.Union[float, int]
""" Numeric value type """


class StatMessageType(Enum):
    """
    Types of StatMessages.
github parttimenerd / temci / temci / report / rundata.py View on Github external
def get_description_clusters_and_single(self) -> t.Tuple[t.List['RunData'], t.Dict[str, t.List['RunData']]]:
    """
    Splits the description clusters (see get_description_clusters) into runs
    that are alone in their cluster and clusters with more than one run;
    call RunDataStatsHelper.make_descriptions_distinct first.

    :return: tuple of (runs with a unique description, description -> runs for
             all clusters containing more than one run)
    """
    clusters = self.get_description_clusters()
    multi_clusters = util.InsertionTimeOrderedDict()
    single = []
    for descr, runs in clusters.items():
        # `== 1` instead of `is 1`: identity comparison with an int literal is a
        # CPython implementation detail and a SyntaxWarning since Python 3.8
        if len(runs) == 1:
            single.extend(runs)
        else:
            multi_clusters[descr] = runs
    return single, multi_clusters
github parttimenerd / temci / temci / scripts / cli.py View on Github external
import locale
from enum import Enum

from temci.utils.number import FNumber
from temci.utils.plugin import load_plugins
from temci.utils.util import sphinx_doc, get_doc_for_type_scheme

import warnings

from temci.scripts.temci_completion import completion_file_name, create_completion_dir
from temci.utils import util
if __name__ == "__main__":
    util.allow_all_imports = True

warnings.simplefilter("ignore")
import shutil

from temci.utils.typecheck import *

from temci.build.builder import BuildError
from temci.run.run_processor import RunProcessor
from temci.build.build_processor import BuildProcessor
import temci.run.run_driver as run_driver
import temci.run.run_driver_plugin
from temci.report.report import ReporterRegistry
from temci.utils.settings import Settings
from temci.report.report_processor import ReportProcessor
import temci.report.report
import temci.report.testers