How to use the traits.api.Dict trait in the traits package

To help you get started, we've selected a few examples of traits.api.Dict drawn from popular ways it is used in public projects.

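Before the project snippets, a quick orientation: Dict declares a dictionary attribute on a HasTraits class, and the optional key and value traits you pass are used to validate every item. A minimal sketch (the class and trait names are illustrative, not taken from any project below):

from traits.api import Dict, HasTraits, Int, Str


class Inventory(HasTraits):
    # Keys must be strings and values integers; invalid items raise TraitError.
    counts = Dict(Str, Int)

    # An untyped Dict accepts any keys and values.
    notes = Dict()


inv = Inventory(counts={"widgets": 3})
inv.counts["gadgets"] = 5          # validated item assignment
# inv.counts["gadgets"] = "five"   # would raise a TraitError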

From enthought/envisage on GitHub (envisage/resource/resource_manager.py):
# Enthought library imports.
from traits.api import Dict, HasTraits, Str, provides

# Local imports.
from .i_resource_manager import IResourceManager
from .i_resource_protocol import IResourceProtocol


@provides(IResourceManager)
class ResourceManager(HasTraits):
    """ The default resource manager. """

    #### 'IResourceManager' interface #########################################

    # The protocols used by the manager to resolve resource URLs.
    resource_protocols = Dict(Str, IResourceProtocol)

    ###########################################################################
    # 'IResourceManager' interface.
    ###########################################################################

    #### Trait initializers ###################################################

    def _resource_protocols_default(self):
        """ Trait initializer. """

        # We do the import(s) here in case somebody wants a resource manager
        # that doesn't use the default protocol(s).
        from .file_resource_protocol import FileResourceProtocol
        from .http_resource_protocol import HTTPResourceProtocol
        from .package_resource_protocol import PackageResourceProtocol
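
The envisage snippet above relies on the standard Traits convention for computed defaults: a method named _<trait_name>_default is called lazily the first time the trait is read. A short sketch of the same pattern with invented names:

from traits.api import Dict, HasTraits, Str


class Settings(HasTraits):
    # String-to-string mapping whose default is computed lazily.
    options = Dict(Str, Str)

    def _options_default(self):
        # Called the first time `options` is read, if it was never assigned.
        return {"theme": "dark", "language": "en"}
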
From jaidevd/pysemantic on GitHub (pysemantic/validator.py):
    # Dictionary of arguments loaded from the pickle file.
    pickled_args = Property(Dict, depends_on=['pickle_file'])

    # List of required traits
    # FIXME: Arguments required by the schema shouldn't have to be programmed
    # into the validator class. There must be a way to enforce requirements
    # right in the schema itself.
    required_args = ValidTraitList

    # Parser args for pandas
    parser_args = Property(Dict, depends_on=['filepath', 'delimiter', 'nrows',
                                             'dtypes', 'colnames'])

    # Protected traits

    _dtypes = Property(Dict(key_trait=Str, value_trait=Type),
                       depends_on=['specification'])

    # Public interface

    def get_parser_args(self):
        """Return parser args as required by pandas parsers."""
        return self.parser_args

    to_dict = get_parser_args

    def set_parser_args(self, specs, write_to_file=False):
        """Magic method required by Property traits."""
        self.parser_args = specs
        if write_to_file:
            logger.info("Following specs for dataset {0}".format(self.name) +
                        " were written to specfile {0}".format(self.specfile))
From enthought/mayavi on GitHub (enthought/mayavi/sources/image_reader.py):
    # Information about what this object can produce.
    output_info = PipelineInfo(datasets=['image_data'])

    # Our view.
    view = View(Group(Include('time_step_group'),
                      Item(name='base_file_name'),
                      Item(name='reader',
                           style='custom',
                           resizable=True),
                      show_labels=False),
                resizable=True)

    ######################################################################
    # Private Traits
    _image_reader_dict = Dict(Str, Instance(tvtk.Object))

    ######################################################################
    # `object` interface
    ######################################################################
    def __init__(self, **traits):
        d = {'bmp':tvtk.BMPReader(),
             'jpg':tvtk.JPEGReader(),
             'png':tvtk.PNGReader(),
             'pnm':tvtk.PNMReader(),
             'dcm':tvtk.DICOMImageReader(),
             'tiff':tvtk.TIFFReader(),
             'ximg':tvtk.GESignaReader(),
             'dem':tvtk.DEMReader(),
             'mha':tvtk.MetaImageReader(),
             'mhd':tvtk.MetaImageReader(),
            }
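
In the mayavi reader, Dict(Str, Instance(tvtk.Object)) is a registry mapping file extensions to reader objects. A stripped-down sketch of the same idea, using a hypothetical Handler base class instead of tvtk:

from traits.api import Dict, HasTraits, Instance, Str


class Handler(HasTraits):
    """Stand-in base class for the objects kept in the registry."""


class PngHandler(Handler):
    pass


class Registry(HasTraits):
    # Maps a lower-case file extension to the handler instance for it.
    handlers = Dict(Str, Instance(Handler))

    def _handlers_default(self):
        return {"png": PngHandler()}

    def handler_for(self, filename):
        ext = filename.rsplit(".", 1)[-1].lower()
        return self.handlers.get(ext)
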
From bpteague/cytoflow on GitHub (cytoflow/operations/logicle.py):
        implementations.
        Moore WA, Parks DR.
        Cytometry A. 2012 Apr;81(4):273-7. 
        doi: 10.1002/cyto.a.22030 
        PMID: 22411901
        http://onlinelibrary.wiley.com/doi/10.1002/cyto.a.22030/full
    """
    
    #traits
    id = Constant('edu.mit.synbio.cytoflow.operations.logicle')
    friendly_id = Constant("Logicle Transform")
    
    name = Str()
    channels = List(Str)
    
    W = Dict(Str, Float, desc="the width of the linear range, in log10 decades.")
    M = Float(4.5, desc = "the width of the display in log10 decades")
    A = Dict(Str, Float, desc = "additional decades of negative data to include.")
    r = Float(0.05, desc = "quantile to use for estimating the W parameter.")
    
    def __init__(self, **kwargs):
        warnings.warn("Transforming data with LogicleTransformOp is deprecated; "
                      "rescale the data with the 'logicle' scale instead.",
                      exceptions.DeprecationWarning)
        super(LogicleTransformOp, self).__init__(**kwargs)
    
    def estimate(self, experiment, subset = None):
        """Estimate A and W per-channel from the data (given r.)
        
        Actually, that's not quite right. Set A to 0.0; and estimate W given r.
        
        Parameters
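
The cytoflow operation uses Dict(Str, Float, desc=...) to hold one parameter value per channel name; the desc string is kept as trait metadata. A rough sketch with invented channel names and a placeholder estimate:

from traits.api import Dict, Float, HasTraits, Str


class Transform(HasTraits):
    # One value per channel name; `desc` is stored as trait metadata.
    W = Dict(Str, Float, desc="width of the linear range, per channel")

    def estimate(self, channels):
        # Real code would derive the widths from data; use a placeholder here.
        self.W = {c: 0.5 for c in channels}


t = Transform()
t.estimate(["FITC-A", "PE-A"])
print(t.trait("W").desc)    # -> width of the linear range, per channel
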
From swift-nav/piksi_firmware on GitHub (scripts/solution_view.py):
import struct
import math
import os
import numpy as np
import datetime
import time

import sbp_piksi as sbp_messages

class SimpleAdapter(TabularAdapter):
  columns = [('Item', 0), ('Value',  1)]
  width = 80

class SolutionView(HasTraits):
  python_console_cmds = Dict()
  # we need to double up on Lists to store the pseudo absolutes separately
  # without rewriting everything
  lats = List()
  lngs = List()
  alts = List()

  lats_psuedo_abs = List()
  lngs_psuedo_abs = List()
  alts_psuedo_abs = List()

  table_spp = List()
  table_psuedo_abs = List()
  dops_table = List()
  pos_table_spp = List()
  vel_table = List()
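
SolutionView declares python_console_cmds = Dict() with no key or value traits, so anything can be stored. Note that, unlike a plain class-level dictionary, each HasTraits instance still gets its own copy of the default. A short illustration with invented names:

from traits.api import Dict, HasTraits


class Console(HasTraits):
    # No key or value trait: any Python objects may be stored.
    commands = Dict()


a, b = Console(), Console()
a.commands["reset"] = lambda: None
print("reset" in b.commands)    # -> False: each instance has its own dict
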
From ruoyu0088/scpy2 on GitHub (matplotlib/polygon_widget.py):
# -*- coding: utf-8 -*-
import numpy as np
from traits.api import (HasTraits, Str, Int, List, Instance, Button, Bool,
                        on_trait_change, Array, Dict, Event)
from traitsui.api import View, Item, Handler, VGroup, HGroup, EnumEditor
from matplotlib.axes import Axes
from matplotlib.lines import Line2D
from matplotlib.figure import Figure
from scpy2.traits import PositionHandler, MPLFigureEditor


class PolygonWidget(HasTraits):
    points = Array(dtype=float, shape=(None, 2))
    axe = Instance(Axes)
    style = Dict()
    line = Instance(Line2D)
    _selected_index = Int(-1)
    changed = Event

    view = View(
        "points"
    )

    def __init__(self, **kw):
        super(PolygonWidget, self).__init__(**kw)
        x, y = self.get_xy()
        self.line, = self.axe.plot(x, y, **self.style)
        canvas = self.axe.figure.canvas
        canvas.mpl_connect('button_press_event', self.button_press_callback)
        canvas.mpl_connect('button_release_event', self.button_release_callback)
        canvas.mpl_connect('motion_notify_event', self.motion_notify_callback)
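
Because style is a Dict trait, the widget could also be notified when entries change. A sketch of dictionary change notification, separate from the matplotlib code above and using made-up trait names:

from traits.api import Dict, HasTraits, Str


class Styled(HasTraits):
    style = Dict(Str)

    def _style_items_changed(self, event):
        # Fired when keys are added, changed, or removed in place.
        print("added:", event.added, "removed:", event.removed)

    def _style_changed(self, new):
        # Fired when the whole dictionary is replaced.
        print("replaced with", new)


s = Styled()
s.style["color"] = "red"    # triggers _style_items_changed
s.style = {"lw": 2}         # triggers _style_changed
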
From bpteague/cytoflow on GitHub (cytoflow/operations/flowpeaks.py):
    channels = List(Str)
    scale = Dict(Str, util.ScaleEnum)
    by = List(Str)
#     find_outliers = Bool(False)
    
    # parameters that control estimation, with sensible defaults
    h = util.PositiveFloat(1.5, allow_zero = False)
    h0 = util.PositiveFloat(1, allow_zero = False)
    tol = util.PositiveFloat(0.5, allow_zero = False)
    merge_dist = util.PositiveFloat(5, allow_zero = False)
    
    # parameters that control outlier selection, with sensible defaults
    
    
    _kmeans = Dict(Any, Instance(sklearn.cluster.MiniBatchKMeans), transient = True)
    _means = Dict(Any, List, transient = True)
    _normals = Dict(Any, List(Function), transient = True)
    _density = Dict(Any, Function, transient = True)
    _peaks = Dict(Any, List(Array), transient = True)  
    _peak_clusters = Dict(Any, List(Array), transient = True)
    _cluster_peak = Dict(Any, List, transient = True)  # kmeans cluster idx --> peak idx
    _cluster_group = Dict(Any, List, transient = True) # kmeans cluster idx --> group idx
    _scale = Dict(Str, Instance(util.IScale), transient = True)
    
    def estimate(self, experiment, subset = None):
        """
        Estimate the k-means clusters, then hierarchically merge them.
        
        Parameters
        ----------
        experiment : Experiment
            The :class:`.Experiment` to use to estimate the k-means clusters
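
The underscore-prefixed dictionaries in flowpeaks are declared with transient = True, which keeps the fitted state out of the pickled form of the object. A minimal sketch of that behaviour, assuming invented trait names:

import pickle

from traits.api import Any, Dict, HasTraits, Str


class Model(HasTraits):
    params = Dict(Str, Any)

    # Fitted state that should not survive pickling.
    _cache = Dict(Str, Any, transient=True)


m = Model(params={"k": 3})
m._cache["fit"] = [1, 2, 3]
m2 = pickle.loads(pickle.dumps(m))
print(m2.params)    # -> {'k': 3}
print(m2._cache)    # -> {} (transient traits are skipped when pickling)
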
From jaidevd/pysemantic on GitHub (pysemantic/validator.py):
    is_drop_duplicates = Property(Bool, depends_on=['rules'])

    # whether to drop NAs
    is_drop_na = Property(Bool, depends_on=['rules'])

    # Names of columns to be rewritten
    column_names = Property(Any, depends_on=['rules'])

    # Specifications relating to the selection of rows.
    nrows = Property(Any, depends_on=['rules'])

    # Whether to shuffle the rows of the dataframe before returning
    shuffle = Property(Bool, depends_on=['rules'])

    # Unique values to maintain per column
    unique_values = Property(Dict, depends_on=['column_rules'])

    def _rules_default(self):
        return {}

    @cached_property
    def _get_shuffle(self):
        return self.rules.get("shuffle", False)

    @cached_property
    def _get_index_col(self):
        return self.rules.get('index_col', False)

    @cached_property
    def _get_nrows(self):
        return self.rules.get('nrows', {})
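
Both validator snippets wrap Dict in Property with depends_on, so the dictionary is derived from other traits and the cached value is recomputed only when they change. A small sketch of that pattern with made-up trait names:

from traits.api import Dict, HasTraits, Property, Str, cached_property


class Config(HasTraits):
    prefix = Str("app")

    # Recomputed (and the cache cleared) whenever `prefix` changes.
    env = Property(Dict, depends_on=['prefix'])

    @cached_property
    def _get_env(self):
        return {self.prefix + "_debug": "false"}
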
From bpteague/cytoflow on GitHub (cytoflow/operations/bleedthrough_linear.py):
    .. plot::
        :context: close-figs
    
        >>> ex2 = bl_op.apply(ex2)  
    
    """
    
    # traits
    id = Constant('edu.mit.synbio.cytoflow.operations.bleedthrough_linear')
    friendly_id = Constant("Linear Bleedthrough Correction")
    
    name = Constant("Bleedthrough")

    controls = Dict(Str, File)
    spillover = Dict(Tuple(Str, Str), Float)
    control_conditions = Dict(Str, Dict(Str, Any), {})
    
    _sample = Dict(Str, Any, transient = True)
    
    def estimate(self, experiment, subset = None): 
        """
        Estimate the bleedthrough from single-channel controls in :attr:`controls`
        """
        if experiment is None:
            raise util.CytoflowOpError('experiment', "No experiment specified")
        
        channels = list(self.controls.keys())

        if len(channels) < 2:
            raise util.CytoflowOpError('channels',
                                       "Need at least two channels to correct bleedthrough.")
From enthought/envisage on GitHub (examples/plugins/tasks/attractors/attractors_preferences.py):
    # See TasksApplication for more information.
    always_use_default_layout = Bool


class AttractorsPreferencesPane(PreferencesPane):
    """ The preferences pane for the Attractors application.
    """

    #### 'PreferencesPane' interface ##########################################

    # The factory to use for creating the preferences model object.
    model_factory = AttractorsPreferences

    #### 'AttractorsPreferencesPane' interface ################################

    task_map = Dict(Str, Str)

    view = View(
        VGroup(
            HGroup(
                Item("always_use_default_layout"),
                Label("Always use the default active task on startup"),
                show_labels=False,
            ),
            HGroup(
                Label("Default active task:"),
                Item(
                    "default_task", editor=EnumEditor(name="handler.task_map")
                ),
                enabled_when="always_use_default_layout",
                show_labels=False,
            ),