How to use the future.utils.with_metaclass function in future

To help you get started, we’ve selected a few examples of future’s with_metaclass, drawn from popular ways it is used in public projects.
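
For orientation before the examples: with_metaclass(meta, *bases) returns a temporary base class that re-creates the final class under the given metaclass, so a single class definition works on both Python 2 and Python 3. A minimal sketch (the Meta and Base names below are illustrative, not taken from the projects listed):

from future.utils import with_metaclass


class Meta(type):
    """Illustrative metaclass: keeps a registry of every class it creates."""
    registry = []

    def __init__(cls, name, bases, namespace):
        super(Meta, cls).__init__(name, bases, namespace)
        Meta.registry.append(cls)


class Base(with_metaclass(Meta, object)):
    """Behaves like `class Base(object, metaclass=Meta)` on Python 3."""
    pass


assert type(Base) is Meta and Base in Meta.registry

The same pattern appears in every excerpt below, usually with abc.ABCMeta or a project-specific metaclass in place of Meta.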


github PythonCharmers / python-future / tests / test_future / test_object.py
def test_with_metaclass_and_object(self):
        """
        Issue #91
        """
        from future.utils import with_metaclass

        class MetaClass(type):
            pass

        class TestClass(with_metaclass(MetaClass, object)):
            pass
github eyadgaran / SimpleML / simpleml / pipelines / validation_split_mixins.py
poppable_keys = [k for k, v in self.items() if self.is_null_type(v)]
        [self.pop(k) for k in poppable_keys]

        # Return self for easy chaining
        return self


class SplitContainer(defaultdict):
    '''
    Explicit instantiation of a defaultdict returning split objects
    '''
    def __init__(self, default_factory=Split, **kwargs):
        super(SplitContainer, self).__init__(default_factory, **kwargs)


class SplitMixin(with_metaclass(ABCMeta, object)):
    @abstractmethod
    def split_dataset(self):
        '''
        Set the split criteria

        Must set self._dataset_splits
        '''

    def containerize_split(self, split_dict):
        return SplitContainer(**split_dict)

    def get_split_names(self):
        if not hasattr(self, '_dataset_splits') or self._dataset_splits is None:
            self.split_dataset()
        return list(self._dataset_splits.keys())
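
In this SimpleML excerpt, with_metaclass(ABCMeta, object) turns SplitMixin into an abstract base class: a subclass must implement split_dataset() and populate self._dataset_splits, and get_split_names() then triggers the split lazily. A minimal, hypothetical subclass (RandomSplitMixin and its plain-list split values are placeholders, not SimpleML's real Split objects):

class RandomSplitMixin(SplitMixin):
    """Hypothetical concrete mixin: an 80/20 split over placeholder data."""

    def split_dataset(self):
        data = list(range(10))  # stand-in for the pipeline's real dataset
        cutoff = int(len(data) * 0.8)
        self._dataset_splits = self.containerize_split(
            {'TRAIN': data[:cutoff], 'TEST': data[cutoff:]})


RandomSplitMixin().get_split_names()  # ['TRAIN', 'TEST'], computed lazily
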
github FrostyX / tracer / tracer / resources / processes.py
super(ProcessMeta, cls).__init__(name, bases, attributes)
		def reset_cache():
			cls._cache = {}
		reset_cache()
		setattr(cls, 'reset_cache', reset_cache)

	def __call__(cls, *args, **kwargs):
		pid = args[0]
		if pid not in cls._cache:
			self = cls.__new__(cls, *args, **kwargs)
			cls.__init__(self, *args, **kwargs)
			cls._cache[pid] = self
		return cls._cache[pid]


class Process(with_metaclass(ProcessMeta, ProcessWrapper)):
	"""
	Represent the process instance uniquely identifiable through PID

	For all class properties and methods, please see
	http://pythonhosted.org/psutil/#process-class

	Below listed are only reimplemented ones.

	For performance reasons, instances are cached based on PID, and
	multiple instantiations of a ``Process`` object with the same PID will
	return the same object. To clear the cache, invoke
	``Process.reset_cache()``. Additionally, as with ``ProcessWrapper``,
	process information is cached at object creation. To force a refresh,
	invoke the ``rebuild_cache()`` method on the object.
	"""
github google / rekall / rekall-core / rekall / plugin.py
mode = getattr(subclass, "mode", None)

            if isinstance(mode, basestring):
                if not session.GetParameter(mode):
                    return False

            elif isinstance(mode, (list, tuple)):
                for i in mode:
                    if not session.GetParameter(i):
                        return False

        return True



class Command(with_metaclass(registry.MetaclassRegistry, ModeBasedActiveMixin)):
    """A command can be run from the rekall command line.

    Commands can be automatically imported into the shell's namespace and are
    expected to produce textual (or other) output.

    In order to define a new command simply extend this class.
    """

    # These attributes are not inherited.

    # The name of this command (The command will be registered under this
    # name). If empty, the command will not be imported into the namespace but
    # will still be available from the Factory below.
    __name = ""

    # Name of the category of this command. This is used when showing help and
github line / line-bot-sdk-python / linebot / models / actions.py
:param str label: Label for the action
            Max: 20 characters
        :param str uri: URI opened when the action is performed.
        :param alt_uri: URI opened when using the desktop app.
        :type alt_uri: T <= :py:class:`linebot.models.actions.AltUri`
        :param kwargs:
        """
        super(URIAction, self).__init__(**kwargs)

        self.type = 'uri'
        self.label = label
        self.uri = uri
        self.alt_uri = self.get_or_new_from_json_dict(alt_uri, AltUri)


class AltUri(with_metaclass(ABCMeta, Base)):
    """AltUri.

    https://github.com/line/line-bot-sdk-python/issues/155

    URI opened when using the desktop app.
    """

    def __init__(self, desktop=None, **kwargs):
        """__init__ method.

        :param str desktop: URI opened on LINE for macOS and Windows
            when the action is performed.
            If the altUri.desktop property is set,
            the uri property is ignored on LINE for macOS and Windows.
        :param kwargs:
        """
github MatthieuDartiailh / pyclibrary / pyclibrary / c_library.py
backend = identify_library(lib)
            backend_cls = cls.backends[backend]
            lib_path = get_library_path(lib, backend)

        # Check whether or not this library has already been opened.
        if lib_path in cls.libs:
            return cls.libs[lib_path]

        else:
            obj = super(CLibraryMeta, backend_cls).__call__(lib, *args,
                                                            **kwargs)
            cls.libs[lib_path] = obj
            return obj


class CLibrary(with_metaclass(CLibraryMeta, object)):
    """The CLibrary class is intended to automate much of the work in using
    ctypes by integrating header file definitions from CParser. This class
    serves as a proxy to a backend, adding a few features:

        - allows easy access to values defined via CParser.
        - automatic type conversions for function calls using CParser function
          signatures.
        - creates ctype classes based on type definitions from CParser.

    Initialize using a ctypes shared object and a CParser:
    >>> headers = CParser.winDefs()
    >>> lib = CLibrary(windll.User32, headers)

    There are 3 ways to access library elements:

    - lib(type, name):
github yashaka / selene / selene / driver.py
class SharedWebDriverSource(IWebDriverSource):

    @property
    def driver(self):
        return self._webdriver

    @driver.setter
    def driver(self, value):
        self._webdriver = value

    def __init__(self):
        self._webdriver = NoneObject("SharedWebDriverSource#_webdriver")  # type: IWebDriver


class SeleneDriver(with_metaclass(DelegatingMeta, IWebDriver)):

    @property
    def __delegate__(self):
        return self._webdriver

    @property
    def _webdriver(self):
        return self._source.driver

    # todo: consider the usage: `SeleneDriver(FirefoxDriver())` over `SeleneDriver.wrap(FirefoxDriver())`
    # todo: it may be possible if __init__ accepts webdriver_or_source and IWebDriverSource implements IWebDriver...
    @classmethod
    def wrap(cls, webdriver):
        # type: (WebDriver) -> SeleneDriver
        return SeleneDriver(ExplicitWebDriverSource(webdriver))
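
Here with_metaclass pairs SeleneDriver with selene's DelegatingMeta, which, given the __delegate__ property above, forwards the IWebDriver interface to the wrapped driver. The sketch below shows the general idea of such a delegating metaclass; it is illustrative only and is not selene's actual implementation:

from abc import ABCMeta, abstractmethod

from future.utils import with_metaclass


class DelegatingMeta(ABCMeta):
    """Illustrative: add a forwarding method for each abstract method of the bases."""

    def __new__(mcs, name, bases, namespace):
        abstract = set()
        for base in bases:
            abstract |= getattr(base, '__abstractmethods__', frozenset())
        for method_name in abstract - set(namespace):
            namespace[method_name] = mcs._forwarder(method_name)
        return super(DelegatingMeta, mcs).__new__(mcs, name, bases, namespace)

    @staticmethod
    def _forwarder(method_name):
        def forward(self, *args, **kwargs):
            return getattr(self.__delegate__, method_name)(*args, **kwargs)
        return forward


class IGreeter(with_metaclass(ABCMeta, object)):
    @abstractmethod
    def greet(self, name):
        """Return a greeting string."""


class LoudGreeter(IGreeter):
    def greet(self, name):
        return 'HELLO, %s!' % name.upper()


class GreeterProxy(with_metaclass(DelegatingMeta, IGreeter)):
    def __init__(self, delegate):
        self._delegate = delegate

    @property
    def __delegate__(self):
        return self._delegate


print(GreeterProxy(LoudGreeter()).greet('world'))  # HELLO, WORLD!
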
github ELEKTRONN / ELEKTRONN2 / elektronn2 / neuromancer / node_basic.py
"not permitted to create other nodes within"
                                   "the class definiton of a node. Use a factory"
                                   "function to create several nodes at once"
                                   %(args[0].__class__, diff))
            elif diff == 0:
                logger.debug("Initialisation of %s registered 0 instead of 1 nodes. "
                            "This might be the case, if this function is "
                            "called from a derived node that has already registered."
                                   % (args[0].__class__,))

        return new_init


###############################################################################

class Node(with_metaclass(MetaNode, object)):
    """
    Basic node class. All neural network nodes should inherit from ``Node``.

    Parameters
    ----------
    parent: Node or list[Node]
        The input node(s).
    name: str
        Given name of the ``Node``, may be an empty string.
    print_repr: bool
        Whether to print the node representation upon initialisation.


    Models are built from the interplay of *Nodes* to form a (directed,
    acyclic) computational graph.
github google / grr / grr / core / grr_response_core / lib / communicator.py
Returns:
      None
    Raises:
      rdf_crypto.VerificationError: A signature and a key were both given but
                                    verification fails.

    """
    if self.cipher_metadata.signature and remote_public_key:
      GRR_RSA_OPERATIONS.Increment()
      remote_public_key.Verify(self.serialized_cipher,
                               self.cipher_metadata.signature)
      return True


class Communicator(with_metaclass(abc.ABCMeta, object)):
  """A class responsible for encoding and decoding comms."""
  server_name = None
  common_name = None

  def __init__(self, certificate=None, private_key=None):
    """Creates a communicator.

    Args:
       certificate: Our own certificate.
       private_key: Our own private key.
    """
    self.private_key = private_key
    self.certificate = certificate
    self._ClearServerCipherCache()

    # A cache for encrypted ciphers
github KrishnaswamyLab / graphtools / graphtools / base.py
----------
        kernel : array-like, shape=[n_samples, n_samples]
            Kernel matrix.

        Returns
        -------
        Adjacency matrix, shape=[n_samples, n_samples]
        """

        weight = kernel.copy()
        self._diagonal = weight.diagonal().copy()
        weight = utils.set_diagonal(weight, 0)
        return weight


class DataGraph(with_metaclass(abc.ABCMeta, Data, BaseGraph)):
    """Abstract class for graphs built from a dataset

    Parameters
    ----------

    data : array-like, shape=[n_samples,n_features]
        accepted types: `numpy.ndarray`, `scipy.sparse.spmatrix`.

    n_pca : {`int`, `None`, `bool`, 'auto'}, optional (default: `None`)
        number of PC dimensions to retain for graph building.
        If n_pca in `[None,False,0]`, uses the original data.
        If `True` then estimate using a singular value threshold
        Note: if data is sparse, uses SVD instead of PCA
        TODO: should we subtract and store the mean?

    rank_threshold : `float`, 'auto', optional (default: 'auto')