# Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
from typing import Dict
import unittest
from overrides import overrides,final,EnforceOverrides
class Enforcing(EnforceOverrides):
    """Base class used to exercise EnforceOverrides checking.

    Subclasses must decorate overriding methods with @overrides, and may not
    override methods marked @final.
    """

    # Plain class attributes are exempt from override enforcement.
    classVariableIsOk = "OK?"

    # NOTE: the original defined `finality` twice with identical bodies; the
    # second definition silently shadowed the first, so the duplicate was
    # removed.
    @final
    def finality(self):
        """Marked @final: subclasses may not override this method."""
        return "final"

    def nonfinal1(self, param: int) -> str:
        return "super1"

    def nonfinal2(self):
        return "super2"

    @property
    def nonfinal_property(self):
        return "super_property"
from gi.repository.GLib import MainLoop
from ..module import get_introspection_module
from ..overrides import override
# Load the GObject-Introspection module for the 'i3ipc' namespace; the
# override classes below subclass the introspected types found on it.
i3ipc = get_introspection_module('i3ipc')

# Public names re-exported by this override module; appended to as each
# override class is registered below.
__all__ = []
class Connection(i3ipc.Connection):
    def main(self):
        """Block in a GLib main loop until the i3 IPC connection shuts down."""
        loop = MainLoop()
        # Quit the loop when i3 signals that the IPC connection is going away.
        self.connect('ipc_shutdown', lambda conn: loop.quit())
        loop.run()
# Register this class as the override for the introspected i3ipc.Connection
# and export it from the module.
Connection = override(Connection)
__all__.append('Connection')
class Con(i3ipc.Con):
    """Override adding attribute-style access to container data."""

    def __getattr__(self, name):
        """Resolve unknown attributes dynamically.

        ``nodes`` is special-cased to ``get_nodes()``; any other name is
        looked up as a GObject property.  Unknown names raise
        ``AttributeError`` (as ``__getattr__`` must) instead of leaking the
        ``TypeError`` that ``get_property`` raises.
        """
        if name == 'nodes':
            return self.get_nodes()
        try:
            return self.get_property(name)
        except TypeError:
            # Include the missing name in the error and suppress the
            # misleading "during handling of TypeError" chained traceback.
            raise AttributeError(name) from None
# Register this class as the override for the introspected i3ipc.Con and
# export it alongside Connection.
Con = override(Con)
__all__.append('Con')
@overrides
def compute_mask(self, inputs, mask=None):
    """Propagate the mask through the layer's collapse operation.

    A missing mask stays missing; otherwise the incoming mask is collapsed
    the same way the layer collapses its input.
    """
    # pylint: disable=unused-argument
    return None if mask is None else self.__collapse_tensor(mask)
@overrides
def tokens_to_indices(
self,
tokens: List[str],
vocab: Vocabulary) -> Dict[str, List[int]]:
"""
Takes a list of tokens and converts them to one or more sets of indices.
During the indexing process, each item corresponds to an index in the
vocabulary.
Parameters
----------
vocab : ``Vocabulary``
``vocab`` is used to get the index of each item.
Returns
-------
@overrides
def backward(self, input: torch.Tensor) -> Tuple[torch.Tensor, torch.Tensor]:
"""
Args:
input: Tensor
input tensor [batch, in_channels, H, W]
Returns: out: Tensor , logdet: Tensor
out: [batch, in_channels, H, W], the output of the flow
logdet: [batch], the log determinant of :math:`\partial output / \partial input`
"""
batch, channels, H, W = input.size()
out = input - self.bias
out = out.div(self.log_scale.exp() + 1e-8)
logdet = self.log_scale.sum(dim=0).squeeze(1).mul(H * -W)
@overrides
def compute_mask(self, inputs, mask=None):
    # Deliberately discard any incoming mask: this layer's output should not
    # carry a mask downstream.  NOTE(review): signature matches the Keras
    # Layer.compute_mask contract — confirm against the enclosing class.
    return None
@overrides
def split_sentences(self, text: str) -> List[str]:
    """Split ``text`` into sentences with the spaCy pipeline.

    Each sentence is returned with surrounding whitespace stripped.
    """
    # NOTE(review): Span.string is deprecated in spaCy 3 in favour of
    # Span.text — confirm which spaCy version this project pins.
    doc = self.spacy(text)
    return [span.string.strip() for span in doc.sents]
@overrides
def _instance_type(self):
    # Returns the Instance subclass (the class object itself, not an
    # instance) that this dataset reader produces.
    return TextClassificationInstance
@overrides
def _compute_action_probabilities(
self,
state: GrammarBasedState,
hidden_state: torch.Tensor,
attention_weights: torch.Tensor,
predicted_action_embeddings: torch.Tensor,
) -> Dict[int, List[Tuple[int, Any, Any, Any, List[int]]]]:
# In this section we take our predicted action embedding and compare it to the available
# actions in our current state (which might be different for each group element). For
# computing action scores, we'll forget about doing batched / grouped computation, as it
# adds too much complexity and doesn't speed things up, anyway, with the operations we're
# doing here. This means we don't need any action masks, as we'll only get the right
# lengths for what we're computing.
group_size = len(state.batch_indices)
actions = state.get_valid_actions()