# Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
async def test_map_vm_structure(self):
    '''
    Test that _map_vm_structure translates a raw OpenStack VM record into
    the driver's VM object: id, name, metadata, state and ip copied over,
    and the flavor/image ids resolved to names through the bidict maps.
    '''
    # FIX: patchers started with .start() were never stopped, leaking the
    # mocks into every subsequent test; stop them all when this test ends.
    self.addCleanup(patch.stopall)
    mock_list_flavors = patch('vmshepherd.iaas.OpenStackDriver._list_flavors').start()
    mock_list_flavors.return_value = futurized([])
    mock_list_images = patch('vmshepherd.iaas.OpenStackDriver._list_images').start()
    mock_list_images.return_value = futurized([])
    osd = OpenStackDriver(self.config)
    # Pre-seed the id <-> name maps so _map_vm_structure can resolve names
    # without relying on the (mocked-out) flavor/image listing calls.
    osd.flavors_map = bidict()
    osd.flavors_map['testflavorid'] = 'testflavorname'
    osd.images_map = bidict()
    osd.images_map['testimgid'] = 'testimgname'
    result = osd._map_vm_structure(self.vm)
    self.assertEqual(result.id, '099fds8f9ds89fdsf')
    self.assertEqual(result.name, 'test-vm-name')
    self.assertEqual(result.metadata, {})
    self.assertEqual(str(result.state), 'VmState.RUNNING')
    self.assertEqual(result.ip, ['10.185.138.36'])
    self.assertEqual(result.flavor, 'testflavorname')
    self.assertEqual(result.image, 'testimgname')
def unify(initial_exprs: List[ir0.Expr],
local_var_definitions: Mapping[str, ir0.Expr],
patterns: List[ir0.Expr],
expr_variables: Set[str],
pattern_variables: Set[str],
identifier_generator: Iterable[str],
verbose: bool) -> UnificationResult:
# We need to replace local literals before doing the unification, to avoid assuming that e.g. T in an expr
# is equal to T in a pattern just because they have the same name.
lhs_type_literal_names = set(local_var_definitions.keys())
for expr in itertools.chain(initial_exprs, local_var_definitions.values()):
for expr_literal in expr.get_free_vars():
lhs_type_literal_names.add(expr_literal.cpp_type)
unique_var_name_by_expr_type_literal_name = bidict({lhs_type_literal_name: next(identifier_generator)
for lhs_type_literal_name in lhs_type_literal_names})
unique_var_name_by_pattern_type_literal_name = bidict({pattern_literal.cpp_type: next(identifier_generator)
for pattern in patterns
for pattern_literal in pattern.get_free_vars()})
unique_var_names = set()
for expr_var_name, unique_var_name in unique_var_name_by_expr_type_literal_name.items():
if expr_var_name in expr_variables or expr_var_name in local_var_definitions:
unique_var_names.add(unique_var_name)
for pattern_var_name, unique_var_name in unique_var_name_by_pattern_type_literal_name.items():
if pattern_var_name in pattern_variables:
unique_var_names.add(unique_var_name)
literal_expr_by_unique_name: Dict[str, ir0.AtomicTypeLiteral] = dict()
expr_variables: Set[str],
pattern_variables: Set[str],
identifier_generator: Iterator[str],
verbose: bool) -> UnificationResult:
# We need to replace local literals before doing the unification, to avoid assuming that e.g. T in an expr
# is equal to T in a pattern just because they have the same name.
lhs_type_literal_names = set(local_var_definitions.keys())
for expr in itertools.chain(initial_exprs, local_var_definitions.values()):
for expr_literal in expr.free_vars:
lhs_type_literal_names.add(expr_literal.cpp_type)
unique_var_name_by_expr_type_literal_name = bidict({lhs_type_literal_name: next(identifier_generator)
for lhs_type_literal_name in lhs_type_literal_names})
unique_var_name_by_pattern_type_literal_name = bidict({pattern_literal.cpp_type: next(identifier_generator)
for pattern in patterns
for pattern_literal in pattern.free_vars})
unique_var_names = set()
for expr_var_name, unique_var_name in unique_var_name_by_expr_type_literal_name.items():
if expr_var_name in expr_variables or expr_var_name in local_var_definitions:
unique_var_names.add(unique_var_name)
for pattern_var_name, unique_var_name in unique_var_name_by_pattern_type_literal_name.items():
if pattern_var_name in pattern_variables:
unique_var_names.add(unique_var_name)
literal_expr_by_unique_name: Dict[str, ir.AtomicTypeLiteral] = dict()
lhs = tuple(_replace_var_names_in_expr(expr, unique_var_name_by_expr_type_literal_name)
for expr in initial_exprs)
rhs = tuple(_replace_var_names_in_expr(pattern, unique_var_name_by_pattern_type_literal_name)
def factor(sequences):
layer = next(iter(sequences)).layer
if layer == 0:
return list(sequences)
if len(sequences) == 1:
return list(sequences)
# holds the attributes/substances/modes as individual sets in primitives[0]/primitives[1]/primitives[2] respectively
primitives = (set(seme) for seme in zip(*sequences))
# same but now there is a bijection between the coordinate system and the primitives semes
primitives = [bidict({i: s for i, s in enumerate(p_set)}) for p_set in primitives]
# hold the mapping coordinate -> parser
scripts = {tuple(primitives[i].inv[seme] for i, seme in enumerate(s)):s for s in sequences}
# hold the primitive as coodinate described in scripts keys
shape = tuple(len(p) for p in primitives)
topology = np.full(shape, False, dtype=bool)
for s in scripts:
topology[s[0]][s[1]][s[2]] = True
# calculate the relations, ie for a seq, the others seq that can be factorized with it
relations = {}
_computed = set()
for seq in scripts:
if not topology[seq[0]][seq[1]][seq[2]]:
continue
1: 'Nor\'lander',
2: 'Barrean',
3: 'Emayu',
4: 'Therish',
5: 'Kessian'
})
# Lookup tables mapping the save file's integer codes to their display
# strings; bidict also provides the inverse (name -> code) direction.
# Character alignment/axiom codes.
axiomtable = bidict({
1: 'Atheistic',
2: 'Druidic',
3: 'Virtuous',
4: 'Nefarious',
5: 'Agnostic'
})
# Character class codes.
classtable = bidict({
1: 'Fighter',
2: 'Rogue',
3: 'Magick User',
4: 'Healer',
5: 'Ranger'
})
picidtable = bidict({
1: 'Male #1',
2: 'Male #2',
3: 'Male #3',
4: 'Male #4',
5: 'Male #5',
6: 'Male #6',
7: 'Female #1',
8: 'Female #2',
# Trap type codes; 0 means the container/tile is untrapped.
traptable = bidict({
0: 'none',
1: 'Steam Bath',
2: 'The Hobbler',
3: 'Barbed Darts',
4: 'Bixby\'s Noxious Cloud',
5: 'Festering Stew',
6: 'Thieves\' Surprise',
7: 'Wicked Sunrise',
8: 'Yara\'s Vengeance',
9: 'Dragonbite',
10: 'Sublime Armageddon'
})
# Container open/closed state codes.
containertable = bidict({
0: 'none',
1: 'closed',
2: 'open',
3: 'broken',
4: 'toggle 1',
5: 'toggle 2'
})
tilecontenttypetable = bidict({
0: '(none)',
1: 'Container (no open/close change - barrels, etc)',
2: 'Container (chests, dressers, etc)',
3: '(broken container type, don\'t use)',
4: 'Container (bag)',
5: 'Door',
6: 'Map Link',
counters : ``Dict[str, Dict[str, int]]``
Element statistics for datasets.
min_count : ``Dict[str, int]``, optional (default= ``dict()`` )
Defines the minimum number of occurrences when some counter are
converted to vocabulary.
no_pad_namespace : ``Set[str]``, optional (default= ``set()`` )
Defines which vocabularies do not have `pad` token.
no_unk_namespace : ``Set[str]``, optional (default= ``set()`` )
Defines which vocabularies do not have `oov` token.
"""
self.no_unk_namespace.update(no_unk_namespace)
self.no_pad_namespace.update(no_pad_namespace)
self.min_count.update(min_count)
for vocab_name, counter in counters.items():
self.vocab[vocab_name] = bidict()
cnt = 0
# Handle unknown token
if vocab_name not in no_unk_namespace:
self.vocab[vocab_name][self._UNK_token] = cnt
cnt += 1
# Handle padding token
if vocab_name not in no_pad_namespace:
self.vocab[vocab_name][self._PAD_token] = cnt
cnt += 1
# Build Vocabulary from Dataset Counter
minn = (min_count[vocab_name]
if min_count and vocab_name in min_count else 0)
for key, value in counter.items():
if value >= minn:
def _interfaces(self):
    """Return a bidirectional map of kernel interface index -> interface name."""
    index_to_name = {}
    for link in ip.get_links():
        attrs = dict(link["attrs"])
        index_to_name[link["index"]] = attrs.get("IFLA_IFNAME")
    return bidict.bidict(index_to_name)
def __init__(self, cmap_data, parse=True):
    """Create a new CMap object from the data (typically the contents)
    of a CMap file or a CMap stream object.

    :param cmap_data: CMap source, as ``bytes`` (will be decoded) or ``str``.
    :param parse: when True, parse immediately; the parse result is stored
        in ``self._results``.
    """
    # FIX: the original tested ``isinstance(cmap_data, str)`` and then called
    # ``.decode()`` — a Python-2 leftover; in Python 3 ``str`` has no
    # ``decode``, only a ``bytes`` payload needs decoding.
    if isinstance(cmap_data, bytes):
        cmap_data = cmap_data.decode()
    self._data = [clean_token(t)
                  for t in PdfParser().iterparse(cmap_data, True)]
    self._maps = bidict()
    self._basemap = None
    self._coderanges = defaultlist(set)
    self._sys_info = []
    self._unicodemap = None
    if parse:
        self._results = self.parse()
    # NOTE(review): removed a stray blocking ``input()`` call here — clearly
    # a debug leftover; a constructor must never wait on stdin.
def __init__(self):
    # Bidirectional str-key -> int-id mapping; presumably kept in sync with
    # ``self.keys`` (insertion order) — confirm against the rest of the class.
    self.mapping_dict = bidict() # key: str -> id: int
    self.keys = []