def test_bucketise_bucketises_by_putting_things_in_a_bucketing_bucket():
    assert bucketise(iter([1, 1, 0, 1]), bool) == {True: [1, 1, 1], False: [0]}
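
A minimal sketch of what bucketise appears to do, inferred from the test above and the call sites below (the project's actual helper may differ in signature and return type):

from collections import defaultdict
from typing import Callable, DefaultDict, Iterable, List, TypeVar

_T = TypeVar('_T')
_K = TypeVar('_K')


def bucketise(iterable: Iterable[_T], key: Callable[[_T], _K]) -> DefaultDict[_K, List[_T]]:
    # Group items into lists keyed by ``key(item)``, preserving input order within each bucket.
    buckets: DefaultDict[_K, List[_T]] = defaultdict(list)
    for item in iterable:
        buckets[key(item)].append(item)
    return buckets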
@classmethod
def from_lua_table(cls, lua_table: Dict[Any, Any]) -> WeakAuras:
    auras = (
        cls.Meta.model.parse_obj(a) for a in lua_table['displays'].values() if a.get('url')
    )
    return cls(entries=bucketise(auras, key=lambda a: a.url.parts[1]))
async def match_toc_ids(manager: Manager, leftovers: FrozenSet[AddonFolder]) -> MatchGroups:
    "Attempt to match add-ons from TOC-file source ID entries."

    def bucket_keyer(value: AddonFolder):
        # Key each folder by the merged defn set that its TOC-derived defns intersect.
        return next(d for d in defns if value.defns_from_toc & d)

    def sort_keyer(value: Defn):
        return _sources_to_sort_weights[value.source]

    matches = [a for a in sorted(leftovers) if a.defns_from_toc]
    defns = list(merge_intersecting_sets(a.defns_from_toc for a in matches))
    return [(f, sorted(b, key=sort_keyer)) for b, f in bucketise(matches, bucket_keyer).items()]
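
merge_intersecting_sets is assumed here to collapse any input sets that share members into their union, so each folder's TOC-derived defns map onto exactly one merged group; a naive sketch under that assumption:

from typing import FrozenSet, Iterable, Iterator, List, TypeVar

_T = TypeVar('_T')


def merge_intersecting_sets(it: Iterable[FrozenSet[_T]]) -> Iterator[FrozenSet[_T]]:
    # Union every incoming set with the already-merged sets it overlaps,
    # keeping the accumulated sets pairwise disjoint.
    merged: List[FrozenSet[_T]] = []
    for this in it:
        for other in [o for o in merged if this & o]:
            merged.remove(other)
            this = this | other
        merged.append(this)
    yield from merged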
async def match_toc_names(manager: Manager, leftovers: FrozenSet[AddonFolder]) -> MatchGroups:
    "Attempt to match add-ons from TOC-file name entries."

    def normalise(value: str):
        return re.sub(r'[^0-9A-Za-z]', '', value.casefold())

    await manager.synchronise()

    norm_to_items = bucketise(manager.catalogue.__root__, key=lambda i: normalise(i.name))
    matches = ((l, norm_to_items.get(normalise(l.name))) for l in sorted(leftovers))
    return [([l], uniq(Defn(i.source, i.id) for i in m)) for l, m in matches if m]
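
The normalisation above lower-cases a name and strips everything that is not alphanumeric, so names that differ only in spacing or punctuation fall into the same bucket; a quick illustration with a made-up add-on name:

import re


def normalise(value: str) -> str:
    return re.sub(r'[^0-9A-Za-z]', '', value.casefold())


assert normalise('Deadly Boss Mods (DBM)') == normalise('deadly-boss-mods-dbm') == 'deadlybossmodsdbm'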
async def search(self, search_terms: str, limit: int) -> Dict[Defn, Pkg]:
    "Search the master catalogue for packages by name."
    import heapq
    import string

    from jellyfish import jaro_winkler

    trans_table = str.maketrans(dict.fromkeys(string.punctuation, ' '))

    def normalise(value: str):
        return value.casefold().translate(trans_table).strip()

    await self.synchronise()

    s = normalise(search_terms)
    tokens_to_defns = bucketise(
        (
            (normalise(i.name), (i.source, i.id))
            for i in self.catalogue.__root__
            if self.config.game_flavour in i.compatibility
        ),
        key=lambda v: v[0],
    )

    # TODO: weigh matches under threshold against download count
    matches = heapq.nlargest(
        limit, ((jaro_winkler(s, n), n) for n in tokens_to_defns.keys()), key=lambda v: v[0]
    )
    defns = [Defn(*d) for _, m in matches for _, d in tokens_to_defns[m]]
    results = await self.resolve(defns)
    pkgs_by_defn = {d.with_name(r.slug): r for d, r in results.items() if is_pkg(r)}
    return pkgs_by_defn
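
The scoring step in search ranks normalised catalogue names by Jaro-Winkler similarity against the query and keeps the limit best hits; a standalone illustration of that step with made-up names:

import heapq

from jellyfish import jaro_winkler

query = 'weakauras'
names = ['deadly boss mods', 'details damage meter', 'weakauras companion']
top = heapq.nlargest(2, ((jaro_winkler(query, n), n) for n in names), key=lambda v: v[0])
# ``top`` is a list of (score, name) pairs, highest-scoring names first.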
async def resolve(self, defns: Sequence[Defn], with_deps: bool = False) -> Dict[Defn, Any]:
    "Resolve definitions into packages."
    if not defns:
        return {}

    await self.synchronise()

    defns_by_source = bucketise(defns, key=lambda v: v.source)
    results = await gather(self.resolvers[s].resolve(b) for s, b in defns_by_source.items())
    results_by_defn = dict_chain(defns, None, *(r.items() for r in results))
    if with_deps:
        results_by_defn.update(await self._resolve_deps(results_by_defn.values()))
    return results_by_defn
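
dict_chain is assumed to seed a mapping of every requested Defn to a default value and then overlay the per-resolver results on top, so definitions no resolver claimed still appear in the output mapped to None; a sketch under that assumption:

from typing import Any, Dict, Iterable, Tuple, TypeVar

_K = TypeVar('_K')


def dict_chain(
    keys: Iterable[_K], default: Any, *overrides: Iterable[Tuple[_K, Any]]
) -> Dict[_K, Any]:
    # Map every key to ``default`` first, then apply each override mapping in order.
    chained: Dict[_K, Any] = dict.fromkeys(keys, default)
    for override in overrides:
        chained.update(override)
    return chained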