How to use the cachetools.TTLCache class in cachetools

To help you get started, we’ve selected a few cachetools.TTLCache examples based on popular ways it is used in public projects.

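Before working through the project examples, it helps to see TTLCache on its own. It behaves like a dict whose entries expire ttl seconds after they are written, and it evicts the least recently used entry once maxsize is reached. A minimal sketch:

from cachetools import TTLCache

# Hold at most 100 entries; each entry expires 60 seconds after insertion.
cache = TTLCache(maxsize=100, ttl=60)

cache["answer"] = 42     # insert like a dict
cache.get("answer")      # -> 42 while the entry is fresh
# Once the TTL elapses the entry is treated as missing:
# cache["answer"] raises KeyError, cache.get("answer") returns None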

github nocproject / noc / ip / models / prefixprofile.py
# Include/Exclude broadcast & network addresses from prefix
    prefix_special_address_policy = StringField(
        choices=[("I", "Include"), ("X", "Exclude")], default="X"
    )
    #
    tags = ListField(StringField())
    # Integration with external NRI and TT systems
    # Reference to remote system object has been imported from
    remote_system = PlainReferenceField(RemoteSystem)
    # Object id in remote system
    remote_id = StringField()
    # Object id in BI
    bi_id = LongField(unique=True)

    _id_cache = cachetools.TTLCache(maxsize=100, ttl=60)
    _bi_id_cache = cachetools.TTLCache(maxsize=100, ttl=60)

    def __str__(self):
        return self.name

    @classmethod
    @cachetools.cachedmethod(operator.attrgetter("_id_cache"), lock=lambda _: id_lock)
    def get_by_id(cls, id):
        return PrefixProfile.objects.filter(id=id).first()

    @classmethod
    @cachetools.cachedmethod(operator.attrgetter("_bi_id_cache"), lock=lambda _: id_lock)
    def get_by_bi_id(cls, id):
        return PrefixProfile.objects.filter(bi_id=id).first()
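
The snippet pairs a class-level TTLCache with cachetools.cachedmethod and a module-level lock so that concurrent lookups stay thread-safe. A self-contained sketch of the same pattern, with the database query replaced by a hypothetical _db dict:

import operator
import threading

import cachetools

id_lock = threading.Lock()

class Profile:
    # hypothetical stand-in for the Document subclass above
    _db = {1: "default", 2: "customer"}
    _id_cache = cachetools.TTLCache(maxsize=100, ttl=60)

    @classmethod
    @cachetools.cachedmethod(operator.attrgetter("_id_cache"), lock=lambda _: id_lock)
    def get_by_id(cls, id):
        return cls._db.get(id)  # stands in for PrefixProfile.objects.filter(...).first()

Profile.get_by_id(1) touches _db once; repeated calls within the 60-second TTL are answered from _id_cache without re-running the lookup.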

github fake-name / ReadableWebProxy / WebMirror / UrlUpserter.py
def __init__(self, msg_queue, db_interface):
		self.response_queue = msg_queue
		self.log = logging.getLogger("Main.LinkAggregator")

		try:
			signal.signal(signal.SIGINT, signal.SIG_IGN)
		except ValueError:
			self.log.warning("Cannot configure job fetcher task to ignore SIGINT. May be an issue.")

		# LRU Cache with a maxsize of 1 million, and a TTL of 6 hours
		self.seen = cachetools.TTLCache(maxsize=1000 * 1000, ttl=60 * 60 * 6)

		self.queue_items = 0
		self.link_count = 0
		self.amqpUpdateCount = 0
		self.deathCounter = 0

		self.batched_links = []
		self.pending_upserts = []

		self.db_int = db_interface
		self.executor = concurrent.futures.ThreadPoolExecutor(max_workers=6)

		self.check_init_func()
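
Here TTLCache doubles as a bounded "seen set": membership checks deduplicate URLs, and entries drop out on their own after six hours, so the set can neither grow without limit nor go stale forever. A sketch of that usage (should_fetch is a hypothetical name):

import cachetools

# Remember up to a million URLs; each is forgotten six hours after insertion.
seen = cachetools.TTLCache(maxsize=1000 * 1000, ttl=60 * 60 * 6)

def should_fetch(url):
    if url in seen:
        return False     # fetched recently; skip it
    seen[url] = True     # the value is irrelevant; only membership matters
    return True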

github graphite-project / carbon / lib / carbon / aggregator / rules.py
def get_cache():
  ttl = settings.CACHE_METRIC_NAMES_TTL
  size = settings.CACHE_METRIC_NAMES_MAX
  if ttl > 0 and size > 0:
    return TTLCache(size, ttl)
  elif size > 0:
    return LRUCache(size)
  else:
    return dict()
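
get_cache() chooses the implementation from configuration: with both a TTL and a size bound it returns a TTLCache, with only a size bound an LRUCache, and otherwise a plain unbounded dict. All three share the mapping interface, so callers never need to know which one they got. A hedged usage sketch, continuing from the function above with made-up settings values:

from cachetools import TTLCache, LRUCache

class settings:  # hypothetical stand-in for carbon's settings object
    CACHE_METRIC_NAMES_TTL = 300    # seconds
    CACHE_METRIC_NAMES_MAX = 10000  # entries

cache = get_cache()                 # -> TTLCache(10000, 300) with these values
cache["carbon.agents.foo.cpuUsage"] = True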

github nocproject / noc / main / models / handler.py
@six.python_2_unicode_compatible
class Handler(Document):
    meta = {"collection": "handlers", "strict": False, "auto_create_index": False}

    handler = StringField(primary_key=True)
    name = StringField()
    description = StringField()
    allow_config_filter = BooleanField()
    allow_config_validation = BooleanField()
    allow_config_diff = BooleanField()
    allow_config_diff_filter = BooleanField()
    allow_housekeeping = BooleanField()
    allow_resolver = BooleanField()
    allow_threshold = BooleanField()

    _id_cache = cachetools.TTLCache(maxsize=1000, ttl=60)

    def __str__(self):
        return self.name

    @classmethod
    @cachetools.cachedmethod(operator.attrgetter("_id_cache"), lock=lambda _: id_lock)
    def get_by_id(cls, id):
        return Handler.objects.filter(handler=id).first()

    def get_handler(self):
        return get_handler(self.handler)

github Ekultek / letmein / lib / settings.py
)

import encryption.aes_encryption
from lib.output import (
    info,
    warning,
    fatal,
    prompt
)

try:
    xrange
except NameError:
    # Python 3 has no xrange; fall back to range
    xrange = range

LETMEIN_CACHE = TTLCache(maxsize=2, ttl=10)
LOCK = RLock()
HOME = os.path.expanduser("~")
MAIN_DIR = "{}/.letmein".format(HOME)
DATABASE_FILE = "{}/letmein.db".format(MAIN_DIR)
VERSION = "0.0.1.11({})"
VERSION_STRING = "\033[31m\033[1m*beta\033[0m" if VERSION.count(".") == 3 else "\033[1m\033[36m~alpha\033[0m" if VERSION.count(".") == 2 else "\033[1m\033[32m+stable\033[0m"
INIT_FILE = "{}/.init".format(MAIN_DIR)
BANNER = """\n\033[32m
   __      _                _____\033[0m\033[32m      
  / /  ___| |_  /\/\   ___  \_   \ \033[0m
 / /  / _ \ __|/    \ / _ \  / /\/ '_ \ \033[0m\033[32m 
/ /__|  __/ |_/ /\/\ \  __/\/ /_ | | | | \033[0m
\____/\___|\__\/    \/\___\____/ |_| |_| \033[0m\033[32m[]\033[0m[]\033[0m\033[32m[]\033[0m[]
Version: v{}\033[0m
\n""".format(VERSION.format(VERSION_STRING))

github gramener / gramex / gramex / apps / admin2 / gramexadmin.py
import gramex
import gramex.handlers
import json
import os
import re
import sys
from binascii import b2a_base64
from cachetools import TTLCache
from gramex.config import app_log
from gramex.http import INTERNAL_SERVER_ERROR
from six.moves import StringIO
from tornado.gen import coroutine, Return
from tornado.web import HTTPError


contexts = TTLCache(maxsize=100, ttl=1800)
# A global mapping of cid: to filenames
cidmap = TTLCache(maxsize=100, ttl=1800)


def get_auth_conf(kwargs):
    '''
    Expects kwargs.authhandler to point to an AuthHandler key in gramex config.
    The AuthHandler must have a lookup.
    Returns the authhandler, its configuration, and the FormHandler data configuration.
    Used in AdminFormHandler *and* in index.html. So keep it as a separate function.
    '''
    if 'authhandler' not in kwargs:
        raise ValueError('Missing authhandler')
    authhandler = kwargs['authhandler']
    # The authhandler key may be prefixed with a namespace. Find the *first* matching key
    for key, auth_conf in gramex.conf.get('url', {}).items():
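
The excerpt is cut off above, but the relevant pattern sits at the top: two module-level TTLCaches give each connection id ("cid") thirty minutes of server-side state, and abandoned entries clean themselves up once ttl=1800 seconds have passed since they were written. A hedged sketch of how such caches are used (the cid and payload values are made up):

cid = "conn-42"                       # hypothetical connection id
contexts[cid] = {"user": "alice"}     # expires 1800s after this write
cidmap[cid] = "upload-conn-42.csv"    # hypothetical cid -> filename mapping

state = contexts.get(cid)             # None once the entry has expired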

github nocproject / noc / pm / models / metrictype.py
]
    )
    # Text description
    description = StringField(required=False)
    # Measure name, like 'kbit/s'
    # Compatible to Grafana
    measure = StringField()
    # Optional required capability
    required_capability = PlainReferenceField(Capability)
    # Object id in BI, used for counter context hashing
    bi_id = LongField(unique=True)
    #
    category = ObjectIdField()

    _id_cache = cachetools.TTLCache(maxsize=100, ttl=60)
    _name_cache = cachetools.TTLCache(maxsize=100, ttl=60)
    _bi_id_cache = cachetools.TTLCache(maxsize=100, ttl=60)

    def __str__(self):
        return self.name

    @property
    def json_data(self):
        r = {
            "name": self.name,
            "$collection": self._meta["json_collection"],
            "uuid": self.uuid,
            "scope__name": self.scope.name,
            "field_name": self.field_name,
            "field_type": self.field_type,
            "description": self.description,
            "measure": self.measure,

github avrae / avrae / cogs5e / models / character.py
from cogs5e.models.sheet.spellcasting import Spellbook, SpellbookSpell
from cogs5e.models.sheet.statblock import DESERIALIZE_MAP as _DESER, StatBlock
from cogs5e.sheets.abc import SHEET_VERSION
from utils.functions import search_and_select

log = logging.getLogger(__name__)


# constants at bottom (yay execution order)

class Character(StatBlock):
    # cache characters for 5 seconds to avoid race conditions
    # this makes sure that multiple calls to Character.from_ctx() in the same invocation or two simultaneous ones
    # retrieve/modify the same Character state
    # caches based on (owner, upstream)
    _cache = cachetools.TTLCache(maxsize=50, ttl=5)

    def __init__(self, owner: str, upstream: str, active: bool, sheet_type: str, import_version: int,
                 name: str, description: str, image: str, stats: BaseStats, levels: Levels, attacks: AttackList,
                 skills: Skills, resistances: Resistances, saves: Saves, ac: int, max_hp: int, hp: int, temp_hp: int,
                 cvars: dict, options: dict, overrides: dict, consumables: list, death_saves: dict,
                 spellbook: Spellbook,
                 live, race: str, background: str, **kwargs):
        if kwargs:
            log.warning(f"Unused kwargs: {kwargs}")
        # sheet metadata
        self._owner = owner
        self._upstream = upstream
        self._active = active
        self._sheet_type = sheet_type
        self._import_version = import_version
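
The class-level _cache above exists to deduplicate concurrent loads: two lookups that resolve the same (owner, upstream) key within the five-second TTL receive the same Character object, so they mutate shared state instead of racing on separate copies. A self-contained sketch of the idea (the real from_ctx in avrae has a different signature; loader is a hypothetical callable):

import cachetools

_cache = cachetools.TTLCache(maxsize=50, ttl=5)

def load_character(owner, upstream, loader):
    key = (owner, upstream)
    try:
        return _cache[key]         # loaded within the last 5 seconds
    except KeyError:
        character = loader(owner, upstream)
        _cache[key] = character    # later callers share this object
        return character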

github DurianStallSingapore / Zilliqa-Mining-Proxy / zilpool / database / basemodel.py
@cached(cache=TTLCache(maxsize=64, ttl=1))
def get_cur_settings():
    from .ziladmin import SiteSettings
    return SiteSettings.get_setting()
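
Unlike cachedmethod, the cached decorator wraps a plain function; with ttl=1 it collapses bursts of calls in the same second into a single database read. cached also accepts a lock for thread-safety; a minimal sketch (load_settings and its payload are hypothetical):

from threading import RLock

from cachetools import cached, TTLCache

@cached(cache=TTLCache(maxsize=64, ttl=1), lock=RLock())
def load_settings():
    # stands in for SiteSettings.get_setting(); runs at most once per second
    return {"pow_enabled": True}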

github nocproject / noc / services / discovery / jobs / periodic / interfacestatus.py
## NOC modules
from noc.services.discovery.jobs.base import DiscoveryCheck
from noc.inv.models.interface import Interface
from noc.inv.models.interfaceprofile import InterfaceProfile

ips_lock = threading.RLock()


class InterfaceStatusCheck(DiscoveryCheck):
    """
    Interface status discovery
    """
    name = "interfacestatus"
    required_script = "get_interface_status_ex"

    _ips_cache = cachetools.TTLCache(maxsize=10, ttl=60)

    @classmethod
    @cachetools.cachedmethod(operator.attrgetter("_ips_cache"), lock=lambda _: ips_lock)
    def get_profiles(cls, x):
        return list(InterfaceProfile.objects.filter(status_discovery=True))

    def handler(self):
        def get_interface(name):
            if_name = interfaces.get(name)
            if if_name:
                return if_name
            for iname in self.object.profile.get_interface_names(i["interface"]):
                if_name = interfaces.get(iname)
                if if_name:
                    return if_name
            return None