How to use the cachecontrol.caches.file_cache.FileCache class in CacheControl

To help you get started, we’ve selected a few CacheControl examples, based on popular ways it is used in public projects.


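Before the project examples, here is a minimal sketch of the basic pattern they all share: wrap a requests session in CacheControl and point FileCache at a local directory. The directory name .web_cache and the URL are illustrative, not taken from any of the projects below.

import requests
from cachecontrol import CacheControl
from cachecontrol.caches.file_cache import FileCache

# Wrap a plain requests session; cacheable responses are written to disk
# under ".web_cache" and reused for later identical requests.
session = CacheControl(requests.Session(), cache=FileCache('.web_cache'))
response = session.get('https://example.com/')
print(response.status_code)
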
github dvershinin / lastversion / lastversion / lastversion.py
            repo_data = yaml.safe_load(fpi)
            if 'repo' in repo_data:
                if 'nginx-extras' in repo:
                    repo_data['module_of'] = 'nginx'
                name = os.path.splitext(os.path.basename(repo))[0]
                if 'module_of' in repo_data:
                    name = '{}-module-{}'.format(repo_data['module_of'], name)
                repo = repo_data['repo']
                repo_data['name'] = name

    # find the right hosting for this repo
    project_holder = HolderFactory.get_instance_for_repo(repo)

    # we are completely "offline" for 1 hour, not even making conditional requests
    # heuristic=ExpiresAfter(hours=1)   <- make configurable
    with CacheControl(project_holder, cache=FileCache(cache_dir)) as s:
        release = s.get_latest(pre_ok=pre_ok, major=major)
    s.close()

    # bail out, found nothing that looks like a release
    if not release:
        return None

    version = release['version']
    tag = release['tag_name']

    # return the release if we've reached far enough:
    if output_format == 'version':
        return version

    if output_format == 'json':
        release['version'] = str(version)
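
The commented-out heuristic=ExpiresAfter(hours=1) above hints at how the one-hour "offline" behaviour could be made explicit. A hedged sketch of that wiring, using a plain requests session instead of the project's holder object and an assumed cache path:

import requests
from cachecontrol import CacheControl
from cachecontrol.caches.file_cache import FileCache
from cachecontrol.heuristics import ExpiresAfter

# Cache responses on disk and treat them as fresh for one hour, so no
# requests (not even conditional ones) are sent during that window.
cache_dir = '/tmp/lastversion-cache'  # assumed path, not from the project
session = CacheControl(requests.Session(),
                       cache=FileCache(cache_dir),
                       heuristic=ExpiresAfter(hours=1))
response = session.get('https://api.github.com/')
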
github python-poetry / poetry / poetry / repositories / legacy_repository.py
        self._inspector = Inspector()
        self._cache_dir = Path(CACHE_DIR) / "cache" / "repositories" / name
        self._cache = CacheManager(
            {
                "default": "releases",
                "serializer": "json",
                "stores": {
                    "releases": {"driver": "file", "path": str(self._cache_dir)},
                    "packages": {"driver": "dict"},
                    "matches": {"driver": "dict"},
                },
            }
        )

        self._session = CacheControl(
            requests.session(), cache=FileCache(str(self._cache_dir / "_http"))
        )

        url_parts = urlparse.urlparse(self._url)
        if not url_parts.username and self._auth:
            self._session.auth = self._auth

        if self._cert:
            self._session.verify = str(self._cert)

        if self._client_cert:
            self._session.cert = str(self._client_cert)

        self._disable_cache = disable_cache
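
CacheControl returns the same session object it is given, with a caching adapter mounted on it, which is why poetry can keep setting auth, verify, and cert on the wrapped session afterwards. A small sketch of that behaviour, with placeholder paths and credentials rather than poetry's own values:

import requests
from cachecontrol import CacheControl
from cachecontrol.caches.file_cache import FileCache

# The wrapped object is still a requests.Session, so normal session
# attributes apply. Paths and credentials below are placeholders.
session = CacheControl(requests.session(), cache=FileCache('/tmp/repo-cache/_http'))
session.auth = ('user', 'secret')           # like self._auth in the example
session.verify = '/path/to/ca-bundle.crt'   # like self._cert
session.cert = '/path/to/client-cert.pem'   # like self._client_cert
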
github poppu-mtg / StackIt / StackIt / scraper.py
import json, os, re
import requests
from . import config, globals

from cachecontrol import CacheControl
from cachecontrol.caches.file_cache import FileCache

SESSION = CacheControl(requests.Session(),
                       cache=FileCache(os.path.join(globals.CACHE_PATH, '.web_cache')))

#needed to remove the accent in 'Pokemon'
import unicodedata

from lxml import html
from StackIt.globals import Card, specmana, mtgreprints

def download_scan(name, expansion, number):
    if number is None:
        number = '0'
    expansion = expansion.lower()
    if expansion in globals.setmappings.keys():
        expansion = globals.setmappings[expansion]

    name2 = ''.join(e for e in name if e.isalnum())
    print([name2, expansion, number])
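
Because SESSION is created once at module import, every helper in the scraper can reuse it, and repeated requests for the same scan URL can be answered from the .web_cache directory instead of hitting the network again (subject to the response's caching headers). A usage sketch building on the SESSION defined above; the URL and filename are placeholders:

# Reuse the module-level cached session; a second request for the same
# URL can be served from the on-disk cache if the response was cacheable.
response = SESSION.get('https://example.com/cards/scan-001.jpg')
with open('scan-001.jpg', 'wb') as fh:
    fh.write(response.content)
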
github RedHatInsights / insights-core / insights / core / remote_resource.py
    def __init__(self):

        session = requests.Session()

        if not self.__class__._cache:
            if self.backend == "RedisCache":
                pool = redis.ConnectionPool(host=self.redis_host, port=self.redis_port, db=0)
                r = redis.Redis(connection_pool=pool)
                self.__class__._cache = RedisCache(r)
            elif self.backend == "FileCache":
                self.__class__._cache = FileCache(self.file_cache_path)
            else:
                self.__class__._cache = DictCache()

        session = CacheControl(session, heuristic=DefaultHeuristic(self.expire_after), cache=self.__class__._cache)

        super(CachedRemoteResource, self).__init__(session)
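
This example chooses between the cache backends that cachecontrol ships with at runtime: RedisCache, FileCache, and DictCache (the DefaultHeuristic it passes is insights-core's own class and is left out here). A standalone sketch of the same selection logic, with assumed parameter names and defaults rather than the insights-core configuration:

import redis
import requests
from cachecontrol import CacheControl
from cachecontrol.cache import DictCache
from cachecontrol.caches.file_cache import FileCache
from cachecontrol.caches.redis_cache import RedisCache

def build_cached_session(backend, file_cache_path='/tmp/http-cache',
                         redis_host='localhost', redis_port=6379):
    # Pick a cache store by name; fall back to an in-memory dict cache.
    if backend == 'RedisCache':
        cache = RedisCache(redis.Redis(host=redis_host, port=redis_port, db=0))
    elif backend == 'FileCache':
        cache = FileCache(file_cache_path)
    else:
        cache = DictCache()
    return CacheControl(requests.Session(), cache=cache)
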
github autofitcloud / git-remote-aws / gitRemoteAws / pull_ec2.py
def t0_raw(self, ec2catalog):
    # non-cached
    # https://3.python-requests.org/
    # from requests import HTTPSession
    # http = HTTPSession()
    # r = http.request('get', ec2catalog)

    # cached https://cachecontrol.readthedocs.io/en/latest/
    # bugfix 2020-01-27 I wasn't passing FileCache before
    from cachecontrol.caches.file_cache import FileCache
    sess = requests.session()
    cached_sess = CacheControl(sess, cache=FileCache('/tmp/git-remote-aws-www.ec2instances.info'))
    r = cached_sess.request('get', ec2catalog)

    r_json = r.json()
    df_json = json.dumps(r_json, indent=4, sort_keys=True)

    # prep save
    #logger.debug("mkdir %s"%self.fn['awsCat'])
    os.makedirs(self.fn['awsCat'], exist_ok=True)

    # save raw
    fn_temp = os.path.join(self.fn['awsCat'], 't0_raw.json')
    with open(fn_temp, 'w') as fh:
        fh.write(df_json)

    return r_json, df_json
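
A short follow-up on this last example: cached_sess is still a requests session, so the explicit cached_sess.request('get', ec2catalog) call is interchangeable with the shorter form below, and either way the cached responses are stored as files under the FileCache directory /tmp/git-remote-aws-www.ec2instances.info.

# Equivalent to cached_sess.request('get', ec2catalog)
r = cached_sess.get(ec2catalog)
r_json = r.json()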