How to use the scrapelib.__init__.DummyObject class in scrapelib

To help you get started, here is an example from the scrapelib source showing how DummyObject is defined and used.
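Before digging into the internals, here is a minimal sketch of everyday scrapelib usage; the constructor arguments shown (requests_per_minute, retry_attempts) are part of the public Scraper API, and the URL is only a placeholder:

import scrapelib

# Scraper wraps requests.Session, adding throttling and retries
s = scrapelib.Scraper(requests_per_minute=60, retry_attempts=2)
resp = s.get('https://example.com')  # returns a requests.Response
print(resp.status_code)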


Example from jamesturk/scrapelib (scrapelib/__init__.py):
import requests
# urllib_urlopen / urllib_URLError are the standard library's urlopen and
# URLError, aliased in scrapelib for Python 2/3 compatibility;
# HTTPMethodUnavailableError and FTPError are scrapelib's own exceptions


class ThrottledSession(requests.Session):
    def request(self, method, url, **kwargs):
        # sleep long enough to honor requests_per_minute, then delegate
        if self._throttled:
            self._throttle()
        return super(ThrottledSession, self).request(method, url, **kwargs)


# this object exists because Requests assumes it can call
# resp.raw._original_response.msg.getheaders() and we need to cope with that
class DummyObject(object):
    def getheaders(self, name):
        return ''

    def get_all(self, name, default):
        return default


_dummy = DummyObject()
_dummy._original_response = DummyObject()
_dummy._original_response.msg = DummyObject()


class FTPAdapter(requests.adapters.BaseAdapter):

    def send(self, request, stream=False, timeout=None, verify=False, cert=None, proxies=None):
        if request.method != 'GET':
            raise HTTPMethodUnavailableError("FTP requests do not support method '%s'" %
                                             request.method, request.method)
        try:
            real_resp = urllib_urlopen(request.url, timeout=timeout)
            # we're going to fake a requests.Response with this
            resp = requests.Response()
            resp.status_code = 200
            resp.url = request.url
            resp.headers = {}
            resp._content = real_resp.read()
            # hand Requests the dummy raw object so its header handling
            # (resp.raw._original_response.msg) has something to call
            resp.raw = _dummy
            return resp
        except urllib_URLError:
            raise FTPError(request.url)
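The _dummy chain built above exists purely so that Requests' header and cookie handling has something to call when it walks resp.raw._original_response.msg. A quick illustration of what that traversal amounts to (the exact method Requests calls has varied between getheaders and get_all across Requests/urllib3 versions, which is why DummyObject implements both):

# what Requests effectively reaches for on the faked FTP response:
assert _dummy._original_response.msg.get_all('Set-Cookie', []) == []
assert _dummy._original_response.msg.getheaders('Set-Cookie') == ''

To actually route ftp:// URLs through FTPAdapter.send(), the adapter has to be mounted on a session. scrapelib does the equivalent on its own session; the sketch below shows the same idea with a plain requests.Session (the mount prefix and URL are illustrative):

import requests

session = requests.Session()
session.mount('ftp://', FTPAdapter())

# GETs to ftp:// URLs now go through FTPAdapter.send(), which fakes a
# requests.Response whose .raw attribute is the _dummy chain above
resp = session.get('ftp://ftp.example.com/somefile.txt')
print(resp.status_code, len(resp.content))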