How to use the pycurl.HTTP_CODE constant in pycurl

To help you get started, we’ve selected a few pycurl.HTTP_CODE examples, based on popular ways it is used in public projects. HTTP_CODE is an info constant rather than a function: pass it to Curl.getinfo() after a transfer to read the HTTP status code the server returned.

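Before the project excerpts, here is a minimal, self-contained sketch of the pattern they all share. The URL and buffer handling are illustrative placeholders, not code from any of the projects below.

import pycurl
from io import BytesIO

buf = BytesIO()
c = pycurl.Curl()
c.setopt(pycurl.URL, 'https://example.com/')  # placeholder URL
c.setopt(pycurl.WRITEFUNCTION, buf.write)     # collect the response body
c.perform()
status = c.getinfo(pycurl.HTTP_CODE)          # e.g. 200; 0 if no response code was received
c.close()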

github pycurl / pycurl / tests / test_multi_vs_thread.py
    def close(self):
        self.http_code = self._curl.getinfo(pycurl.HTTP_CODE)
        self._curl.close()

github vatlab / varianttools / source / utils.py
    try:
        import pycurl
        if not quiet:
            prog = ProgressBar(message)
        dest_tmp = TEMP(dest)
        with open(dest_tmp, 'wb') as f:
            c = pycurl.Curl()
            c.setopt(pycurl.URL, str(URL))
            c.setopt(pycurl.WRITEFUNCTION, f.write)
            if not quiet:
                c.setopt(pycurl.NOPROGRESS, False)
                c.setopt(pycurl.PROGRESSFUNCTION, prog.curlUpdate)
            c.perform()
        if not quiet:
            prog.done()
        if c.getinfo(pycurl.HTTP_CODE) == 404:
            try:
                os.remove(dest_tmp)
            except OSError:
                pass
            raise RuntimeError('ERROR 404: Not Found.')
        os.rename(dest_tmp, dest)
        if os.path.isfile(dest):
            return dest
        else:
            raise RuntimeError('Failed to download {} using pycurl'.format(URL))
    except ImportError:
        # no pycurl module
        pass
    # use wget? Almost universally available under linux
    try:
        # for some strange reason, passing wget without shell=True can fail silently.
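
The varianttools excerpt wires a ProgressBar into PROGRESSFUNCTION. As a hedged aside, a standalone progress callback receives four floats (download total, bytes downloaded, upload total, bytes uploaded); the print-based callback below is only an illustration, not the project's ProgressBar, and the URL is a placeholder.

import pycurl

def progress(download_total, downloaded, upload_total, uploaded):
    # called repeatedly by libcurl while the transfer runs
    if download_total:
        print('%3.0f%%' % (100.0 * downloaded / download_total))

c = pycurl.Curl()
c.setopt(pycurl.URL, 'https://example.com/file')   # placeholder URL
c.setopt(pycurl.WRITEFUNCTION, lambda data: None)   # discard the body for this sketch
c.setopt(pycurl.NOPROGRESS, False)                  # enable progress callbacks
c.setopt(pycurl.PROGRESSFUNCTION, progress)
c.perform()
print(c.getinfo(pycurl.HTTP_CODE))
c.close()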

github tp4a / teleport / server / www / packages / packages-linux / x64 / tornado / curl_httpclient.py
        info = curl.info
        curl.info = None
        self._multi.remove_handle(curl)
        self._free_list.append(curl)
        buffer = info["buffer"]
        if curl_error:
            assert curl_message is not None
            error = CurlError(curl_error, curl_message)  # type: Optional[CurlError]
            assert error is not None
            code = error.code
            effective_url = None
            buffer.close()
            buffer = None
        else:
            error = None
            code = curl.getinfo(pycurl.HTTP_CODE)
            effective_url = curl.getinfo(pycurl.EFFECTIVE_URL)
            buffer.seek(0)
        # the various curl timings are documented at
        # http://curl.haxx.se/libcurl/c/curl_easy_getinfo.html
        time_info = dict(
            queue=info["curl_start_ioloop_time"] - info["queue_start_time"],
            namelookup=curl.getinfo(pycurl.NAMELOOKUP_TIME),
            connect=curl.getinfo(pycurl.CONNECT_TIME),
            appconnect=curl.getinfo(pycurl.APPCONNECT_TIME),
            pretransfer=curl.getinfo(pycurl.PRETRANSFER_TIME),
            starttransfer=curl.getinfo(pycurl.STARTTRANSFER_TIME),
            total=curl.getinfo(pycurl.TOTAL_TIME),
            redirect=curl.getinfo(pycurl.REDIRECT_TIME),
        )
        try:
            info["callback"](

github mesosphere / marathon-lb / utils.py
    def _check_status_code(self):
        if self.status_code == 0:
            self.status_code = self.curl.getinfo(pycurl.HTTP_CODE)
        if self.status_code != 0 and self.status_code != 200:
            raise Exception(str(self.status_code) + ' ' + self.url)
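
The marathon-lb check above treats 0 as "no status yet": getinfo(pycurl.HTTP_CODE) returns 0 when no HTTP response code has been received, for example when the transfer failed before the server answered. A small hedged sketch of the same guard outside a class, with a placeholder URL:

import pycurl

c = pycurl.Curl()
c.setopt(pycurl.URL, 'https://example.com/')        # placeholder URL
c.setopt(pycurl.WRITEFUNCTION, lambda data: None)   # discard the body
try:
    c.perform()
except pycurl.error:
    pass  # HTTP_CODE stays 0 if no response was received
status = c.getinfo(pycurl.HTTP_CODE)
if status == 0:
    print('no HTTP response received')
elif status != 200:
    print('unexpected status: %d' % status)
c.close()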

github dansan / spring-replay-site / srs / contrib / pyCURLTransport.py
    def request(self, host, handler, request_body, verbose=0):
        """Performs actual request"""
        buf = StringIO()
        self._curl.setopt(pycurl.URL, "%s://%s%s" % (self._proto, host, handler))
        self._curl.setopt(pycurl.POSTFIELDS, request_body)
        self._curl.setopt(pycurl.WRITEFUNCTION, buf.write)
        self._curl.setopt(pycurl.VERBOSE, verbose)
        self.verbose = verbose
        try:
            self._curl.perform()
            httpcode = self._curl.getinfo(pycurl.HTTP_CODE)
        except pycurl.error, err:
            raise xmlrpclib.ProtocolError(host + handler, err[0], err[1], None)

        self._check_return(host, handler, httpcode, buf)

        if httpcode != 200:
            raise xmlrpclib.ProtocolError(
                host + handler, httpcode, buf.getvalue(), None
            )

        buf.seek(0)
        return self.parse_response(buf)
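
The spring-replay-site transport above uses Python 2 idioms (StringIO, xmlrpclib and the old comma form of except). A rough Python 3 sketch of the same perform / HTTP_CODE / error-handling pattern, using a placeholder helper rather than the project's transport class:

import pycurl
from io import BytesIO

def fetch(url):
    buf = BytesIO()
    c = pycurl.Curl()
    c.setopt(pycurl.URL, url)
    c.setopt(pycurl.WRITEFUNCTION, buf.write)
    try:
        c.perform()
        httpcode = c.getinfo(pycurl.HTTP_CODE)
    except pycurl.error as err:
        errno, message = err.args  # pycurl.error carries (curl error code, message)
        raise RuntimeError('curl error %d: %s' % (errno, message))
    finally:
        c.close()
    if httpcode != 200:
        raise RuntimeError('HTTP %d from %s' % (httpcode, url))
    return buf.getvalue()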

github StackStorm-Exchange / stackstorm-icinga2 / sensors / lib / client.py
            except Exception:
                # Network error, use linear back off up to 16 seconds
                if self.keep_trying == 0:
                    continue
                if self._sensor is not None:
                    self._sensor.logger.info('Network error: %s', self.conn.errstr())
                    self._sensor.logger.info('Waiting %s seconds before trying again',
                                             backoff_network_error)
                else:
                    print('Network error: %s' % self.conn.errstr())
                    print('Waiting %s seconds before trying again' % backoff_network_error)
                time.sleep(backoff_network_error)
                backoff_network_error = min(backoff_network_error + 1, 16)
                continue
            # HTTP Error
            sc = self.conn.getinfo(pycurl.HTTP_CODE)
            if sc == 420:
                # Rate limit, use exponential back off starting with 1 minute, and doubling
                if self._sensor is not None:
                    self._sensor.logger.info('Rate limit, waiting %s seconds', backoff_rate_limit)
                else:
                    print('Rate limit, waiting %s seconds' % backoff_rate_limit)
                time.sleep(backoff_rate_limit)
                backoff_rate_limit *= 2
            elif sc == 401:
                # Authentication error
                if self._sensor is not None:
                    self._sensor.logger.info(
                        'Authentication error, check user/password, waiting %s seconds',
                        backoff_rate_limit)
                else:
                    print('Authentication error, waiting %s seconds' % backoff_rate_limit)

github uyuni-project / uyuni / client / tools / mgr-virtualization / actions / image.py
        server = proxy_settings["proxyURL"]
        c.setopt(pycurl.PROXY, server )
        if "proxyUser" in proxy_settings and proxy_settings["proxyUser"] is not None and proxy_settings["proxyUser"] != "":
            user     = proxy_settings["proxyUser"]
            password = base64.b64decode(proxy_settings["proxyPass"])
            c.setopt(pycurl.PROXYUSERPWD, "%s:%s" % (user, password))
    # default IMAGE_BASE_PATH is /var/lib/libvirt/images
    file_path = "/%s/%s" % (IMAGE_BASE_PATH, target_filename)
    f = open(file_path, 'w')
    c.setopt(pycurl.FOLLOWLOCATION, 1)
    c.setopt(pycurl.WRITEFUNCTION, f.write)
    c.setopt(pycurl.SSL_VERIFYPEER, 0)
    c.perform()
    log.log_debug("curl got HTTP code: %s" % c.getinfo(pycurl.HTTP_CODE))
    f.close()
    return c.getinfo(pycurl.HTTP_CODE)
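
The uyuni excerpt sets FOLLOWLOCATION, so the HTTP_CODE it logs and returns belongs to the final response after any redirects. When the redirect target matters too, EFFECTIVE_URL (also used in the tornado excerpt above) reports where the transfer ended up; a small hedged sketch with a placeholder URL:

import pycurl
from io import BytesIO

buf = BytesIO()
c = pycurl.Curl()
c.setopt(pycurl.URL, 'http://example.com/old-path')  # placeholder URL
c.setopt(pycurl.FOLLOWLOCATION, 1)                    # follow 3xx responses
c.setopt(pycurl.WRITEFUNCTION, buf.write)
c.perform()
print(c.getinfo(pycurl.HTTP_CODE))       # status of the final response, e.g. 200
print(c.getinfo(pycurl.EFFECTIVE_URL))   # URL the transfer actually ended at
c.close()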

github voc / scripts / slides / get_attachments.py
    @type curl: pycurl.Curl
    '''
    
    #url = "https://frab.cccv.de/en/17c3/public/schedule.xml"
    url = SCHEDULE_URL % (LOGIN_HOST, conference_acronym)

    
    print"download %s schedule" % (conference_acronym)
    buf = setupCurl(curl, url)
    curl.setopt(pycurl.TIMEOUT, 6000)
    print "GET %s" % (url)
    curl.perform()
    if curl.getinfo(pycurl.HTTP_CODE) != 200:
        print buf.getvalue()
    assert curl.getinfo(pycurl.HTTP_CODE) == 200, "failed to download schedule"
    
    #print "store schedule to disk"
    dumpfile = open("data/schedule_" + conference_acronym + ".xml", "w")   
    dumpfile.write(buf.getvalue())
    dumpfile.close()
    buf.reset()
    
    return buf

    

    print("done")

github migasfree / migasfree-client / migasfree_client / curl.py
            'Accept-Language: %s' % self.accept_lang,
            'User-Agent: %s' % self.user_agent,
            'Expect:',
        ])
        self.curl.setopt(pycurl.URL, self.url)

        if self.proxy:
            self.curl.setopt(pycurl.PROXY, self.proxy)

        if self.post:
            self.curl.setopt(pycurl.POST, 1)
            self.curl.setopt(pycurl.HTTPPOST, self.post)

        try:
            self.curl.perform()
            self.http_code = self.curl.getinfo(pycurl.HTTP_CODE)
            self.error = None
        except pycurl.error as e:
            self.error = self.curl.errstr()
            self.errno = e.args[0]
        finally:
            self.curl.close()

github teal33t / poopak / application / crawler / curl.py
    resp = None
    while try_count < max_try_count :
        try:
            query = pycurl.Curl()
            query.setopt(pycurl.URL, url)
            query.setopt(pycurl.CONNECTTIMEOUT, CONNECTION_TIMEOUT)
            query.setopt(pycurl.TIMEOUT, REQUEST_TIMEOUT)
            query.setopt(pycurl.FOLLOWLOCATION, FOLLOWLOCATION)
            query.setopt(pycurl.HTTPHEADER, get_headers())
            query.setopt(pycurl.PROXY, tor_pool_url)
            query.setopt(pycurl.PROXYPORT, tor_pool_port)
            query.setopt(pycurl.PROXYTYPE, pycurl.PROXYTYPE_SOCKS5_HOSTNAME)
            query.setopt(pycurl.WRITEFUNCTION, output.write)
            query.perform()

            http_code = query.getinfo(pycurl.HTTP_CODE)
            response = output.getvalue()
            html = response.decode('utf8')

            if http_code in http_codes:
                if http_code == 200:
                    resp = {"url": url,
                            "html": html,
                            "status": http_code,
                            "seen_time": seen_time}
                    try_count = 9999
                else:
                    resp = {"url": url,
                            "status": http_code,
                            "seen_time": seen_time}
                    try_count = 9999