How to use the treq.post function in treq

To help you get started, we’ve selected a few treq.post examples, drawn from popular ways it is used in public projects.

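Before digging into the project code, here is a minimal, self-contained sketch of the basic pattern (the URL is a placeholder): treq.post returns a Deferred that fires with a response object, and reading the body is a second asynchronous step.

import treq
from twisted.internet import task
from twisted.internet.defer import inlineCallbacks

@inlineCallbacks
def main(reactor):
    # data= dicts are form-urlencoded; the URL is just an echo-style example.
    response = yield treq.post("https://httpbin.org/post", data={"key": "value"})
    body = yield response.text()  # the body arrives asynchronously too
    print(response.code, body[:80])

task.react(main)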

From ClusterHQ/flocker (flocker/acceptance/testtools.py):
    # make_post is nested inside a test helper; post/content come from treq,
    # Message from eliot (this is Python 2 code, hence unicode()).
    def make_post(host, port, data):
        request = post(
            "http://{host}:{port}".format(host=host, port=port),
            data=data,
            persistent=False  # one-shot request: no connection pooling
        )

        def failed(failure):
            Message.new(message_type=u"acceptance:http_query_failed",
                        reason=unicode(failure)).write()
            return False
        request.addCallbacks(content, failed)  # body on success, log on failure
        return request
    d = verify_socket(host, port)  # the enclosing function continues here
From mozilla-services/autopush-loadtester (aplt/runner.py):
                parsed = urlparse.urlparse(url)
                claims["aud"] = "{scheme}://{netloc}".format(
                    scheme=parsed.scheme,
                    netloc=parsed.netloc
                )
                log.msg("Setting VAPID 'aud' to {}".format(claims["aud"]))
            headers.update(self._vapid.sign(claims, self._crypto_key))
        if data:
            headers.update({
                "Content-Type": "application/octet-stream",
                "Content-Encoding": "aesgcm",
                "Crypto-key": crypto_key,
                "Encryption": self._encryption,
            })

        d = treq.post(url,
                      data=data,
                      headers=headers,
                      allow_redirects=False,
                      agent=self._agent)
        d.addCallback(self._sent_notification, processor)
        d.addErrback(self._error_notif, processor)
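Distilled, the call above combines three things: a prepared headers dict, allow_redirects=False so a 3xx response is handed to the callback instead of being followed, and explicit callbacks/errbacks on the returned Deferred. A minimal sketch with a placeholder endpoint and payload:

import treq

d = treq.post("https://push.example.com/notify",  # placeholder endpoint
              data=b"ciphertext-bytes",           # placeholder payload
              headers={"Content-Type": "application/octet-stream"},
              allow_redirects=False)              # surface 3xx to the callback
d.addCallback(lambda response: response.code)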
From scalingexcellence/scrapybook (ch09/properties/properties/pipelines/es.py):
    @defer.inlineCallbacks
    def process_item(self, item, spider):
        """
        Pipeline's main method. Uses inlineCallbacks to do
        asynchronous REST requests
        """
        try:
            # Create a json representation of this item
            data = json.dumps(dict(item), ensure_ascii=False).encode("utf-8")
            yield treq.post(self.es_url, data, timeout=5)
        finally:
            # In any case, return the dict for the next stage
            defer.returnValue(item)
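The timeout=5 above is treq's request timeout: if no response arrives in five seconds, treq cancels the Deferred, which surfaces as a CancelledError; the try/finally then still hands the item to the next pipeline stage. A minimal sketch of handling that cancellation explicitly (the error handling is an assumption, not from the book):

import treq
from twisted.internet.defer import inlineCallbacks, CancelledError

@inlineCallbacks
def index_item(es_url, payload):
    try:
        yield treq.post(es_url, payload, timeout=5)  # cancelled after 5 s
    except CancelledError:
        pass  # the server was too slow; skip the write instead of stalling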
From CPChain/pdash (cpchain/wallet/net.py):
    @inlineCallbacks
    def upload_file_info(self, hashcode, path, size, product_id, remote_type, remote_uri, name, encrypted_key):
        # fixme: another argument aes_key should be passed and encrypted
        header = {"MARKET-KEY": self.public_key, "MARKET-TOKEN": self.token,
                  'Content-Type': 'application/json'}
        data = {"public_key": self.public_key, "hashcode": hashcode, "path": path, "size": size,
                "client_id": product_id,
                "remote_type": remote_type, "remote_uri": remote_uri, "is_published": "False",
                "aes_key": encrypted_key, "market_hash": "hash", "name": name}
        url = self.url + 'user_data/v1/uploaded_file/add/'
        logger.debug('upload file info url: %s', url)
        logger.debug('upload file info header: %s', header)
        logger.debug('upload file info payload: %s', data)
        resp = yield treq.post(url, headers=header, json=data, persistent=False)
        confirm_info = yield treq.json_content(resp)
        logger.debug('upload file info response: %s', confirm_info)
        return confirm_info['status']
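The pair of calls above is treq's JSON round trip: json= serializes the dict and sets the Content-Type: application/json header, and treq.json_content waits for the body and decodes it. A minimal sketch against a placeholder endpoint (the "status" field is assumed from the example above):

import treq
from twisted.internet.defer import inlineCallbacks, returnValue

@inlineCallbacks
def add_record(record):
    resp = yield treq.post("https://api.example.com/records",  # placeholder
                           json=record, persistent=False)
    body = yield treq.json_content(resp)  # parse the JSON reply
    returnValue(body["status"])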
From vulcand/vulcan (vulcan/smtpserver.py):
    def toHTTPRequest(self, mime_message):
        # "recipients", FileStorage, and StringIO are defined in the
        # surrounding (truncated) module.
        return treq.post(
            ("http://" +
             pick_server(get_servers(self.delivery.settings["upstream"])) +
             self.delivery.settings['http']['path']),
            data={"to": ",".join(recipients)},
            files={"message": FileStorage(stream=StringIO(mime_message),
                                          name="mime")})
From scalingexcellence/scrapybook (ch11/properties/properties/middlewares.py):
        logger.info("Posting batch %d with %d URLs to %s",
                    self._batch, len(self._urls), target)

        data = [
            ("project", self._project),
            ("spider", spider.name),
            ("setting", "FEED_URI=%s" % self._feed_uri),
            ("batch", str(self._batch)),
        ]

        debug_data = "target (%d): %s" % (len(self._urls), data)

        json_urls = json.dumps(self._urls)
        data.append(("setting", "DISTRIBUTED_START_URLS=%s" % json_urls))

        d = treq.post("http://%s/schedule.json" % target,
                      data=data, timeout=5, persistent=False)

        d.addBoth(lambda resp: (debug_data, resp))

        self._scrapyd_submits_to_wait.append(d)

        self._urls = []
        self._batch += 1
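The data= list of (name, value) tuples above is what allows the repeated "setting" field: treq form-urlencodes the pairs in order, something a plain dict cannot express. Condensed, with placeholder values:

import treq

d = treq.post("http://scrapyd-host:6800/schedule.json",  # placeholder target
              data=[("project", "properties"),
                    ("spider", "easy"),
                    ("setting", "FEED_URI=ftp://example.com/feed.jl"),
                    ("setting", "DISTRIBUTED_START_URLS=[]")],
              timeout=5, persistent=False)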
From jsdir/stretch (agent/agent/supervisors.py):
                # strip the config_key prefix; str.lstrip() strips a set of
                # characters, not a prefix, so slicing is used instead
                key = result['key'][len(config_key):]
                if key != 'lb' and key not in self.blocked_instances:
                    if result.get('newKey'):
                        # add endpoint
                        endpoint = json.loads(result['value'])
                        self.add_endpoint(group_id, endpoint)
                    elif result['action'] == 'DELETE':
                        # remove endpoint
                        endpoint = json.loads(result['prevValue'])
                        self.remove_endpoint(group_id, endpoint)

                self.watch(group_id, config_key, result['index'])

        url = 'http://127.0.0.1:4001/v1/watch%s' % config_key
        if index:
            deferred = post(url, data={'index': index})
        else:
            deferred = get(url)

        deferred.addCallback(handle_response)
        return True
From scalingexcellence/scrapybook-2nd-edition (ch15/properties/properties/middlewares.py): this second-edition middleware is identical, line for line, to the ch11 example above, so the code is not repeated here.
From glyph/lancer (lancer/_cloudflare.py):
        records = data['result']
        dns_record = {
            "type": "TXT",
            "ttl": 120,
            "name": full_name,
            "content": validation
        }
        if records:
            put_to = str(records_base.child(records[0]["id"]))
            response = yield treq.put(
                put_to, json=dns_record,
                headers=self._headers()
            )
        else:
            post_to = str(records_base)
            response = yield treq.post(post_to, json=dns_record, headers=self._headers())
        yield response.json()
        yield ConsistencyChecker.default(self._reactor).check(full_name, validation)