self._record_activity()

if self.request.headers.get("Upgrade", "").lower() == 'websocket':
    # We wanna websocket!
    # jupyterhub/jupyter-server-proxy@36b3214
    self.log.info("we wanna websocket, but we don't define WebSocketProxyHandler")
    self.set_status(500)

body = self.request.body
if not body:
    if self.request.method == 'POST':
        body = b''
    else:
        body = None

client = httpclient.AsyncHTTPClient()
req = self._build_proxy_request(host, port, proxied_path, body)
response = await client.fetch(req, raise_error=False)

# record activity at start and end of requests
self._record_activity()

# For all non http errors...
if response.error and type(response.error) is not httpclient.HTTPError:
    self.set_status(500)
    self.write(str(response.error))
else:
    self.set_status(response.code, response.reason)
    # clear tornado default header
    self._headers = httputil.HTTPHeaders()
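
# A minimal, self-contained sketch of the fetch-with-raise_error=False
# pattern used above; `relay` and `target_url` are illustrative names,
# not part of the original handler.
import asyncio
from tornado import httpclient

async def relay(target_url):
    client = httpclient.AsyncHTTPClient()
    response = await client.fetch(target_url, raise_error=False)
    # On older Tornado releases non-HTTP failures (e.g. connection refused)
    # could land on response.error; Tornado 6 raises them instead.
    if response.error and not isinstance(response.error, httpclient.HTTPError):
        return 500, str(response.error).encode()
    return response.code, response.body

if __name__ == "__main__":
    status, body = asyncio.run(relay("http://example.com/"))
    print(status, len(body))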
:param fetch_url: URL to fetch
:param default_value: value to return in case of failure
:return:
'''
# assign empty dict for optional param
if default_value is None:
    default_value = dict()

Log.debug("fetching url %s", fetch_url)
ret = default_value

# time the duration of the fetch
start = time.time()

# fetch the URL asynchronously; raise_error=False so failures are reported
# on http_response.error instead of being raised out of the coroutine
http_response = yield tornado.httpclient.AsyncHTTPClient().fetch(
    fetch_url, raise_error=False)

# handle http errors, and return if any
if http_response.error:
    Log.error("Unable to get response from %s. Error %s", fetch_url, http_response.error)
    raise tornado.gen.Return(ret)

# load response and handle return errors, if any
response = json.loads(http_response.body)
if 'result' not in response:
    Log.error("Empty response from %s", fetch_url)
    raise tornado.gen.Return(ret)

# get the result and the server-side execution time (in milliseconds)
ret = response['result']
execution = 1000 * response['executiontime']
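
# Hedged completion of the fragment above as a self-contained coroutine.
# The function name, signature, and use of the standard logging module are
# assumptions; the original Log helper is not shown.
import json
import logging
import time

import tornado.gen
import tornado.httpclient

@tornado.gen.coroutine
def fetch_url_as_json(fetch_url, default_value=None):
    if default_value is None:
        default_value = {}
    start = time.time()
    http_response = yield tornado.httpclient.AsyncHTTPClient().fetch(
        fetch_url, raise_error=False)
    if http_response.error:
        logging.error("Unable to get response from %s. Error %s",
                      fetch_url, http_response.error)
        raise tornado.gen.Return(default_value)
    response = json.loads(http_response.body)
    if 'result' not in response:
        logging.error("Empty response from %s", fetch_url)
        raise tornado.gen.Return(default_value)
    logging.debug("Fetched %s in %.0f ms", fetch_url, 1000 * (time.time() - start))
    raise tornado.gen.Return(response['result'])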
# Security header
headers = {SECRET_HEADER: options.secret}

# Build query arguments
arguments = {}
if include_lists is not None:
    arguments['include_lists'] = include_lists.asdict()
if max_age is not None:
    arguments['max_age'] = max_age

url = "http://{ip}:{port}/{path}".format(
    ip=node_ip, port=constants.HERMES_PORT, path=self.method_path)
request = httpclient.HTTPRequest(
    url=url, method='GET', body=json.dumps(arguments), headers=headers,
    request_timeout=STATS_REQUEST_TIMEOUT, allow_nonstandard_methods=True
)
async_client = httpclient.AsyncHTTPClient()

try:
    # Send Future object to coroutine and suspend till result is ready
    response = yield async_client.fetch(request)
except (socket.error, httpclient.HTTPError) as err:
    msg = u"Failed to get stats from {url} ({err})".format(url=url, err=err)
    if hasattr(err, 'response') and err.response and err.response.body:
        msg += u"\nBODY: {body}".format(body=err.response.body)
    logger.error(msg)
    raise gen.Return(unicode(err))

try:
    snapshot = json.loads(response.body)
    raise gen.Return(converter.stats_from_dict(self.stats_model, snapshot))
except TypeError as err:
    msg = u"Can't parse stats snapshot ({})".format(err)
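
# Sketch of the nonstandard GET-with-body request built above; the URL
# and header name are placeholders, not the project's real constants.
import json
from tornado import httpclient

def build_stats_request(url, secret, max_age=60):
    return httpclient.HTTPRequest(
        url=url,
        method='GET',
        body=json.dumps({'max_age': max_age}),
        headers={'X-Secret': secret},  # placeholder for SECRET_HEADER
        request_timeout=10,
        # Tornado normally refuses to send a GET with a body; this flag
        # disables that validation, as the snippet above relies on.
        allow_nonstandard_methods=True,
    )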
def find_doi(self, url):
    """ retrieve doi and metadata from url """
    self.url = url  # save for later
    params = {'url': url}
    scrapeomat_url = "http://localhost:8889/doi?" + urllib.urlencode(params)
    http = tornado.httpclient.AsyncHTTPClient()
    http.fetch(scrapeomat_url, callback=self.on_got_doi_data)
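
# The snippet above is Python 2 callback style; a hedged Python 3
# equivalent using await (the endpoint comes from the snippet itself):
import urllib.parse
import tornado.httpclient

async def find_doi(url):
    params = urllib.parse.urlencode({'url': url})
    scrapeomat_url = "http://localhost:8889/doi?" + params
    http = tornado.httpclient.AsyncHTTPClient()
    response = await http.fetch(scrapeomat_url, raise_error=False)
    return response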
elif debug_type in (1, 2):
    debug_msg = native_str(debug_msg)
    for line in debug_msg.splitlines():
        curl_log.debug("%s %s", debug_types[debug_type], line)
elif debug_type == 4:
    curl_log.debug("%s %r", debug_types[debug_type], debug_msg)


class CurlError(HTTPError):
    def __init__(self, errno: int, message: str) -> None:
        HTTPError.__init__(self, 599, message)
        self.errno = errno


if __name__ == "__main__":
    AsyncHTTPClient.configure(CurlAsyncHTTPClient)
    main()
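
# Hedged sketch: with the curl client selected, connection-level failures
# surface as the CurlError defined above, i.e. an HTTPError whose code is
# the synthetic 599 (no real HTTP response was received). Requires pycurl.
import asyncio
from tornado.httpclient import AsyncHTTPClient, HTTPError

AsyncHTTPClient.configure("tornado.curl_httpclient.CurlAsyncHTTPClient")

async def demo():
    try:
        await AsyncHTTPClient().fetch("http://127.0.0.1:1/")  # nothing listens here
    except HTTPError as err:
        print(err.code)  # 599

if __name__ == "__main__":
    asyncio.run(demo())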
except socket.error as e:
    app_log.warn("Socket error on boot: %s", e)
    if e.errno != errno.ECONNREFUSED:
        app_log.warn("Error attempting to connect to [%s:%i]: %s",
                     ip, port, e)
    yield gen.Task(loop.add_timeout, loop.time() + wait_time)
else:
    break

# Fudge factor of IPython notebook bootup.
# TODO: Implement a webhook in IPython proper to call out when the
# notebook server is booted.
yield gen.Task(loop.add_timeout, loop.time() + .5)

# Now, make sure that we can reach the Notebook server.
http_client = AsyncHTTPClient()
req = HTTPRequest("http://{}:{}{}".format(ip, port, path))

while loop.time() - tic < timeout:
    try:
        yield http_client.fetch(req)
    except HTTPError as http_error:
        code = http_error.code
        app_log.info("Booting server at [%s], getting HTTP status [%s]", path, code)
        yield gen.Task(loop.add_timeout, loop.time() + wait_time)
    else:
        break

app_log.info("Server [%s] at address [%s:%s] has booted! Have at it.",
             path, ip, port)
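
# Self-contained rewrite of the boot-wait loop above using async/await
# instead of gen.Task; the host/port/path and timings are placeholders.
import asyncio
from tornado.httpclient import AsyncHTTPClient, HTTPError, HTTPRequest

async def wait_for_server(ip, port, path, timeout=30, wait_time=1.0):
    client = AsyncHTTPClient()
    req = HTTPRequest("http://{}:{}{}".format(ip, port, path))
    loop = asyncio.get_running_loop()
    tic = loop.time()
    while loop.time() - tic < timeout:
        try:
            await client.fetch(req)
        except (HTTPError, OSError):
            await asyncio.sleep(wait_time)  # not up yet; retry
        else:
            return True  # got a successful response: the server has booted
    return False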
def get_global_http_client():
    if not hasattr(RequestHandler, '_http_client'):
        RequestHandler._http_client = tornado.httpclient.AsyncHTTPClient(
            max_clients=options.tortik_max_clients)
    return RequestHandler._http_client
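
# Usage sketch: every handler gets the same client object, so max_clients
# is enforced process-wide. (AsyncHTTPClient() is itself shared per IOLoop
# by default; caching it here additionally pins the configuration.)
client = get_global_http_client()
assert client is get_global_http_client()  # same instance on every call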
# https://groups.google.com/forum/#!topic/python-tornado/lhyGhLZQIxY
import rospy
import tornado.web
import tornado.ioloop
import tornado.httpclient

from aggressive_proxy import ProxyHandler
from lg_common.helpers import write_log_to_file

if __name__ == "__main__":
    # use `pycurl`, an external dependency
    tornado.httpclient.AsyncHTTPClient.configure("tornado.curl_httpclient.CurlAsyncHTTPClient")

    # Single client instance
    client = tornado.httpclient.AsyncHTTPClient(max_clients=1000)

    rospy.init_node('aggressive_proxy')
    proxy_port = int(rospy.get_param('~proxy_port'))
    upstream_socket = rospy.get_param('~upstream_socket')

    application = tornado.web.Application([
        (r"/(.*)", ProxyHandler, {'client': client, 'upstream_socket': upstream_socket}),
    ], debug=True)
    application.listen(port=proxy_port)

    ioloop = tornado.ioloop.IOLoop.current()
    rospy.on_shutdown(ioloop.stop)
    ioloop.start()
def appify(self, url):
    # check for obviously malformed inputs...
    parsed = urlparse.urlparse(url)
    if not parsed.scheme or not parsed.netloc:
        self.fatal_error("Please provide a full URL, e.g. http://targethost.com/")
    if parsed.scheme not in ("http", "https"):
        self.fatal_error("Only http and https URLs are supported.")

    targeturl = parsed.scheme + "://" + parsed.netloc
    http = tornado.httpclient.AsyncHTTPClient()
    inspector = SiteInspection(parsed.scheme, parsed.netloc)

    faviconRequest = tornado.httpclient.HTTPRequest(targeturl + "/favicon.ico")
    faviconRequest.inspector = inspector
    indexRequest = tornado.httpclient.HTTPRequest(targeturl + "/")
    indexRequest.inspector = inspector

    http.fetch(faviconRequest, callback=self.on_favicon_response)
    http.fetch(indexRequest, callback=self.on_index_response)
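
# Hedged sketch of the same two parallel fetches with await instead of
# callbacks; the SiteInspection bookkeeping is omitted.
import asyncio
import urllib.parse
import tornado.httpclient

async def inspect_site(url):
    parsed = urllib.parse.urlparse(url)
    if not parsed.scheme or not parsed.netloc:
        raise ValueError("Please provide a full URL, e.g. http://targethost.com/")
    if parsed.scheme not in ("http", "https"):
        raise ValueError("Only http and https URLs are supported.")
    targeturl = parsed.scheme + "://" + parsed.netloc
    http = tornado.httpclient.AsyncHTTPClient()
    favicon, index = await asyncio.gather(
        http.fetch(targeturl + "/favicon.ico", raise_error=False),
        http.fetch(targeturl + "/", raise_error=False),
    )
    return favicon, index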
def get_access_token(self, code, request_handler):
    body = urllib_parse.urlencode({
        'redirect_uri': get_path_for_redirect(request_handler),
        'code': code,
        'client_id': self.client_id,
        'client_secret': self.secret,
        'grant_type': 'authorization_code',
    })
    http_client = httpclient.AsyncHTTPClient()
    response = yield http_client.fetch(
        tornado.auth.GoogleOAuth2Mixin._OAUTH_ACCESS_TOKEN_URL,
        method='POST',
        headers={'Content-Type': 'application/x-www-form-urlencoded'},
        body=body,
        raise_error=False)

    response_values = {}
    if response.body:
        response_values = escape.json_decode(response.body)

    if response.error:
        if response_values.get('error_description'):
            error_text = response_values.get('error_description')
        elif response_values.get('error'):
            error_text = response_values.get('error')
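
# The fragment above is truncated mid-branch; a hedged helper showing how
# the error text might be chosen (names are assumptions, not the original):
def oauth_error_text(response, response_values):
    # Prefer the server-provided description, then the error code,
    # then the raw transport error.
    return (response_values.get('error_description')
            or response_values.get('error')
            or str(response.error))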