Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
def testHopByHop(self):
    """Hop-by-hop headers are recognized regardless of letter case.

    Fix: ``failUnless``/``failIf`` are deprecated unittest aliases
    (removed in Python 3.12); use ``assertTrue``/``assertFalse``.
    """
    # Every RFC 2616 hop-by-hop header, in all case variants, must match.
    for hop in (
        "Connection Keep-Alive Proxy-Authenticate Proxy-Authorization "
        "TE Trailers Transfer-Encoding Upgrade"
    ).split():
        for alt in hop, hop.title(), hop.upper(), hop.lower():
            self.assertTrue(util.is_hop_by_hop(alt))
    # Not comprehensive, just a few random header names
    for hop in (
        "Accept Cache-Control Date Pragma Trailer Via Warning"
    ).split():
        for alt in hop, hop.title(), hop.upper(), hop.lower():
            self.assertFalse(util.is_hop_by_hop(alt))
def testHopByHop(self):
    """is_hop_by_hop() detects RFC 2616 hop-by-hop headers case-insensitively."""
    hop_by_hop = [
        "Connection", "Keep-Alive", "Proxy-Authenticate",
        "Proxy-Authorization", "TE", "Trailers",
        "Transfer-Encoding", "Upgrade",
    ]
    for name in hop_by_hop:
        for variant in (name, name.title(), name.upper(), name.lower()):
            self.assertTrue(util.is_hop_by_hop(variant))
    # Not comprehensive, just a few random header names
    end_to_end = [
        "Accept", "Cache-Control", "Date", "Pragma",
        "Trailer", "Via", "Warning",
    ]
    for name in end_to_end:
        for variant in (name, name.title(), name.upper(), name.lower()):
            self.assertFalse(util.is_hop_by_hop(variant))
def testHopByHop(self):
    """Verify hop-by-hop header detection in every letter case."""
    def case_variants(header):
        # Original spelling plus title/upper/lower-cased forms.
        return (header, header.title(), header.upper(), header.lower())

    hop_names = ("Connection Keep-Alive Proxy-Authenticate "
                 "Proxy-Authorization TE Trailers Transfer-Encoding "
                 "Upgrade").split()
    for header in hop_names:
        for variant in case_variants(header):
            self.assertTrue(util.is_hop_by_hop(variant))
    # Not comprehensive, just a few random header names
    for header in "Accept Cache-Control Date Pragma Trailer Via Warning".split():
        for variant in case_variants(header):
            self.assertFalse(util.is_hop_by_hop(variant))
def testHopByHop(self):
    """Hop-by-hop headers are recognized regardless of letter case.

    Fix: ``failUnless``/``failIf`` are deprecated unittest aliases
    (removed in Python 3.12); use ``assertTrue``/``assertFalse``.
    """
    # Every RFC 2616 hop-by-hop header, in all case variants, must match.
    for hop in (
        "Connection Keep-Alive Proxy-Authenticate Proxy-Authorization "
        "TE Trailers Transfer-Encoding Upgrade"
    ).split():
        for alt in hop, hop.title(), hop.upper(), hop.lower():
            self.assertTrue(util.is_hop_by_hop(alt))
    # Not comprehensive, just a few random header names
    for hop in (
        "Accept Cache-Control Date Pragma Trailer Via Warning"
    ).split():
        for alt in hop, hop.title(), hop.upper(), hop.lower():
            self.assertFalse(util.is_hop_by_hop(alt))
# NOTE(review): fragment of a proxy request-dispatch routine; the
# enclosing function and the start of this if/elif chain lie above this
# chunk, and the original indentation has been stripped.
else:
raise ValueError("Unknown request type %s." % req_spec['type'])
# Capture the incoming bottle request so it can be replayed upstream:
# query string, raw body, method, timeout and filtered headers.
http_params = bottle.request.query_string
http_body = bottle.request.body.read()
# NOTE(review): the method override comes from resp_spec while the
# timeout comes from req_spec — confirm that asymmetry is intentional.
http_method = resp_spec.get('method') or bottle.request.method or 'GET'
http_timeout = req_spec.get('timeout') or DEFAULT_TIMEOUT
# Lowercase header names; hop-by-hop (and possibly Host) headers are
# dropped by util.filter_request_headers.
http_headers = {k.lower(): v for k, v in\
util.filter_request_headers(bottle.request, allow_host=self.allow_host).items()}
# Explicit per-request header overrides win over the copied headers.
http_headers.update(req_spec.get('headers') or {})
sess = session_pool.get_session()
# Redirects are not followed here; Location is rewritten below instead.
resp_obj = sess.request(http_method, url, params=http_params, data=http_body,
headers=http_headers, timeout=http_timeout, allow_redirects=False)
ret['body'] = resp_obj.content
# Copy response headers back, skipping hop-by-hop and Content-Length
# (the body may be re-encoded, so the original length is unreliable).
for k, v in resp_obj.headers.items():
if (not is_hop_by_hop(k)) and not k.lower() in ['content-length']:
ret['headers'][k.lower()] = v
# Rewrite redirect targets so they point back at this proxy's host.
if ret['headers'].get('location'):
ret['headers']['location'] = util.replace_location_host(ret['headers']['location'],
url, bottle.request.urlparts.netloc)
# Split a folded Set-Cookie header into individual cookies; the regex
# avoids splitting on commas inside cookie attribute values.
set_cookie = ret['headers'].pop('set-cookie', None)
if set_cookie:
ret['headers']['set-cookie'] =\
set([i.strip() for i in re.split(r",(?![^=]+;)", set_cookie) if i.strip()])
ret['status'] = resp_obj.status_code
# Non-HTTP request types are delegated to a registered handler object.
elif resp_type in handlers.HANDLERS:
handler_object = handlers.get_handler_object(req_spec, resp_spec,
root_prefix=self.prefix)
# Dispatch on the path suffix following the configured path prefix.
pos = suffix.find(req_spec['path'])
ret = handler_object.dispatch(suffix[pos + len(req_spec['path']): ])
else:
# NOTE(review): tail of an aiohttp-to-WSGI environ builder; the dict
# literal and the enclosing function open above this chunk, and the
# original indentation has been stripped.
"SERVER_PROTOCOL": "HTTP/{}.{}".format(*request.version),
# Standard PEP 3333 keys describing this server's capabilities.
"wsgi.version": (1, 0),
"wsgi.url_scheme": url_scheme,
"wsgi.input": body,
"wsgi.errors": self._stderr,
"wsgi.multithread": True,
"wsgi.multiprocess": False,
"wsgi.run_once": False,
# Non-standard extras exposing the asyncio/aiohttp machinery to apps.
"asyncio.loop": self._loop,
"asyncio.executor": self._executor,
"aiohttp.request": request,
}
# Add in additional HTTP headers.
for header_name in request.headers:
header_name = header_name.upper()
# Hop-by-hop headers are excluded per PEP 3333; Content-Length and
# Content-Type have dedicated CGI keys rather than HTTP_ ones.
if not(is_hop_by_hop(header_name)) and header_name not in ("CONTENT-LENGTH", "CONTENT-TYPE"):
# Multi-valued headers are folded into one comma-separated value.
header_value = ",".join(request.headers.getall(header_name))
environ["HTTP_" + header_name.replace("-", "_")] = header_value
# All done!
return environ
# NOTE(review): fragment of an App Engine response-header-name validator;
# `name` (presumably lowercased) and `original_name` are bound above this
# chunk, and the original indentation has been stripped.
if not _HTTP_TOKEN_RE.match(name):
raise appinfo_errors.InvalidHttpHeaderName(
'An HTTP header must be a non-empty RFC 2616 token.')
# Request-only headers make no sense in a configured response.
if name in _HTTP_REQUEST_HEADERS:
raise appinfo_errors.InvalidHttpHeaderName(
'%r can only be used in HTTP requests, not responses.'
% original_name)
# The X-AppEngine-* namespace is reserved for the platform itself.
if name.startswith('x-appengine'):
raise appinfo_errors.InvalidHttpHeaderName(
'HTTP header names that begin with X-Appengine are reserved.')
# Hop-by-hop headers apply to a single connection, not the response.
# NOTE(review): the message below reads "Only use end-to-end headers may
# be used" — likely meant "Only end-to-end headers may be used".
if wsgiref.util.is_hop_by_hop(name):
raise appinfo_errors.InvalidHttpHeaderName(
'Only use end-to-end headers may be used. See RFC 2616 section'
' 13.5.1.')
if name in HttpHeadersDict.DISALLOWED_HEADERS:
raise appinfo_errors.InvalidHttpHeaderName(
'%s is a disallowed header.' % name)
# Valid: hand back the caller's original (un-normalized) spelling.
return original_name
# NOTE(review): byte-identical duplicate of the validation fragment
# above — the App Engine response-header-name checks; `name` and
# `original_name` are bound outside this chunk.
if not _HTTP_TOKEN_RE.match(name):
raise appinfo_errors.InvalidHttpHeaderName(
'An HTTP header must be a non-empty RFC 2616 token.')
# Request-only headers make no sense in a configured response.
if name in _HTTP_REQUEST_HEADERS:
raise appinfo_errors.InvalidHttpHeaderName(
'%r can only be used in HTTP requests, not responses.'
% original_name)
# The X-AppEngine-* namespace is reserved for the platform itself.
if name.startswith('x-appengine'):
raise appinfo_errors.InvalidHttpHeaderName(
'HTTP header names that begin with X-Appengine are reserved.')
# Hop-by-hop headers apply to a single connection, not the response.
# NOTE(review): message wording below is garbled ("Only use end-to-end
# headers may be used") — fix at the source snippet.
if wsgiref.util.is_hop_by_hop(name):
raise appinfo_errors.InvalidHttpHeaderName(
'Only use end-to-end headers may be used. See RFC 2616 section'
' 13.5.1.')
if name in HttpHeadersDict.DISALLOWED_HEADERS:
raise appinfo_errors.InvalidHttpHeaderName(
'%s is a disallowed header.' % name)
# Valid: hand back the caller's original (un-normalized) spelling.
return original_name
@get('/proxy')
def proxy():
    """Fetch the URL given in ?url= via curl and relay the response.

    Hop-by-hop response headers are stripped before relaying; on a curl
    error or non-200 upstream status the client is redirected straight
    to the target URL instead.

    Fix: the original ended with ``except Exception, e: raise`` — a
    catch-everything clause that only re-raised, i.e. dead code (and
    Python 2-only syntax); removed with identical behavior.
    """
    url = request.GET.get("url")
    # Forward the client's headers, minus the configured strip list.
    headers = ["{0}: {1}".format(k, v)
               for k, v in request.headers.iteritems()
               if k.lower() not in STRIP_HEADERS]
    try:
        headtuple, content = curl_http(url, headers)
        # Relay only end-to-end headers back to the client.
        headret = [(k, v) for k, v in headtuple if not is_hop_by_hop(k)]
        raise HTTPResponse(output=content, header=headret)
    except (pycurl.error, CurlNon200Status):
        # Best-effort fallback: let the client fetch the URL directly.
        redirect(url)
def filter_request_headers(req, allow_host=False):
    """Return req's headers as a dict suitable for forwarding.

    Hop-by-hop headers and Content-Length are always dropped; Host is
    dropped too unless allow_host is true. Header-name comparison is
    case-insensitive.
    """
    if allow_host:
        blocked = {'content-length'}
    else:
        blocked = {'content-length', 'host'}
    return {name: value
            for name, value in req.headers.items()
            if not is_hop_by_hop(name) and name.lower() not in blocked}