Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
def test_presigned_put_object_expiry(client, log_output):
    """Verify that a presigned PUT URL created with a 1-second expiry is
    rejected once it has expired.

    NOTE(review): this block appears to be two fused fragments. From the
    `# proxy` comment onward the code references `self`, `Configuration`,
    `pools_size`, `maxsize`, `cert_reqs`, `cert_file` and `key_file`,
    none of which exist in this function's scope (it looks like part of
    an API-client `__init__`), and the `try:` opened below never gets an
    `except`/`finally`. Code left byte-for-byte as found; needs
    reconstruction against the original source before it can run.
    """
    # default value for log_output.function attribute is;
    # log_output.function = "presigned_put_object(bucket_name, object_name, expires)"
    # Prefer the CA bundle named by the environment; fall back to certifi's.
    ca_certs = os.environ.get('SSL_CERT_FILE')
    if not ca_certs:
        ca_certs = certifi.where()
    _http = urllib3.PoolManager(cert_reqs='CERT_REQUIRED', ca_certs=ca_certs)
    # Get a unique bucket_name and object_name
    log_output.args['bucket_name'] = bucket_name = generate_bucket_name()
    log_output.args['object_name'] = object_name = uuid.uuid4().__str__()
    KB_1 = 1024 # 1KiB.
    try:
        client.make_bucket(bucket_name)
        # Presign with the shortest practical expiry so the test can wait
        # it out quickly.
        presigned_put_object_url = client.presigned_put_object(bucket_name,
                                                               object_name,
                                                               timedelta(seconds=1))
        # Wait for 2 seconds for the presigned url to expire
        time.sleep(2)
        # The PUT against the expired URL is expected to fail; the response
        # is never inspected here (the fragment is truncated).
        response = _http.urlopen('PUT',
                                 presigned_put_object_url,
                                 LimitedRandomReader(KB_1))
        # proxy
        # ---- fused fragment begins here (unrelated to the test above) ----
        proxy = Configuration().proxy
        # https pool manager
        if proxy:
            self.pool_manager = urllib3.ProxyManager(
                num_pools=pools_size,
                maxsize=maxsize,
                cert_reqs=cert_reqs,
                ca_certs=ca_certs,
                cert_file=cert_file,
                key_file=key_file,
                proxy_url=proxy
            )
        else:
            self.pool_manager = urllib3.PoolManager(
                num_pools=pools_size,
                maxsize=maxsize,
                cert_reqs=cert_reqs,
                ca_certs=ca_certs,
                cert_file=cert_file,
                key_file=key_file
            )
def get_query(query_string):
    """Wrapper to YaCy installation.

    Sends *query_string* to the local YaCy search endpoint
    (/yacysearch.rss?query=...) and returns the raw RSS response body.
    Returns None on any failure (best-effort, errors are printed).
    """
    # /yacysearch.rss?query=&size=
    try:
        # Percent-encode the query so spaces/special characters survive
        # URL concatenation.
        query_string = urllib2.quote(query_string.encode("utf8"))
        url = settings.YACY + "yacysearch.rss?query=" + query_string
        http = urllib3.PoolManager()
        response = http.request('GET', url)
        return response.data
    except Exception as error:
        # Fixed: `print error` was Python 2-only syntax (a SyntaxError
        # under Python 3). Keep the best-effort behaviour: report and
        # return None instead of propagating.
        print(error)
        return None
def openshift_main():
    """Fetch the OpenShift swagger spec for the git ref in sys.argv[2],
    post-process it, and write the result to the path in sys.argv[3]."""
    import sys
    import json
    import codecs
    import urllib3
    from collections import OrderedDict

    http = urllib3.PoolManager()
    utf8_reader = codecs.getreader('utf-8')
    spec_url = ('https://raw.githubusercontent.com/openshift/origin/'
                '%s/api/swagger-spec/openshift-openapi-spec.json' % sys.argv[2])
    output_path = sys.argv[3]
    print("writing to {}".format(output_path))

    with http.request('GET', spec_url, preload_content=False) as response:
        # Bail out early on anything other than a successful download.
        if response.status != 200:
            print("Error downloading spec file. Reason: %s" % response.reason)
            return 1
        # preload_content=False keeps the body unread, so the response is
        # file-like and can be streamed through the utf-8 decoder.
        in_spec = json.load(utf8_reader(response), object_pairs_hook=OrderedDict)
        processed = process_openshift_swagger(in_spec, output_path)
        out_spec = process_swagger(processed, sys.argv[1])
        update_codegen_ignore(out_spec, output_path)
        with open(output_path, 'w') as out:
            json.dump(out_spec, out, sort_keys=True, indent=2,
                      separators=(',', ': '), ensure_ascii=True)
def repeatDownload(self, ticker, line, timestamp, exchange):
    """Fetch the Reuters company-news page for *ticker* on *timestamp*,
    retrying up to self.repeat_times times on errors.

    Returns 1 when self.parser reports news for the page, 0 otherwise
    (including the empty-content and all-retries-failed cases).
    """
    url = "https://www.reuters.com/finance/stocks/company-news/" + ticker + self.suffix[exchange]
    new_time = timestamp[4:] + timestamp[:4] # change 20151231 to 12312015 to match reuters format
    http = urllib3.PoolManager()
    for _ in range(self.repeat_times):
        try:
            # Randomized (Poisson) delay between requests to avoid
            # hammering the server.
            time.sleep(np.random.poisson(self.sleep_times))
            response = http.request('GET', url + "?date=" + new_time)
            soup = BeautifulSoup(response.data, "lxml")
            hasNews = self.parser(soup, line, ticker, timestamp)
            if hasNews:
                return 1  # return if we get the news
            break  # stop looping if the content is empty (no error)
        except Exception:
            # Fixed: was a bare `except:`, which also swallowed
            # KeyboardInterrupt/SystemExit. `except Exception:` keeps the
            # retry-on-HTTP-error behaviour while letting those propagate.
            print('Http error')
            continue
    return 0
def is_ec2_host():
    """Return True when running on an EC2 instance, False otherwise.

    Probes the EC2 instance-metadata service; any failure (timeout,
    connection refused, DNS error, ...) is treated as "not EC2".
    """
    http = urllib3.PoolManager(timeout=.1)
    url = 'http://169.254.169.254/latest/meta-data/instance-id'
    try:
        # retries=False: urllib3's default of 3 retries would otherwise
        # pay the 0.1 s timeout several times over on non-EC2 hosts.
        # (Also drops the previously unused `r` binding.)
        http.request('GET', url, retries=False)
        return True
    except Exception:
        return False
def __init__(self, *args, **kwargs):
    # Pass all construction arguments through to the Uploader base class,
    # then attach a urllib3 pool manager used for subsequent HTTP work.
    super(Uploader, self).__init__(*args, **kwargs)
    # NOTE(review): no timeout/retry/TLS configuration is applied here;
    # urllib3.PoolManager defaults are in effect.
    self.opener = urllib3.PoolManager()
def get_opener():
    """Build an HTTP pool manager, honouring the http_proxy env var.

    When http_proxy is set, returns a ProxyManager carrying any auth
    credentials embedded in the proxy URL; otherwise a plain PoolManager.
    """
    proxy_env = os.getenv('http_proxy')
    if not proxy_env:
        return urllib3.PoolManager()
    proxy_parts = compat.urlparse(proxy_env)
    auth_headers = util.get_auth_info_from_url(proxy_env, proxy=True)
    return urllib3.ProxyManager(
        proxy_url=proxy_parts.geturl(),
        proxy_headers=auth_headers)
def main():
    """Download SPEC_URL, run it through process_swagger, and write the
    result to OUTPUT_PATH.

    Returns 0 on success, 1 when the download fails.
    """
    pool = urllib3.PoolManager()
    with pool.request('GET', SPEC_URL, preload_content=False) as response:
        if response.status != 200:
            # Fixed: the original used a Python 2 `print` statement — a
            # SyntaxError under Python 3. print(single_value) is valid in
            # both 2 and 3.
            print("Error downloading spec file. Reason: %s" % response.reason)
            return 1
        # preload_content=False leaves the body unread, so the response is
        # file-like and json.load can consume it directly.
        in_spec = json.load(response, object_pairs_hook=OrderedDict)
        out_spec = process_swagger(in_spec)
        with open(OUTPUT_PATH, 'w') as out:
            json.dump(out_spec, out, sort_keys=False, indent=2,
                      separators=(',', ': '), ensure_ascii=True)
    return 0