def test_POST_pass(self):
    results_url = '%srecipes/%s/tasks/%s/results/' % (self.get_proxy_url(),
            self.recipe.id, self.recipe.tasks[0].id)
    response = requests.post(results_url, data=dict(result='Pass',
            path='/random/junk', score='123', message='The thing worked'))
    self.assertEquals(response.status_code, 201)
    self.assert_(response.headers['Location'].startswith(results_url),
            response.headers['Location'])
    result_id = int(posixpath.basename(response.headers['Location']))
    self.check_result(result_id, TaskResult.pass_, u'/random/junk', 123,
            u'The thing worked')
def get_token(udid):
    global token
    data = {
        'udid': udid
    }
    res = rq.post(url_token, data=data)
    js = json.loads(res.text)
    try:
        token = js['data']['token']
        print("Game token received: " + token)
        return token
    except (KeyError, TypeError):
        # The response did not contain data.token, so the udid was rejected.
        print("Invalid udid!")
        exit(1)
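For reference, a more defensive variant of the same token request is sketched below. It reuses names from the snippet above (rq as the requests module, url_token as the endpoint) and assumes the same data.token response shape; none of this is confirmed by the service itself.

import sys
import requests as rq

def get_token_checked(udid, url_token):
    # Hypothetical variant: fail on HTTP errors and use requests' JSON decoding.
    try:
        res = rq.post(url_token, data={'udid': udid}, timeout=10)
        res.raise_for_status()
        token = res.json()['data']['token']
    except (rq.exceptions.RequestException, ValueError, KeyError, TypeError):
        print("Invalid udid!")
        sys.exit(1)
    print("Game token received: " + token)
    return token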
try:
    content = urllib.urlopen(NG_STATUS_URI).read()
    ts = int(time.time())
    for line in content.splitlines():
        la = line.strip().split(options.ngx_out_sep)
        append_datapoint(datapoints, Render.render(la))
    for key in renders.keys():
        append_datapoint(datapoints, Render.service_stat(renders[key]))
    for key in derive_renders.keys():
        append_datapoint(datapoints, Render.service_stat(derive_renders[key]))
    if options.format == 'falcon' and options.falcon_addr != '':
        import requests
        r = requests.post(options.falcon_addr, data=json.dumps(datapoints))
        print "push to falcon result: " + r.text
    else:
        print json.dumps(datapoints, indent=4, sort_keys=True)
except Exception as e:
    traceback.print_exc(file=sys.stderr)
sys.stdout.flush()
sys.stderr.flush()
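The snippet above is Python 2 (print statements, urllib.urlopen). As a rough sketch, the push step alone could be written for Python 3 as follows, assuming datapoints is a list of dicts and falcon_addr is the agent endpoint taken from the snippet's options:

import json
import requests

def push_to_falcon(falcon_addr, datapoints, timeout=5.0):
    # Sketch only: POST the collected datapoints as a JSON body and echo the reply.
    resp = requests.post(falcon_addr, data=json.dumps(datapoints), timeout=timeout)
    print("push to falcon result: " + resp.text)
    return resp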
Given a device hash and optionally a number of captures
to be taken, starts the vent collector for that device with the
options specified in poseidon.config.
'''
try:
    payload = {
        'nic': self.mod_configuration['collector_nic'],
        'id': dev_hash,
        'interval': self.mod_configuration['collector_interval'],
        'filter': self.mod_configuration['collector_filter'],
        'iters': str(num_captures)}
    self.logger.debug('vent payload: ' + str(payload))
    vent_addr = self.mod_configuration[
        'vent_ip'] + ':' + self.mod_configuration['vent_port']
    uri = 'http://' + vent_addr + '/create'
    resp = requests.post(uri, json=payload)
    self.logger.debug('collector response: ' + resp.text)
except Exception as e:
    self.logger.debug('failed to start vent collector: ' + str(e))
params['deployment'] = deployment
params['environment'] = env
params['refs'] = yaml.safe_dump(play['vars'], default_flow_style=False)
params['release_id'] = release_id
params['mongo_uri'] = mongo_uri
params['configuration'] = configuration_ref
params['configuration_secure'] = configuration_secure_ref
params['base_ami'] = base_amis.get(play_name, default_base)

log.info("Need ami for {}".format(pformat(params)))
if noop:
    r = requests.Request('POST', abbey_url, params=params)
    url = r.prepare().url
    print("Would have posted: {}".format(url))
else:
    r = requests.post(abbey_url, params=params)
    log.info("Sent request got {}".format(r))

    if r.status_code != 200:
        # Something went wrong.
        msg = "Failed to submit request with params: {}"
        raise Exception(msg.format(pformat(params)))
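The noop branch above relies on requests' prepared-request API to show the URL that would have been hit without sending anything. A minimal standalone sketch of that dry-run technique (the URL and parameters here are placeholders, not values from the snippet):

import requests

# Build the request without sending it, just to inspect the final URL.
req = requests.Request('POST', 'https://example.com/build', params={'release_id': '42'})
prepared = req.prepare()
print("Would have posted: {}".format(prepared.url))

# If it later needs to be sent, a Session can dispatch the prepared request:
# with requests.Session() as session:
#     resp = session.send(prepared)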
def _update_status(repo, commit, token, data):
    "Sends the status update's data using the GitHub API."
    header = {'Authorization': 'token ' + token}
    api_url = ("https://api.github.com/repos/%s/statuses/%s" %
               (repo, commit))

    if __name__ == '__main__':
        eprint("Updating status of commit", commit, "with data", data)

    try:
        # use data= instead of json= in case we're running on an older requests
        resp = requests.post(api_url, data=json.dumps(data), headers=header)
        _print_ratelimit_info(resp)
        body = resp.json()
    except JSONDecodeError:
        eprint("Expected JSON, but received:")
        eprint("---")
        eprint(resp.content)
        eprint("---")
        eprint("Retrying...")
        resp = requests.post(api_url, data=json.dumps(data), headers=header)
        body = resp.json()

    # pylint: disable=no-member
    if resp.status_code != requests.codes.created:
        if (resp.status_code == requests.codes.unprocessable
                and body is not None and 'message' in body
                and "No commit found for SHA" in body['message']):
def post_url(self, url, fields, files=None):
    # files maps form field names to local file paths; each is opened for upload.
    files = files or {}
    for key in files:
        files[key] = open(files[key], 'rb')
    res = requests.post(url, files=files, data=fields)
    return res.text
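One caveat with the snippet above is that the opened file handles are never closed. A variant along these lines (a hypothetical standalone helper, not the project's own method) closes them once the upload finishes:

import requests

def post_files(url, fields, file_paths):
    # file_paths maps form field names to local file paths (illustrative helper).
    handles = {key: open(path, 'rb') for key, path in file_paths.items()}
    try:
        res = requests.post(url, files=handles, data=fields)
        return res.text
    finally:
        for handle in handles.values():
            handle.close()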
def converse(message):
    global CONVERSATION_WORKSPACE_ID, CONVERSATION_USERNAME, CONVERSATION_PASSWORD, CONVERSATION_VERSION
    POST_SUCCESS = 200

    url = ""
    if SOE:
        url = CONVERSATION_SOE_URL
    else:
        url = CONVERSATION_URL + CONVERSATION_WORKSPACE_ID + '/message?version=' + CONVERSATION_VERSION

    r = requests.post(url, auth=(CONVERSATION_USERNAME, CONVERSATION_PASSWORD),
                      headers={'content-type': 'application/json'},
                      data=json.dumps(message))

    if r.status_code == POST_SUCCESS:
        message = r.json()
    return message
        r = requests.post('http://web:8000/add_post/', proxies=no_proxies,
                          data={"seed_url": seed_url, 'post_url': get_post_url(item)})
        if r.status_code == requests.codes.ok:
            logging.info(f"New post for {seed_url}: #{i} {item}")
        else:
            logging.info(f"Status {r.status_code} for {seed_url}: #{i} {item}")
        if i > MAX_POSTS or r.status_code == requests.codes.forbidden:
            logging.info("Stopping because max count or previous post reached")
            break
    logging.info(f"Job for {seed_url} done!")
    logging.info(f"Dequeueing {seed_url}")
    r = requests.post('http://web:8000/dequeue_seed/', proxies=no_proxies,
                      data={"seed_url": seed_url})
    logging.info(f"Dequeue status: {r.status_code}")
except Exception:
    logging.error("Handle seed job broke :-(", exc_info=True)
finally:
    logging.info("Continuing...")
"buttons": [
{
"textButton": {
"text": "OPEN QUERY",
"onClick": {
"openLink": {
"url": "{host}/queries/{query_id}".format(host=host, query_id=query.id)
}
}
}
}
]
})
headers = {"Content-Type": "application/json; charset=UTF-8"}
resp = requests.post(options.get("url"), data=json_dumps(data), headers=headers, timeout=5.0)
if resp.status_code != 200:
logging.error("webhook send ERROR. status_code => {status}".format(status=resp.status_code))
except Exception:
logging.exception("webhook send ERROR.")