from urllib.error import URLError
from uuid import uuid4

import pytest

from pycookiecheat import chrome_cookies


def test_raises_on_wrong_browser() -> None:
    """Passing a browser other than Chrome or Chromium raises ValueError."""
    with pytest.raises(ValueError):
        chrome_cookies('http://www.html-kit.com/tools/cookietester',
                       browser="Safari")


def test_no_cookies(travis_setup: pytest.fixture) -> None:
    """Ensure that no cookies are returned for a fake url."""
    never_been_here = 'http://{0}.com'.format(uuid4())
    empty_dict = chrome_cookies(never_been_here)
    assert empty_dict == dict()


def test_fake_cookie(travis_setup: pytest.fixture) -> None:
    """Tests a fake cookie from the website below.

    For this to pass, you'll have to visit the url and put in "TestCookie"
    and "Just_a_test!" to set a temporary cookie with the appropriate values.
    """
    cookies = chrome_cookies('http://www.html-kit.com/tools/cookietester')
    assert cookies['TestCookie'] == 'Just_a_test!'


def test_raises_on_empty() -> None:
    """Ensure that `chrome_cookies()` raises TypeError when called with no URL."""
    with pytest.raises(TypeError):
        chrome_cookies()  # type: ignore


def test_raises_without_scheme() -> None:
    """Ensure that `chrome_cookies("domain.com")` raises.

    The domain must specify a scheme (http or https).
    """
    with pytest.raises(URLError):
        chrome_cookies('n8henrie.com')
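
# Taken together, the tests above pin down the contract of chrome_cookies():
# the URL must carry an http/https scheme, and only Chrome/Chromium are
# supported browsers. A minimal usage sketch under those assumptions follows;
# the target URL and the requests call are illustrative, not from the tests.
import requests
from pycookiecheat import chrome_cookies

url = "https://example.com/private"  # placeholder: any site you're logged into via Chrome

# chrome_cookies() returns a plain dict of cookie name -> value for this URL.
cookies = chrome_cookies(url)

# Reuse the browser session from a script.
response = requests.get(url, cookies=cookies)
print(response.status_code)
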
import subprocess
import sys

from pycookiecheat import chrome_cookies


def main():
    if len(sys.argv) < 2:
        print("usage: curlc [url]")
        sys.exit(1)
    curlargs = sys.argv[1:]
    # The first argument that doesn't look like a flag is treated as the URL.
    url = None
    for arg in curlargs:
        if arg[0] != '-':
            url = arg
            break
    if url is None:
        print("usage: curlc [url]")
        sys.exit(1)
    # Fold Chrome's cookies for this URL into a single "cookie:" header.
    chrome = chrome_cookies(url)
    cargs = []
    carg = "cookie: "
    for k, v in chrome.items():
        cargs.append(k + "=" + v)
    carg += "; ".join(cargs)
    # Pass the header plus the original arguments straight through to curl.
    args = ["curl", "-H", carg]
    args.extend(curlargs)
    subprocess.run(args)
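
# The same "cookie:" header the wrapper above hands to curl, built in
# isolation; the URL is a placeholder and the example values are made up.
from pycookiecheat import chrome_cookies

url = "https://example.com"  # placeholder
header = "cookie: " + "; ".join(
    "{0}={1}".format(k, v) for k, v in chrome_cookies(url).items()
)
# e.g. "cookie: sessionid=abc123; csrftoken=xyz789"
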
import argparse
import sys

import requests
from halo import Halo
from pycookiecheat import chrome_cookies

if __name__ == "__main__":
    parser = argparse.ArgumentParser(
        description="Get all child orgs; this will use your existing chrome "
        "cookies; You need to be logged into the parent org you wish to "
        "get all child orgs for; This mirrors the data found on: "
        "https://app.datadoghq.com/account/multi-org-usage"
    )
    spinner = Halo(text="Getting all child orgs...", spinner="dots")
    spinner.start()
    # @ckelner: Support: This works with our internal systems also, just
    # change the URL to point at internal systems
    url = "https://app.datadoghq.com"
    cookiez = chrome_cookies(url)
    r = requests.get(url + "/account/usage/multi_org_summary", cookies=cookiez)
    if r.status_code == 403:
        spinner.stop()
        print("Multi-org not enabled")
        sys.exit()
    org_summary = r.json()["orgs"]
    # poor man's debugging
    # print(json.dumps(org_summary, indent=4))
    '''
    json objects look like:
    ...
    "orgs": [
        {
            "aws_host_top99p": 0,
            "infra_host_top99p": 0,
from urllib import parse

import requests
from bs4 import BeautifulSoup
from pycookiecheat import chrome_cookies

# PROF_URl and HOME_URL are constants defined elsewhere in the original
# module (not shown in this snippet).


def get_prof_ids(keyword, url=PROF_URl, num_page=1, strict=True):
    if num_page == 0:
        return []
    ids = []
    cookies = chrome_cookies(HOME_URL)
    if url == PROF_URl:
        if strict:
            params = {"view_op": "search_authors",
                      "mauthors": "label:{0}".format(keyword)}
        else:
            params = {"view_op": "search_authors",
                      "mauthors": "{0}".format(keyword)}
        r = requests.get(url, params=params, cookies=cookies)
    else:
        r = requests.get(url, cookies=cookies)
    soup = BeautifulSoup(r.content, "html.parser")
    urls = soup.find_all("div", {"class": "gsc_oai"})
    for i in urls:
        parsed = i.find("a")['href']
        id = parse.parse_qs(parse.urlparse(parsed).query)['user'][0]
        ids.append(id)
    try:
        next_page = soup.find("div", {"id": "gsc_authors_bottom_pag"})
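
# A hedged example of calling get_prof_ids(); the keyword and page count are
# illustrative, and PROF_URl / HOME_URL are assumed to be defined as in the
# original module.
ids = get_prof_ids("deep_learning", num_page=2, strict=True)
print(len(ids), "author ids found")
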
# Constructor of an author-profile class (the class definition is not shown
# in this snippet): it fetches the profile page with Chrome's cookies and
# parses the individual fields out of the HTML.
def __init__(self, id):
    self.id = id
    params = {"user": id}
    cookies = chrome_cookies(PROF_URl)
    r = requests.get(PROF_URl, params=params, cookies=cookies)
    self.html = BeautifulSoup(r.content, "html.parser")
    self.name = self.get_name()
    self.university = self.get_university()
    self.job = self.get_job()
    self.h_index = self.get_h_index()
    self.homepage = self.get_homepage()
    self.tags = self.get_tags()
    self.gs_link = r.url