# Validate the requested search filters and map them to the values the
# Investing.com search endpoint expects.
if filters:
    condition = set(filters).issubset(available_filters.keys())
    if condition is False:
        raise ValueError('ERR#0095: filters parameter values must be contained in ' + ', '.join(available_filters) + '.')
    else:
        filters = [available_filters[filter_] for filter_ in filters]

params = {
    'search_text': text,
    'tab': 'quotes',
    'isFilter': False
}

head = {
    "User-Agent": get_random(),
    "X-Requested-With": "XMLHttpRequest",
    "Accept": "text/html",
    "Accept-Encoding": "gzip, deflate, br",
    "Connection": "keep-alive",
}

url = 'https://www.investing.com/search/service/SearchInnerPage'

req = requests.post(url, headers=head, data=params)

if req.status_code != 200:
    raise ConnectionError("ERR#0015: error " + str(req.status_code) + ", try again later.")

data = req.json()

if data['total']['quotes'] == 0:
    ...  # no-results handling not shown in this snippet
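A minimal, self-contained sketch of the same search call, assuming the headers shown above are sufficient; the hard-coded User-Agent stands in for get_random(), and only the 'total'/'quotes' fields of the JSON response are taken from the snippet.

import requests

def search_quotes_raw(text):
    # POST the search text to Investing.com's search service and return the
    # parsed JSON, mirroring the status and empty-result checks above.
    url = 'https://www.investing.com/search/service/SearchInnerPage'
    params = {'search_text': text, 'tab': 'quotes', 'isFilter': False}
    head = {
        "User-Agent": "Mozilla/5.0",  # illustrative stand-in for get_random()
        "X-Requested-With": "XMLHttpRequest",
        "Accept": "text/html",
        "Accept-Encoding": "gzip, deflate, br",
        "Connection": "keep-alive",
    }
    req = requests.post(url, headers=head, data=params)
    if req.status_code != 200:
        raise ConnectionError("error " + str(req.status_code) + ", try again later.")
    data = req.json()
    if data['total']['quotes'] == 0:
        raise ValueError("no quotes found for '" + text + "'")
    return data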
# Narrow the ETF listing down to the requested country and normalise the
# introduced ETF name before matching it against the available names.
etfs = etfs[etfs['country'] == country]

etf = etf.strip()
etf = etf.lower()

if unidecode.unidecode(etf) not in [unidecode.unidecode(value.lower()) for value in etfs['name'].tolist()]:
    raise RuntimeError("ERR#0019: etf " + str(etf) + " not found in " + str(country.lower()) + ", check if it is correct.")

name = etfs.loc[(etfs['name'].str.lower() == etf).idxmax(), 'name']
tag = etfs.loc[(etfs['name'].str.lower() == etf).idxmax(), 'tag']

url = "https://www.investing.com/etfs/" + tag

head = {
    "User-Agent": get_random(),
    "X-Requested-With": "XMLHttpRequest",
    "Accept": "text/html",
    "Accept-Encoding": "gzip, deflate, br",
    "Connection": "keep-alive",
}

req = requests.get(url, headers=head)

if req.status_code != 200:
    raise ConnectionError("ERR#0015: error " + str(req.status_code) + ", try again later.")

root_ = fromstring(req.text)
path_ = root_.xpath("//div[contains(@class, 'overviewDataTable')]/div")

result = pd.DataFrame(columns=['ETF Name', 'Prev. Close', 'Todays Range', 'ROI (TTM)',
                               'Open', '52 wk Range', 'Dividends (TTM)', 'Volume',
                               # remaining columns not shown in this snippet
                               ])
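A hedged sketch of how the overviewDataTable divs selected above might be read into label/value pairs with lxml; the span-based child selectors are an assumption about the page markup, not something the snippet confirms.

import requests
from lxml.html import fromstring

def fetch_overview_fields(tag):
    # Fetch an instrument page and collect the text of each overview cell.
    url = "https://www.investing.com/etfs/" + tag
    head = {
        "User-Agent": "Mozilla/5.0",  # illustrative stand-in for get_random()
        "X-Requested-With": "XMLHttpRequest",
    }
    req = requests.get(url, headers=head)
    if req.status_code != 200:
        raise ConnectionError("error " + str(req.status_code) + ", try again later.")
    root_ = fromstring(req.text)
    fields = {}
    for div in root_.xpath("//div[contains(@class, 'overviewDataTable')]/div"):
        # Assumed layout: first span holds the label, second span the value.
        spans = [s.strip() for s in div.xpath(".//span/text()") if s.strip()]
        if len(spans) >= 2:
            fields[spans[0]] = spans[1]
    return fields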
# Retrieve the government bonds listing page for every bond country bundled
# with investpy.
if not isinstance(test_mode, bool):
    raise ValueError('ERR#0041: test_mode can just be either True or False')

results = list()

resource_package = 'investpy'
resource_path = '/'.join(('resources', 'bonds', 'bond_countries.csv'))
if pkg_resources.resource_exists(resource_package, resource_path):
    countries = pd.read_csv(pkg_resources.resource_filename(resource_package, resource_path))
else:
    raise IOError("ERR#0062: bonds country list not found or unable to retrieve.")

for country in countries['tag'].tolist():
    head = {
        "User-Agent": user_agent.get_random(),
        "X-Requested-With": "XMLHttpRequest",
        "Accept": "text/html",
        "Accept-Encoding": "gzip, deflate, br",
        "Connection": "keep-alive",
    }

    url = "https://www.investing.com/rates-bonds/" + country + "-government-bonds"

    req = requests.get(url, headers=head)

    if req.status_code != 200:
        raise ConnectionError("ERR#0015: error " + str(req.status_code) + ", try again later.")

    root_ = fromstring(req.text)
    path_ = root_.xpath(".//table[@id='cr1']/tbody/tr")
# Resolve the product id for the introduced name and request its technical
# indicators from Investing.com.
name = unidecode.unidecode(name.lower().strip())

if name not in [unidecode.unidecode(value.lower()) for value in data[check].tolist()]:
    raise ValueError("ERR#0122: introduced name does not exist in the introduced country (if required).")

product_id = data.loc[(data[check].str.lower() == name).idxmax(), 'id']

data_values = {
    'pairID': product_id,
    'period': intervals[interval],
    'viewType': 'normal'
}

headers = {
    "User-Agent": get_random(),
    "X-Requested-With": "XMLHttpRequest",
    "Accept": "text/html",
    "Accept-Encoding": "gzip, deflate, br",
    "Connection": "keep-alive",
}

url = "https://www.investing.com/instruments/Service/GetTechincalData"

req = requests.post(url, headers=headers, data=data_values)

if req.status_code != 200:
    raise ConnectionError("ERR#0015: error " + str(req.status_code) + ", try again later.")

root = fromstring(req.text)
table = root.xpath(".//table[contains(@class, 'technicalIndicatorsTbl')]/tbody/tr")
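A sketch of reducing the technicalIndicatorsTbl rows selected above to plain tuples; the assumption that each row carries the indicator name, its value, and its signal in the first three cells is mine, not the snippet's.

def parse_indicator_rows(table):
    # table is the list of <tr> elements from the xpath above; keep the
    # stripped text of up to three cells per row (assumed: name, value, signal).
    rows = []
    for tr in table:
        cells = [cell.strip() for cell in tr.xpath(".//td//text()") if cell.strip()]
        if cells:
            rows.append(tuple(cells[:3]))
    return rows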
# Build the payload for the historical data endpoint and request the index's
# historical quotes table.
index_currency = indices.loc[(indices['name'].str.lower() == index).idxmax(), 'currency']

header = full_name + ' Historical Data'

params = {
    "curr_id": id_,
    "smlID": str(randint(1000000, 99999999)),
    "header": header,
    "interval_sec": interval,
    "sort_col": "date",
    "sort_ord": "DESC",
    "action": "historical_data"
}

head = {
    "User-Agent": get_random(),
    "X-Requested-With": "XMLHttpRequest",
    "Accept": "text/html",
    "Accept-Encoding": "gzip, deflate, br",
    "Connection": "keep-alive",
}

url = "https://www.investing.com/instruments/HistoricalDataAjax"

req = requests.post(url, headers=head, data=params)

if req.status_code != 200:
    raise ConnectionError("ERR#0015: error " + str(req.status_code) + ", try again later.")

root_ = fromstring(req.text)
path_ = root_.xpath(".//table[@id='curr_table']/tbody/tr")
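The curr_table rows selected above can be flattened into a DataFrame without assuming particular column names; this sketch just keeps the raw cell text per row, since the table headers are not part of the snippet.

import pandas as pd

def rows_to_dataframe(path_):
    # path_ is the list of <tr> elements from the curr_table lookup above.
    records = []
    for elem in path_:
        cells = [td.text_content().strip() for td in elem.xpath(".//td")]
        if cells:
            records.append(cells)
    # Column names are intentionally left as default integers; the real
    # headers would have to be read from the page itself.
    return pd.DataFrame(records)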
    'bonds': '_Bonds'
}

# Validate the economic calendar filters and build the request payload used
# when no explicit date range is given.
if categories is not None and not isinstance(categories, list):
    raise ValueError("ERR#0113: the introduced categories value is not valid since it must be a list of strings unless it is None.")

if from_date is not None and not isinstance(from_date, str):
    raise ValueError("ERR#0114: the introduced date value must be a string unless it is None.")

if to_date is not None and not isinstance(to_date, str):
    raise ValueError("ERR#0114: the introduced date value must be a string unless it is None.")

url = "https://www.investing.com/economic-calendar/Service/getCalendarFilteredData"

headers = {
    "User-Agent": get_random(),
    "X-Requested-With": "XMLHttpRequest",
    "Accept": "text/html",
    "Accept-Encoding": "gzip, deflate, br",
    "Connection": "keep-alive",
}

dates = [from_date, to_date]

if any(date is None for date in dates):
    data = {
        'timeZone': choice(time_zones[time_zone]),
        'timeFilter': time_filters[time_filter],
        'currentTab': 'today',
        'submitFilters': 1,
        'limit_from': 0
    }
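The snippet above only type-checks from_date and to_date; a sketch of the follow-up validation one would typically add, parsing both with datetime.strptime. The dd/mm/yyyy format is an assumption about the expected input, not something the snippet states.

from datetime import datetime

def validate_date_range(from_date, to_date, fmt='%d/%m/%Y'):
    # Both dates must parse with the assumed format and be correctly ordered;
    # when either is None the caller falls back to the 'today' payload above.
    if from_date is None or to_date is None:
        return None
    start = datetime.strptime(from_date, fmt)
    end = datetime.strptime(to_date, fmt)
    if start > end:
        raise ValueError("to_date must be greater than or equal to from_date.")
    return start, end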
raise IOError("ERR#0065: bonds object not found or unable to retrieve.")
country = unidecode.unidecode(country.lower())
if country not in get_bond_countries():
raise ValueError("ERR#0034: country " + country + " not found, check if it is correct.")
bonds = bonds[bonds['country'] == country]
if country == 'united states':
country= 'usa'
elif country == 'united kingdom':
country = 'uk'
head = {
"User-Agent": get_random(),
"X-Requested-With": "XMLHttpRequest",
"Accept": "text/html",
"Accept-Encoding": "gzip, deflate, br",
"Connection": "keep-alive",
}
url = 'https://www.investing.com/rates-bonds/' + country + '-government-bonds'
req = requests.get(url, headers=head)
if req.status_code != 200:
raise ConnectionError("ERR#0015: error " + str(req.status_code) + ", try again later.")
root_ = fromstring(req.text)
table = root_.xpath(".//table[@id='cr1']/tbody/tr")
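The united states/united kingdom special cases above could also be handled with a small slug map, which keeps the URL construction in one place; a sketch, with the mapping limited to the two cases the snippet shows.

COUNTRY_SLUGS = {
    'united states': 'usa',
    'united kingdom': 'uk',
}

def bond_overview_url(country):
    # Fall back to the country name itself when no special slug is defined,
    # matching the behaviour of the if/elif chain above.
    slug = COUNTRY_SLUGS.get(country, country)
    return 'https://www.investing.com/rates-bonds/' + slug + '-government-bonds'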
if pkg_resources.resource_exists(resource_package, resource_path):
    commodities = pd.read_csv(pkg_resources.resource_filename(resource_package, resource_path))
else:
    raise FileNotFoundError("ERR#0075: commodities file not found or errored.")

if commodities is None:
    raise IOError("ERR#0076: commodities not found or unable to retrieve.")

# Keep only the requested commodity group and fetch its overview page.
group = unidecode.unidecode(group.lower())

if group not in get_commodity_groups():
    raise RuntimeError('ERR#0091: specified commodity group value is not valid.')

commodities = commodities[commodities['group'] == group]

head = {
    "User-Agent": get_random(),
    "X-Requested-With": "XMLHttpRequest",
    "Accept": "text/html",
    "Accept-Encoding": "gzip, deflate, br",
    "Connection": "keep-alive",
}

url = "https://www.investing.com/commodities/" + group

req = requests.get(url, headers=head)

if req.status_code != 200:
    raise ConnectionError("ERR#0015: error " + str(req.status_code) + ", try again later.")

root_ = fromstring(req.text)
table = root_.xpath(".//table[@id='cross_rate_1']/tbody/tr")
# Resolve the certificate name and request its historical data table, using
# the same HistoricalDataAjax payload as for indices above.
name = certificates.loc[(certificates['name'].str.lower() == certificate).idxmax(), 'name']

header = symbol + ' Historical Data'

params = {
    "curr_id": id_,
    "smlID": str(randint(1000000, 99999999)),
    "header": header,
    "interval_sec": interval,
    "sort_col": "date",
    "sort_ord": "DESC",
    "action": "historical_data"
}

head = {
    "User-Agent": get_random(),
    "X-Requested-With": "XMLHttpRequest",
    "Accept": "text/html",
    "Accept-Encoding": "gzip, deflate, br",
    "Connection": "keep-alive",
}

url = "https://www.investing.com/instruments/HistoricalDataAjax"

req = requests.post(url, headers=head, data=params)

if req.status_code != 200:
    raise ConnectionError("ERR#0015: error " + str(req.status_code) + ", try again later.")

root_ = fromstring(req.text)
path_ = root_.xpath(".//table[@id='curr_table']/tbody/tr")