Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
file_directory, generation_date, followers_size, following_size, fans_size
)
file_index = 0
final_file = "{}.json".format(file_name)
try:
if not os.path.exists(file_directory):
os.makedirs(file_directory)
# this loop provides unique data files
while os.path.isfile(final_file):
file_index += 1
final_file = "{}({}).json".format(file_name, file_index)
with open(final_file, "w") as fans_data:
with interruption_handler():
json.dump(fans, fans_data)
logger.info("Stored Fans data at {} local file\n".format(final_file))
except Exception as exc:
logger.info(
"Failed to store Fans data in a local file :Z\n{}\n".format(
str(exc).encode("utf-8")
)
has_next_data = page_info["has_next_page"]
if has_next_data:
variables["after"] = page_info["end_cursor"]
url = "{}&variables={}".format(
graphql_followers, str(json.dumps(variables))
)
web_address_navigator(browser, url)
sc_rolled += 1
# dump the current graphql queries data
if local_read_failure is not True:
try:
with interruption_handler():
with open(filename, "w") as graphql_queries_file:
graphql_queries[username][query_date]["sc_rolled"] += 1
json.dump(graphql_queries, graphql_queries_file)
except Exception as exc:
print("\n")
logger.info(
"Error occurred while writing `scroll` data to "
"graphql_queries.json\n{}\n".format(
str(exc).encode("utf-8")
)
)
# take breaks gradually
if sc_rolled > 91:
print("\n")
logger.info("Queried too much! ~ sleeping a bit :>")
has_next_data = page_info["has_next_page"]
if has_next_data:
variables["after"] = page_info["end_cursor"]
url = "{}&variables={}".format(
graphql_following, str(json.dumps(variables))
)
web_address_navigator(browser, url)
sc_rolled += 1
# dumps the current graphql queries data
if local_read_failure is not True:
try:
with interruption_handler():
with open(filename, "w") as graphql_queries_file:
graphql_queries[username][query_date]["sc_rolled"] += 1
json.dump(graphql_queries, graphql_queries_file)
except Exception as exc:
print("\n")
logger.info(
"Error occurred while writing `scroll` data to "
"graphql_queries.json\n{}\n".format(
str(exc).encode("utf-8")
)
)
# take breaks gradually
if sc_rolled > 91:
print("\n")
logger.info("Queried too much! ~ sleeping a bit :>")
def log_record_all_followed(login, followed, logger, logfolder, logtime, user_id):
    """Append one follow event to the permanent all-time followed pool.

    The pool file (`<logfolder><login>_record_all_followed.csv`) is only ever
    appended to, never truncated, so it keeps a full history of every account
    the session has followed. Failures are logged and swallowed so a logging
    problem never interrupts the main flow.
    """
    pool_path = "{0}{1}_record_all_followed.csv".format(logfolder, login)
    try:
        with open(pool_path, "a+") as pool_file:
            # interruption_handler (project helper) shields the write from
            # keyboard interrupts so a half-written row is not left behind
            with interruption_handler():
                pool_file.write(
                    "{} ~ {} ~ {},\n".format(logtime, followed, user_id)
                )
    except BaseException as err:
        # best-effort logging: report and continue, never propagate
        logger.error("log_record_all_followed_pool error {}".format(str(err)))