Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
return const.ESkipped
if self.__resumedownload and \
self.__compare_size(self.__current_file_size, self.__remote_json) == 2:
if self.__resumedl_revertcount < 0:
if self.__current_file_size:
offset = self.__current_file_size
else:
# revert back at least self.__resumedl_revertcount download chunk(s), default: one
pieces = self.__current_file_size // self.__dl_chunk_size
if pieces > self.__resumedl_revertcount:
offset = (pieces - self.__resumedl_revertcount) * self.__dl_chunk_size
elif os.path.isdir(localfile):
if not self.shalloverwrite("Same-name directory '{}' exists, "
"do you want to remove it? [y/N]".format(localfile)):
pinfo("Same-name directory '{}' exists, skip downloading".format(localfile))
#return const.ENoError
return const.ESkipped
self.pv("Directory with the same name '{}' exists, removing ...".format(localfile))
result = removedir(localfile, self.verbose)
if result == const.ENoError:
self.pv("Removed")
else:
perr("Error removing the directory '{}'".format(localfile))
return result
ldir, file = os.path.split(localfile)
if ldir and not os.path.exists(ldir):
result = makedir(ldir, verbose = self.verbose)
if result != const.ENoError:
perr("Fail to make directory '{}'".format(ldir))
if self.__resumedownload and \
self.__compare_size(self.__current_file_size, self.__remote_json) == 2:
if self.__resumedl_revertcount < 0:
if self.__current_file_size:
offset = self.__current_file_size
else:
# revert back at least self.__resumedl_revertcount download chunk(s), default: one
pieces = self.__current_file_size // self.__dl_chunk_size
if pieces > self.__resumedl_revertcount:
offset = (pieces - self.__resumedl_revertcount) * self.__dl_chunk_size
elif os.path.isdir(localfile):
if not self.shalloverwrite("Same-name directory '{}' exists, "
"do you want to remove it? [y/N]".format(localfile)):
pinfo("Same-name directory '{}' exists, skip downloading".format(localfile))
#return const.ENoError
return const.ESkipped
self.pv("Directory with the same name '{}' exists, removing ...".format(localfile))
result = removedir(localfile, self.verbose)
if result == const.ENoError:
self.pv("Removed")
else:
perr("Error removing the directory '{}'".format(localfile))
return result
ldir, file = os.path.split(localfile)
if ldir and not os.path.exists(ldir):
result = makedir(ldir, verbose = self.verbose)
if result != const.ENoError:
perr("Fail to make directory '{}'".format(ldir))
return result
return result
offset = 0
self.pd("Checking if we already have the copy locally")
if os.path.isfile(localfile):
self.pd("Same-name local file '{}' exists, checking if contents match".format(localfile))
self.__current_file_size = getfilesize(self.__current_file)
if const.ENoError == self.__verify_current_file(self.__remote_json, False) \
and not (self.__downloader[:5] == const.DownloaderAria2 and os.path.exists(localfile + '.aria2')):
self.pd("Same local file '{}' already exists, skip downloading".format(localfile))
self.__remove_remote_on_success(remotefile)
return const.ENoError
else:
if not self.shalloverwrite("Same-name locale file '{}' exists but is different, "
"do you want to overwrite it? [y/N]".format(localfile)):
pinfo("Same-name local file '{}' exists but is different, skip downloading".format(localfile))
#return const.ENoError
return const.ESkipped
if self.__resumedownload and \
self.__compare_size(self.__current_file_size, self.__remote_json) == 2:
if self.__resumedl_revertcount < 0:
if self.__current_file_size:
offset = self.__current_file_size
else:
# revert back at least self.__resumedl_revertcount download chunk(s), default: one
pieces = self.__current_file_size // self.__dl_chunk_size
if pieces > self.__resumedl_revertcount:
offset = (pieces - self.__resumedl_revertcount) * self.__dl_chunk_size
elif os.path.isdir(localfile):
if not self.shalloverwrite("Same-name directory '{}' exists, "
"do you want to remove it? [y/N]".format(localfile)):
"Error: Your Python 'multiprocess' library is probably "
"not properly installed (missing C extensions). "
"You need to install a C compiler and Python headers before "
"installing the Python 'multiprocess' library. "
"(All these hassles could have been saved if Python's builtin "
"'multiprocessing' works properly, sigh.)\n"
"Fix for debian derivatives:\n"
"- Install gcc: # apt-get install gcc\n"
"- Install python-dev: # apt-get install python-dev\n"
"- Reinstall Python 'multiprocess' library:\n"
" # pip uninstall -y multiprocess\n"
" # pip install -v multiprocess\n"
"- If there's no errors/warnings in the above actions, "
"then this error should be gone when you run 'bypy' with '{}' again.\n"
).format(formatex(pe), const.MultiprocessOption)
perr(errmsg)
self.quit(const.EFatal)
#return const.ENoError
return const.ESkipped
self.pv("Directory with the same name '{}' exists, removing ...".format(localfile))
result = removedir(localfile, self.verbose)
if result == const.ENoError:
self.pv("Removed")
else:
perr("Error removing the directory '{}'".format(localfile))
return result
ldir, file = os.path.split(localfile)
if ldir and not os.path.exists(ldir):
result = makedir(ldir, verbose = self.verbose)
if result != const.ENoError:
perr("Fail to make directory '{}'".format(ldir))
return result
if self.__downloader[:5] == const.DownloaderAria2:
result = self.__down_aria2c(rfile, localfile)
else:
result = self.__downchunks(rfile, offset)
if result == const.ENoError:
self.__remove_remote_on_success(remotefile)
return result
def __request_work_die(self, ex, url, pars, r, act):
	"""Report an unrecoverable request failure and terminate the program.

	Dumps the exception context for diagnosis, prints fatal-error guidance
	to the user, then exits via self.quit() with const.EFatal.

	Args:
		ex: The exception that was caught.
		url: The request URL that triggered the failure.
		pars: The request parameters used.
		r: The response object (if any) associated with the failure.
		act: The action/callback that was being performed.

	Note: this never returns normally — self.quit() ends the process.
	"""
	# Persist full exception context first, while it is still available.
	self.__dump_exception(ex, url, pars, r, act)
	perr("Fatal Exception, no way to continue.\nQuitting...\n")
	perr("If the error is reproducible, run the program with `-dv` arguments again to get more info.\n")
	# Exit with the fatal error code.
	self.quit(const.EFatal)
# we eat the exception, and use return code as the only
"--------------------------------\n"
"Error: Your Python 'multiprocess' library is probably "
"not properly installed (missing C extensions). "
"You need to install a C compiler and Python headers before "
"installing the Python 'multiprocess' library. "
"(All these hassles could have been saved if Python's builtin "
"'multiprocessing' works properly, sigh.)\n"
"Fix for debian derivatives:\n"
"- Install gcc: # apt-get install gcc\n"
"- Install python-dev: # apt-get install python-dev\n"
"- Reinstall Python 'multiprocess' library:\n"
" # pip uninstall -y multiprocess\n"
" # pip install -v multiprocess\n"
"- If there's no errors/warnings in the above actions, "
"then this error should be gone when you run 'bypy' with '{}' again.\n"
).format(formatex(pe), const.MultiprocessOption)
perr(errmsg)
self.quit(const.EFatal)
# no need to call __load_local_json() again as __auth() will load the json & acess token.
result = self.__auth()
if result != const.ENoError:
perr("Program authorization FAILED.\n"
"You need to authorize this program before using any PCS functions.\n"
"Quitting...\n")
self.quit(result)
for proxy in ['HTTP_PROXY', 'HTTPS_PROXY']:
if proxy in os.environ:
pr("{} used: {}".format(proxy, os.environ[proxy]))
# update check
check_update = False
nowsec = int(time.time())
if const.SettingKey_LastUpdateCheckTime not in self.__setting:
check_update = True
else:
lastcheck = self.__setting[const.SettingKey_LastUpdateCheckTime]
if nowsec - lastcheck > 7 * 24 * 60 * 60: # check every 7 days
check_update = True
if check_update:
r = requests.get('https://raw.githubusercontent.com/houtianze/bypy/master/update/update.json')
if r.status_code == 200:
try:
j = r.json()
min_ver_key = 'minimumRequiredVersion'
if min_ver_key in j:
minver = j[min_ver_key]
if comp_semver(const.__version__, minver) < 0:
perr("Your current version ({}) is too low, "
"not properly installed (missing C extensions). "
"You need to install a C compiler and Python headers before "
"installing the Python 'multiprocess' library. "
"(All these hassles could have been saved if Python's builtin "
"'multiprocessing' works properly, sigh.)\n"
"Fix for debian derivatives:\n"
"- Install gcc: # apt-get install gcc\n"
"- Install python-dev: # apt-get install python-dev\n"
"- Reinstall Python 'multiprocess' library:\n"
" # pip uninstall -y multiprocess\n"
" # pip install -v multiprocess\n"
"- If there's no errors/warnings in the above actions, "
"then this error should be gone when you run 'bypy' with '{}' again.\n"
).format(formatex(pe), const.MultiprocessOption)
perr(errmsg)
self.quit(const.EFatal)
self.__remote_json = {}
self.__slice_md5s = []
self.__cookies = {}
# TODO: whether this works is still to be tried out
self.__isrev = False
self.__rapiduploaded = False
# store the response object, mainly for testing.
self.response = object()
# store function-specific result data
self.result = {}
if not self.__load_local_json():
# no need to call __load_local_json() again as __auth() will load the json & acess token.
result = self.__auth()
if result != const.ENoError:
perr("Program authorization FAILED.\n"
"You need to authorize this program before using any PCS functions.\n"
"Quitting...\n")
self.quit(result)
for proxy in ['HTTP_PROXY', 'HTTPS_PROXY']:
if proxy in os.environ:
pr("{} used: {}".format(proxy, os.environ[proxy]))
# update check
check_update = False
nowsec = int(time.time())
if const.SettingKey_LastUpdateCheckTime not in self.__setting:
check_update = True
else:
lastcheck = self.__setting[const.SettingKey_LastUpdateCheckTime]