"Error: Your Python 'multiprocess' library is probably "
"not properly installed (missing C extensions). "
"You need to install a C compiler and Python headers before "
"installing the Python 'multiprocess' library. "
"(All these hassles could have been saved if Python's builtin "
"'multiprocessing' works properly, sigh.)\n"
"Fix for debian derivatives:\n"
"- Install gcc: # apt-get install gcc\n"
"- Install python-dev: # apt-get install python-dev\n"
"- Reinstall Python 'multiprocess' library:\n"
" # pip uninstall -y multiprocess\n"
" # pip install -v multiprocess\n"
"- If there's no errors/warnings in the above actions, "
"then this error should be gone when you run 'bypy' with '{}' again.\n"
).format(formatex(pe), const.MultiprocessOption)
perr(errmsg)
self.quit(const.EFatal)
#return const.ENoError
return const.ESkipped
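
# A directory may exist at the download target's local path;
# remove it first so the file can be written there.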
self.pv("Directory with the same name '{}' exists, removing ...".format(localfile))
result = removedir(localfile, self.verbose)
if result == const.ENoError:
    self.pv("Removed")
else:
    perr("Error removing the directory '{}'".format(localfile))
    return result
ldir, file = os.path.split(localfile)
if ldir and not os.path.exists(ldir):
    result = makedir(ldir, verbose=self.verbose)
    if result != const.ENoError:
        perr("Failed to make directory '{}'".format(ldir))
        return result
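
# Dispatch to the external aria2 downloader when configured;
# otherwise fall back to the built-in chunked downloader.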
if self.__downloader[:5] == const.DownloaderAria2:
    result = self.__down_aria2c(rfile, localfile)
else:
    result = self.__downchunks(rfile, offset)

if result == const.ENoError:
    self.__remove_remote_on_success(remotefile)

return result
def __request_work_die(self, ex, url, pars, r, act):
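    # Dump diagnostics for an unrecoverable request failure, then terminate.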
    result = const.EFatal
    self.__dump_exception(ex, url, pars, r, act)
    perr("Fatal Exception, no way to continue.\nQuitting...\n")
    perr("If the error is reproducible, run the program with the `-dv` arguments again to get more info.\n")
    self.quit(result)
    # we eat the exception, and use the return code as the only
    # error notification method; we don't want to mix the two
def __restore_search_act(self, r, args):
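    # Scan the recycle-bin listing for the requested path (case-insensitively)
    # to find its fs_id, then issue a 'restore' request for it.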
    path = args
    flist = r.json()['list']
    fsid = None
    for f in flist:
        if os.path.normpath(f['path'].lower()) == os.path.normpath(path.lower()):
            fsid = f['fs_id']
            self.pd("fs_id '{}' found for restoring".format(fsid))
            break

    if fsid:
        pars = {
            'method': 'restore',
            'fs_id': fsid}
        return self.__post(pcsurl + 'file', pars, self.__restore_act, path)
    else:
        perr("'{}' not found in the recycle bin".format(path))
        self.__rapiduploaded = False
        if not self.__rapiduploadonly:
            self.pd("'{}' can't be RapidUploaded, now trying normal uploading.".format(self.__current_file))
            # rapid upload failed, we have to upload manually
            if self.__current_file_size <= self.__slice_size:
                # no-slicing upload
                self.pd("'{}' is being uploaded without slicing.".format(self.__current_file))
                result = self.__upload_one_file(localpath, remotepath, ondup)
            elif self.__current_file_size <= const.MaxSliceSize * const.MaxSlicePieces:
                # slice the file using the slice size
                self.pd("'{}' is being uploaded in slices.".format(self.__current_file))
                result = self.__upload_file_slices(localpath, remotepath, ondup)
            else:
                result = const.EFileTooBig
                perr("Error: file '{}' of size {} is too big".format(
                    self.__current_file,
                    self.__current_file_size))
        else:
            self.pv("'{}' can't be rapidly uploaded, so it's skipped since we are in rapid-upload-only mode.".format(localpath))
            result = const.ESkipped
elif self.__rapiduploadonly:
    self.pv("'{}' is too small to be rapidly uploaded, so it's skipped since we are in rapid-upload-only mode.".format(localpath))
    result = const.ESkipped
else:
    # very small file: must be uploaded manually, and no slicing is needed
    self.pd("'{}' is small and is being uploaded without slicing.".format(self.__current_file))
    result = self.__upload_one_file(localpath, remotepath, ondup)

if result == const.ENoError:
    self.__remove_local_on_success(localpath)

return result
def __remove_remote_on_success(self, remotepath):
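    # When the delete-source option is set, delete the remote file
    # after it has been downloaded successfully.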
    if self.__deletesource:
        self.pd("Removing remote path '{}' after successful download.".format(remotepath))
        result = self.__delete(remotepath)
        if result == const.ENoError:
            self.pd("Remote path '{}' removed.".format(remotepath))
        else:
            perr("Failed to remove remote path '{}'.".format(remotepath))
        return result
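
# Remove bypy's saved state: the authorization token file, the upload progress
# file, and (at clean level 2 or higher) the hash cache file.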
tokenpath = os.path.join(configdir, const.TokenFileName)
result = removefile(tokenpath, verbose)
if result == const.ENoError:
    pr("Token file '{}' removed. You need to re-authorize "
       "the application upon next run".format(tokenpath))
else:
    perr("Failed to remove the token file '{}'".format(tokenpath))
    perr("You need to remove it manually")

prgrpath = os.path.join(configdir, const.ProgressFileName)
subresult = removefile(prgrpath, verbose)
if subresult == const.ENoError:
    pr("Progress file '{}' removed, "
       "any upload will be started from the beginning.".format(prgrpath))
else:
    perr("Failed to remove the progress file '{}'".format(prgrpath))
    result = subresult

if cleanlevel >= 2:
    cachepath = os.path.join(configdir, const.HashCacheFileName)
    # use removefile() so we get a result code back (os.remove() returns None)
    subresult = removefile(cachepath, verbose)
    if subresult == const.ENoError:
        pr("Hash cache file '{}' removed.".format(cachepath))
    else:
        perr("Failed to remove the hash cache file '{}'".format(cachepath))
        perr("You need to remove it manually")
        result = subresult

return result
def __verify_current_file(self, j, gotlmd5):
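    # Verify the just-transferred file by comparing its local size (and MD5)
    # against the 'size' and 'md5' entries of the remote file's JSON metadata.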
    # if we really don't want to verify
    if self.__current_file == '/dev/null' and not self.__verify:
        return const.ENoError

    rsize = 0
    rmd5 = 0

    # always perform the size check, even if __verify is False
    if 'size' in j:
        rsize = j['size']
    else:
        perr("Unable to verify JSON: '{}', as no 'size' entry found".format(j))
        return const.EHashMismatch

    if 'md5' in j:
        rmd5 = j['md5']
    #elif 'block_list' in j and len(j['block_list']) > 0:
    #    rmd5 = j['block_list'][0]
    #else:
    #    # quick hack for meta's 'block_list' field
    #    pwarn("No 'md5' nor 'block_list' found in json:\n{}".format(j))
    #    pwarn("Assuming MD5s match, checking size ONLY.")
    #    rmd5 = self.__current_file_md5
    else:
        perr("Unable to verify JSON: '{}', as no 'md5' entry found".format(j))
        return const.EHashMismatch

    self.pd("Comparing local file '{}' and remote file '{}'".format(