Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
"--------------------------------\n"
"Error: Your Python 'multiprocess' library is probably "
"not properly installed (missing C extensions). "
"You need to install a C compiler and Python headers before "
"installing the Python 'multiprocess' library. "
"(All these hassles could have been saved if Python's builtin "
"'multiprocessing' works properly, sigh.)\n"
"Fix for debian derivatives:\n"
"- Install gcc: # apt-get install gcc\n"
"- Install python-dev: # apt-get install python-dev\n"
"- Reinstall Python 'multiprocess' library:\n"
" # pip uninstall -y multiprocess\n"
" # pip install -v multiprocess\n"
"- If there's no errors/warnings in the above actions, "
"then this error should be gone when you run 'bypy' with '{}' again.\n"
).format(formatex(pe), const.MultiprocessOption)
perr(errmsg)
self.quit(const.EFatal)
self.__setting = {}
self.__downloader = downloader.lower().strip()
if downloader_args:
self.__downloader_args = downloader_args
else:
if downloader in const.DownloaderDefaultArgs:
self.__downloader_args = const.DownloaderDefaultArgs[downloader]
else:
self.__downloader_args = ''
if os.path.exists(self.__settingpath):
try:
self.__setting = jsonload(self.__settingpath)
except Exception as ex:
perr("Error loading settings: {}, using default settings".format(formatex(ex)))
self.__hashcachepath = configdir + os.sep + const.HashCacheFileName
cached.hashcachepath = self.__hashcachepath
self.__certspath = os.path.join(os.path.dirname(__file__), const.ByPyCertsFileName)
self.__requester = requester
self.__apikey = apikey
self.__secretkey = secretkey
self.__use_server_auth = not secretkey
self.__slice_size = slice_size
self.__dl_chunk_size = dl_chunk_size
self.__verify = verify
self.__retry = retry
self.__quit_when_fail = quit_when_fail
self.__timeout = timeout
self.__resumedownload = resumedownload
def __store_json(self, r):
    """Decode the token response *r* as JSON and persist it.

    On a decode failure, print the raw response body and fall back to
    self.__prompt_clean(); on success, hand the decoded dict to
    self.__store_json_only() and return its result code.
    """
    try:
        decoded = r.json()
    except Exception as ex:
        perr("Failed to decode JSON:\n{}".format(formatex(ex)))
        perr("Error response:\n{}".format(r.text))
        return self.__prompt_clean()
    return self.__store_json_only(decoded)
def __dump_exception(self, ex, url, pars, r, act):
    """Print diagnostic details for a failed web request.

    Output is gated on self.debug / self.verbose; the exception trace
    itself is shown only in debug mode.

    Args:
        ex: the exception that was raised while accessing *url*.
        url: the request URL.
        pars: the request parameters (dict).
        r: the response object, or None if no response was received.
        act: the callback whose processing failed (its __name__ is shown).
    """
    if self.debug or self.verbose:
        perr("Error accessing '{}'".format(url))
        if self.debug:
            perr(formatex(ex))
        perr("Function: {}".format(act.__name__))
        perr("Website parameters: {}".format(pars))
        # Fixed idiom: compare to None with 'is not', not '!='.
        if r is not None:
            # just playing it safe
            if hasattr(r, 'url'):
                perr("Full URL: {}".format(r.url))
            if hasattr(r, 'status_code') and hasattr(r, 'text'):
                perr("HTTP Response Status Code: {}".format(r.status_code))
                # Don't dump the response body of a successful (200/206)
                # download request -- it would be the file contents.
                if (r.status_code != 200 and r.status_code != 206) \
                    or (not ('method' in pars and pars['method'] == 'download') \
                        and url.find('method=download') == -1 \
                        and url.find('baidupcs.com/file/') == -1):
                    self.__print_error_json(r)
                    perr("Website returned: {}".format(rb(r.text)))
def __get_file_info_act(self, r, args):
    """Response handler that locates one remote file in a listing.

    *args* carries the remote path to look for. The decoded listing JSON
    is appended to self.jsonq; if an entry whose 'path' matches exactly
    (case-sensitive) is found, it is cached in self.__remote_json.

    Returns const.ENoError on a match, const.EFileNotFound when absent,
    or const.ERequestFailed if the JSON lacks the expected keys.
    """
    remotefile = args
    try:
        listing = r.json()
        self.jsonq.append(listing)
        self.pd("List json: {}".format(listing))
        for entry in listing['list']:
            if entry['path'] == remotefile:  # case-sensitive
                self.__remote_json = entry
                self.pd("File info json: {}".format(self.__remote_json))
                return const.ENoError
        return const.EFileNotFound
    except KeyError as ex:
        perr(formatex(ex))
        return const.ERequestFailed
result = self.__share_local(lpath, rpath, fast)
if not fast:
# not critical
self.__delete(const.RemoteTempDir)
return result
else:
rpath = get_pcs_path(path)
srpath = get_pcs_path(sharepath)
tmpdir = tempfile.mkdtemp(prefix = 'bypy_')
self.pd("Using local temporary directory '{}' for sharing".format(tmpdir))
try:
result = self.__share_remote(tmpdir, rpath, srpath, fast)
except Exception as ex:
result = const.EFatal
perr("Exception while sharing remote path '{}'.\n{}".format(
rpath, formatex(ex)))
finally:
removedir(tmpdir)
return result
def __cdl_addmon_act(self, r, args):
    """Response handler for adding an offline (cloud) download task.

    Stores the decoded JSON response into args[0] (a one-slot list used
    as an out-parameter) and pretty-prints it.

    Returns const.ENoError on success; const.EInvalidJson when the
    response body cannot be decoded (the raw body is printed).
    """
    try:
        decoded = r.json()
        args[0] = decoded
        pr(pprint.pformat(decoded))
        return const.ENoError
    except Exception as ex:
        perr("Exception while adding offline (cloud) download task:\n{}".format(formatex(ex)))
        perr("Baidu returned:\n{}".format(rb(r.text)))
        return const.EInvalidJson
# pay the history debt ...
# TODO: Remove some time later when no-body uses the old bin format cache
if cached.isbincache(cached.cache):
pinfo("ONE TIME conversion for binary format Hash Cache ...")
stringifypickle(cached.cache)
pinfo("ONE TIME conversion finished")
if existingcache: # not empty
if cached.verbose:
pinfo("Merging with existing Hash Cache")
cached.mergeinto(existingcache, cached.cache)
cached.cacheloaded = True
if cached.verbose:
pr("Hash Cache File loaded.")
#except (EOFError, TypeError, ValueError, UnicodeDecodeError) as ex:
except Exception as ex:
perr("Fail to load the Hash Cache, no caching.\n{}".format(formatex(ex)))
cached.cache = existingcache
else:
if cached.verbose:
pr("Hash Cache File '{}' not found, no caching".format(cached.hashcachepath))
else:
if cached.verbose:
pr("Not loading Hash Cache since 'cacheloaded' is '{}'".format(cached.cacheloaded))
return cached.cacheloaded
def __store_json_only(self, j):
    """Cache the authorization JSON *j* and write it to the token file.

    Extracts and remembers the 'access_token', then dumps *j* to
    self.__tokenpath and restricts the file to owner read/write (0o600),
    since it holds the OAuth access token.

    Returns const.ENoError on success, const.EFileWrite if the token
    file cannot be written or chmod'ed.
    """
    self.__json = j
    self.__access_token = self.__json['access_token']
    self.pd("access token: " + self.__access_token)
    self.pd("Authorize JSON:")
    self.pd(self.__json)
    # Owner-only permissions: the token grants full account access.
    tokenmode = 0o600
    try:
        jsondump(self.__json, self.__tokenpath)
        os.chmod(self.__tokenpath, tokenmode)
        return const.ENoError
    except Exception as ex:
        # Fixed typo in user-facing message: "occured" -> "occurred".
        perr("Exception occurred while trying to store access token:\n{}".format(
            formatex(ex)))
        return const.EFileWrite