rfile = remotefile
self.__remote_json = {}
self.pd("Downloading '{}' as '{}'".format(rfile, localfile))
self.__current_file = localfile
#if self.__verify or self.__resumedownload:
self.pd("Getting info of remote file '{}' for later verification".format(rfile))
result = self.__get_file_info(rfile)
if result != const.ENoError:
return result
offset = 0
self.pd("Checking if we already have the copy locally")
if os.path.isfile(localfile):
self.pd("Same-name local file '{}' exists, checking if contents match".format(localfile))
self.__current_file_size = getfilesize(self.__current_file)
if const.ENoError == self.__verify_current_file(self.__remote_json, False) \
and not (self.__downloader[:5] == const.DownloaderAria2 and os.path.exists(localfile + '.aria2')):
self.pd("Same local file '{}' already exists, skip downloading".format(localfile))
self.__remove_remote_on_success(remotefile)
return const.ENoError
else:
if not self.shalloverwrite("Same-name locale file '{}' exists but is different, "
"do you want to overwrite it? [y/N]".format(localfile)):
pinfo("Same-name local file '{}' exists but is different, skip downloading".format(localfile))
#return const.ENoError
return const.ESkipped
if self.__resumedownload and \
self.__compare_size(self.__current_file_size, self.__remote_json) == 2:
if self.__resumedl_revertcount < 0:
if self.__current_file_size:
f.seek(offset)
rsize = self.__remote_json['size']
start_time = time.time()
for chunk in r.iter_content(chunk_size = self.__dl_chunk_size):
if chunk: # filter out keep-alive new chunks
f.write(chunk)
f.flush()
pprgr(f.tell(), rsize, start_time)
# https://stackoverflow.com/questions/7127075/what-exactly-the-pythons-file-flush-is-doing
#os.fsync(f.fileno())
# No exception was raised above, so everything went fine
result = const.ENoError
if self.__verify:
self.__current_file_size = getfilesize(self.__current_file)
result = self.__verify_current_file(self.__remote_json, False)
if result == const.ENoError:
self.pv("'{}' <== '{}' OK".format(self.__current_file, rfile))
else:
perr("'{}' <== '{}' FAILED".format(self.__current_file, rfile))
return result
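
# The fragment above streams the HTTP body to disk in fixed-size chunks,
# flushing and reporting progress as it goes.  Below is a minimal,
# self-contained sketch of the same pattern using plain `requests`; the
# function name and the simple progress print are illustrative only, not
# part of bypy's API.
import time
import requests

def stream_download(url, localfile, chunk_size=1024 * 1024):
    start = time.time()
    with requests.get(url, stream=True) as r:
        r.raise_for_status()
        total = int(r.headers.get('Content-Length', 0))
        with open(localfile, 'wb') as f:
            for chunk in r.iter_content(chunk_size=chunk_size):
                if chunk:  # skip keep-alive chunks
                    f.write(chunk)
                    f.flush()
                    if total:
                        print("{:5.1f}% of {} bytes in {:.1f}s".format(
                            100.0 * f.tell() / total, total, time.time() - start))
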
def __store(self, info, path, value):
cached.dirty = True
info['size'] = getfilesize(path)
info['mtime'] = getfilemtime_int(path)
info[self.f.__name__] = value
if cached.debug:
situation = "Storing cache"
if cached.usecache:
situation = "Cache miss"
pdbg((situation + " for file '{}',\n{}: {}\nsize: {}\nmtime: {}").format(
path, self.f.__name__,
value,
info['size'], info['mtime']))
# periodically save to prevent loss in case of system crash
now = time.time()
if now - gvar.last_cache_save >= const.CacheSavePeriodInSec:
if cached.debug:
pdbg("Periodically saving Hash Cash")
def __share_local_file(self, lpath, rpath, fast):
filesize = getfilesize(lpath)
if filesize < const.MinRapidUploadFileSize:
perr("File size ({}) of '{}' is too small (must be greater or equal than {}) to be shared".format(
human_size(filesize), lpath, human_size(const.MinRapidUploadFileSize)))
return const.EParameter
if fast:
self.__get_hashes_for_rapidupload(lpath, setlocalfile = True)
pr(self.__get_accept_cmd(rpath))
return const.ENoError
ulrpath = const.RemoteTempDir + '/' + posixpath.basename(lpath)
result = self.__upload_file(lpath, ulrpath)
if result != const.ENoError:
perr("Unable to share as uploading failed")
return result
(dirpath, dirnames, filenames) = walk
rdir = os.path.relpath(dirpath, localpath)
if rdir == '.':
rdir = ''
else:
rdir = rdir.replace('\\', '/')
rdir = (remotepath + '/' + rdir).rstrip('/') # '/' bites
result = const.ENoError
for name in filenames:
#lfile = os.path.join(dirpath, name)
lfile = joinpath(dirpath, name)
self.__current_file = lfile
self.__current_file_size = getfilesize(lfile)
rfile = rdir + '/' + name.replace('\\', '/')
# if a matching file already exists on Baidu Yun, don't upload it again
upload = True
self.__isrev = False
self.__remote_json = {}
subresult = self.__get_file_info(rfile, dumpex = False)
if subresult == const.ENoError: # same-name remote file exists
self.__isrev = True
if const.ENoError == self.__verify_current_file(self.__remote_json, False):
# the two files are the same
upload = False
self.pv("Remote file '{}' already exists, skip uploading".format(rfile))
else: # the two files are different
if not self.shalloverwrite("Remote file '{}' exists but is different, "
"do you want to overwrite it? [y/N]".format(rfile)):
upload = False
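
# The walk fragment above derives the remote directory from the local one by
# taking the path relative to the upload root and switching Windows
# backslashes to '/'.  A standalone sketch of that mapping; `to_remote_path`
# is an illustrative name, not a bypy helper.
import os

def to_remote_path(localroot, dirpath, name, remoteroot):
    rdir = os.path.relpath(dirpath, localroot)
    if rdir == '.':
        rdir = ''
    else:
        rdir = rdir.replace('\\', '/')
    rdir = (remoteroot + '/' + rdir).rstrip('/')
    return rdir + '/' + name.replace('\\', '/')

# e.g. to_remote_path('/data', '/data/photos', 'a.jpg', '/apps/bypy')
# -> '/apps/bypy/photos/a.jpg'
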
# #pars['ru'] = rangemagic
subresult = self.__get(dpcsurl + 'file', pars,
self.__downchunks_act, (rfile, offset, rsize, start_time), headers = headers, cookies = self.__cookies)
if subresult != const.ENoError:
return subresult
if nextoffset < rsize:
offset += self.__dl_chunk_size
else:
break
# No exception was raised above, so everything went fine
result = const.ENoError
if self.__verify:
self.__current_file_size = getfilesize(self.__current_file)
result = self.__verify_current_file(self.__remote_json, False)
if result == const.ENoError:
self.pv("'{}' <== '{}' OK".format(self.__current_file, rfile))
else:
perr("'{}' <== '{}' FAILED".format(self.__current_file, rfile))
return result
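
# __verify_current_file() is not shown in these fragments; assuming the check
# boils down to comparing the local size and MD5 against the remote metadata,
# a rough standalone sketch could look like this (the 'size' key mirrors the
# remote_json usage above; the 'md5' key is an assumption).
import hashlib
import os

def file_matches(localfile, remote_info):
    if os.path.getsize(localfile) != remote_info.get('size'):
        return False
    h = hashlib.md5()
    with open(localfile, 'rb') as f:
        for block in iter(lambda: f.read(1024 * 1024), b''):
            h.update(block)
    return h.hexdigest() == remote_info.get('md5')
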
dl_args = ''
if not args.downloader_args:
if const.DownloaderArgsEnvKey in os.environ:
dl_args = os.environ[const.DownloaderArgsEnvKey]
else:
prefixlen = len(const.DownloaderArgsIsFilePrefix)
if args.downloader_args[:prefixlen] == const.DownloaderArgsIsFilePrefix: # file
with io.open(args.downloader_args[prefixlen:], 'r', encoding = 'utf-8') as f:
dl_args = f.read().strip()
else:
dl_args = args.downloader_args
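
# The block above resolves the extra downloader arguments from three places:
# an explicit command-line value, a value prefixed with a marker meaning
# "read the arguments from this file", or an environment variable fallback.
# A standalone sketch of that resolution order; the '@' prefix and the env
# var name here are placeholders, not bypy's actual constants.
import io
import os

def resolve_dl_args(cli_value, env_key='DOWNLOADER_ARGS', file_prefix='@'):
    if not cli_value:
        return os.environ.get(env_key, '')
    if cli_value.startswith(file_prefix):
        with io.open(cli_value[len(file_prefix):], 'r', encoding='utf-8') as f:
            return f.read().strip()
    return cli_value
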
# house-keeping reminder
# TODO: may need to move into ByPy for customized config dir
if os.path.exists(const.HashCachePath):
cachesize = getfilesize(const.HashCachePath)
if cachesize > 10 * const.OneM or cachesize == -1:
pwarn((
"*** WARNING ***\n"
"Hash Cache file '{0}' is very large ({1}).\n"
"This may affect program's performance (high memory consumption).\n"
"You can first try to run 'bypy.py cleancache' to slim the file.\n"
"But if the file size won't reduce (this warning persists),"
" you may consider deleting / moving the Hash Cache file '{0}'\n"
"*** WARNING ***\n\n\n").format(const.HashCachePath, human_size(cachesize)))
# check for situations that require no ByPy object creation first
if args.clean >= 1:
return clean_prog_files(args.clean, args.verbose, args.configdir)
# some arguments need some processing
try:
def __get_hashes_for_rapidupload(self, lpath, setlocalfile = False):
if setlocalfile:
self.__current_file = lpath
self.__current_file_size = getfilesize(lpath)
self.__current_file_md5 = md5(self.__current_file)
self.__current_file_slice_md5 = slice_md5(self.__current_file)
self.__current_file_crc32 = crc32(self.__current_file)
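
# Rapid upload needs several digests of the local file (a full MD5, an MD5 of
# a leading slice, and a CRC32).  A standalone sketch of computing them with
# the standard library; the 256 KiB slice size is an assumption about what
# slice_md5() uses, not something shown in these fragments.
import hashlib
import zlib

def rapid_upload_hashes(path, slice_size=256 * 1024):
    full_md5 = hashlib.md5()
    slice_md5 = hashlib.md5()
    crc = 0
    read = 0
    with open(path, 'rb') as f:
        for block in iter(lambda: f.read(1024 * 1024), b''):
            full_md5.update(block)
            if read < slice_size:
                slice_md5.update(block[:slice_size - read])
            read += len(block)
            crc = zlib.crc32(block, crc)
    return full_md5.hexdigest(), slice_md5.hexdigest(), '{:x}'.format(crc & 0xffffffff)
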
def __upload_file(self, localpath, remotepath, ondup = 'overwrite'):
# TODO: this is a quick patch
if not self.__shallinclude(localpath, remotepath, True):
# since we are not going to upload it, there is no error
#return const.ENoError
return const.ESkipped
self.__current_file = localpath
self.__current_file_size = getfilesize(localpath)
result = const.ENoError
if self.__current_file_size > const.MinRapidUploadFileSize:
self.pd("'{}' is being RapidUploaded.".format(self.__current_file))
result = self.__rapidupload_file(localpath, remotepath, ondup)
if result == const.ENoError:
self.pv("RapidUpload: '{}' =R=> '{}' OK.".format(localpath, remotepath))
self.__rapiduploaded = True
else:
self.__rapiduploaded = False
if not self.__rapiduploadonly:
self.pd("'{}' can't be RapidUploaded, now trying normal uploading.".format(
self.__current_file))
# rapid upload failed, we have to upload manually
if self.__current_file_size <= self.__slice_size:
self.pd("'{}' is being non-slicing uploaded.".format(self.__current_file))