Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
def _cached_json_results(results_path, results_factory=None):
    """
    Load cached JSON results from ``results_path``.

    When the file does not exist, fall back to ``results_factory()`` and
    persist its output at ``results_path`` for the next caller; without a
    factory the original FileNotFoundError is re-raised.  A sibling
    ``.lock`` file serializes concurrent readers and writers.
    """
    with FileLock(results_path + '.lock'):
        try:
            with open(results_path, mode='r') as fh:
                cached = json.load(fh)
        except FileNotFoundError:
            if not results_factory:
                raise
            cached = results_factory()
            with open(results_path, mode='w') as fh:
                json.dump(cached, fh)
    return cached
# NOTE(review): fragment of a self-update routine; `windowed`, `app_name`,
# `custom_dir`, `APP_NAME`, `simpleserver` and the *_TIMEOUT/_PAUSE constants
# are defined outside this view -- confirm against the full module.
# On macOS a windowed (.app bundle) build is launched via its inner
# Mach-O executable rather than the bundle directory itself.
if sys.platform == 'darwin' and windowed:
    app_run_command = './{}.app/Contents/MacOS/{}'.format(app_name,
                                                          app_name)
    app_name = '{}.app'.format(app_name)
# Pick the lock file location: beside the app for custom dirs, otherwise
# in the per-user data directory (created on first use).
if custom_dir:
    # update with custom_dir is multiprocessing-safe
    lock_path = 'pyu.lock'
else:
    if not os.path.exists(appdirs.user_data_dir(APP_NAME)):
        os.makedirs(appdirs.user_data_dir(APP_NAME))
    lock_path = os.path.join(appdirs.user_data_dir(APP_NAME),
                             'pyu.lock')
update_lock = filelock.FileLock(lock_path, LOCK_TIMEOUT)
# Presence of this file is the signal that the updated app launched
# successfully -- TODO confirm who writes it.
version_file = 'version2.txt'
with update_lock.acquire(LOCK_TIMEOUT, 5):
    # Retry the launch up to 5 times, pausing between attempts so the
    # update process has time to finish.
    count = 0
    while count < 5:
        # Call the binary to self update
        subprocess.call(app_run_command, shell=True)
        if os.path.exists(version_file):
            break
        count += 1
        print("Retrying app launch!")
        # Allow enough time for update process to complete.
        time.sleep(AUTO_UPDATE_PAUSE)
    simpleserver.stop()
# Detect if it was an overwrite error
def test_try_acquire_locks(testing_workdir):
    """Cover both outcomes of utils.try_acquire_locks."""
    # Two distinct, unlocked locks can be taken together.
    first = filelock.FileLock(os.path.join(testing_workdir, 'lock1'))
    second = filelock.FileLock(os.path.join(testing_workdir, 'lock2'))
    with utils.try_acquire_locks([first, second], timeout=1):
        pass
    # A second handle on an already-held lock path must fail.
    duplicate = filelock.FileLock(os.path.join(testing_workdir, 'lock1'))
    # Also verify that the error message contains the word "lock", since we
    # rely on this elsewhere.
    with pytest.raises(BuildLockError, match='Failed to acquire all locks'):
        with utils.try_acquire_locks([first, duplicate], timeout=1):
            pass
def add(tool, mails, extra, result, ts=None):
    """
    Record one Email row per address for the given tool, under the file lock.

    Args:
        tool: tool name/identifier, resolved via Tool.get_or_create.
        mails: iterable of mail addresses to record.
        extra: extra payload stored on each Email row.
        result: result value stored on each Email row.
        ts: timestamp for the rows; defaults to "now" at call time.
    """
    # BUGFIX: the previous signature used
    #   ts=lmdutils.get_timestamp("now")
    # as the default, which Python evaluates ONCE at import time -- every
    # defaulted call reused the module-load timestamp. Resolve it per call.
    if ts is None:
        ts = lmdutils.get_timestamp("now")
    check(Email.__tablename__)
    with FileLock(lock_path):
        tool = Tool.get_or_create(tool)
        for mail in mails:
            session.add(Email(tool, ts, mail, extra, result))
        session.commit()
def __init__(self, path, branch=None, component=None, local=False):
    """
    Set up a repository wrapper for *path*.

    Falls back to the class default branch when *branch* is omitted, and
    guards repository operations with a ``<path>.lock`` file (120 s
    timeout).  Unless *local*, an ssh wrapper is prepared, and an invalid
    checkout is (re)initialized.
    """
    self.path = path
    self.branch = self.default_branch if branch is None else branch
    self.component = component
    self.last_output = ''
    lock_file = self.path.rstrip('/').rstrip('\\') + '.lock'
    self.lock = FileLock(lock_file, timeout=120)
    if not local:
        # Create ssh wrapper for possible use
        create_ssh_wrapper()
    if not self.is_valid():
        self.init()
def save(self, out, timeout):
    """
    Append the not-yet-saved tail of ``self.cache`` to the JSON asset
    list at ``<out>/<self.filename>``, guarded by a ``.lock`` file.

    On lock timeout the write is skipped and an error is logged.
    """
    target = os.path.join(out, self.filename)
    try:
        with filelock.FileLock(target + '.lock', timeout=timeout):
            assets = []
            # Merge with whatever a previous process already wrote.
            if os.path.isfile(target):
                with open(target) as f:
                    assets = json.load(f)
            assets.extend(self.cache[self.saved_idx:])
            with open(target, 'w') as f:
                json.dump(assets, f, indent=4)
            # Everything cached so far is now persisted.
            self.saved_idx = len(self.cache)
    except filelock.Timeout:
        logger.error('Process to write a list of assets is timeout')
# NOTE(review): Python 2 fragment (see the bare `print msg` below); the
# enclosing function's `def` and the `parser` setup are outside this view.
parser.add_option("-m", "--max",
                  action="store", dest="max", type="int",
                  help="Maximum number of videos to process", default=1)
parser.add_option("-d", "--dryrun",
                  action="store_true", dest="dryrun",
                  help="Don't start new zencoder jobs or upload to gcs",
                  default=False)
options, args = parser.parse_args()
util.setup_logging(options.nolog)
# Make sure only one youtube-export converter is running at a time.
with filelock.FileLock("export.lock", timeout=2):
    (success, error_ids) = YouTubeExporter.convert_missing_downloads(
        options.max, options.dryrun)
# Report ids that failed conversion, both to the log and to stdout.
if error_ids:
    msg = ('Skipped %d youtube-ids due to errors:\n%s\n'
           % (len(error_ids), '\n'.join(sorted(error_ids))))
    logger.warning(msg)
    # Make this part of the stdout output as well, so it gets passed
    # from cron to our email.
    print msg
return (success, len(error_ids))
def serialize_keras_model(model):
    """
    Serialize *model* to bytes by saving it to ``temp_model.h5`` (under a
    file lock) and reading the file back.

    Returns:
        bytes: the raw HDF5 file contents.
    """
    lock = FileLock('temp_model.h5.lock')
    with lock:
        model.save('temp_model.h5')
        with open('temp_model.h5', 'rb') as f:
            model_bin = f.read()
            # FIX: removed the redundant f.close() that sat inside the
            # `with` block -- the context manager owns closing the file.
    return model_bin
def _get_file_lock(user):
    """Return a per-user FileLock living in the cache directory."""
    lock_name = '{}.lock'.format(slugify(user))
    return filelock.FileLock(os.path.join(get_cache_dir(), lock_name))
Argument:
    path: none | string
        Location of the pickled overlaps file; when omitted, falls back
        to ``self.path`` (which must then be set).
"""
# Check argument.
if path is None:
    path = self.path
assert path is not None, "Missing argument: path"
# Normalize path.
path = os.path.expanduser(path)
path = os.path.abspath(path)
# Load overlaps.
# NOTE(review): pickle.load executes arbitrary code on malicious input --
# confirm the file is always locally produced and never user-supplied.
lock = filelock.FileLock(path + ".lock")
with lock:
    with open(path, mode='rb') as file_:
        self.overlaps = pickle.load(file_)
return