def lock(self):
    return fasteners.InterProcessLock(self.lock_file_name)
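
# Minimal usage sketch for the method above; the enclosing class, its
# lock_file_name attribute, and the lock path are assumptions for illustration:
import fasteners

class Storage(object):  # hypothetical host class
    lock_file_name = '/tmp/storage.lock'  # hypothetical path

    def lock(self):
        return fasteners.InterProcessLock(self.lock_file_name)

storage = Storage()
with storage.lock():
    pass  # critical section: held by at most one process at a time
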
def inter_process_locked(name):
    return process_lock.interprocess_locked(get_lock_file(name))
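
# Illustrative sketch of the fasteners decorator form that the helper above
# wraps; the lock path stands in for whatever get_lock_file(name) returns:
import fasteners

@fasteners.interprocess_locked('/tmp/example.lock')
def update_shared_state():
    pass  # at most one process executes this body at a time
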
# The plugins storage dir is mounted as a writable shared directory
# between all containers and the host machine. Most mock plugins make
# use of a utility method in mock_plugins.utils.update_storage to save
# state that the test can later read.
resources = [{
    'src': self.plugins_storage_dir,
    'dst': '/opt/integration-plugin-storage',
    'write': True
}]
# Import only for the sake of finding the module path on the file
# system
import mock_plugins
import fasteners
mock_plugins_dir = os.path.dirname(mock_plugins.__file__)
fasteners_dir = os.path.dirname(fasteners.__file__)
# All code directories will be mapped to the management worker
# virtualenv and will also be included in the custom agent package
# created in the test suite setup
code_directories = [
    # Plugins import mock_plugins.utils.update_storage all over the
    # place
    mock_plugins_dir,
    # mock_plugins.utils.update_storage makes use of the fasteners
    # library
    fasteners_dir
]
# All plugins under mock_plugins are mapped. These are mostly used
# as operations and workflows mapped in the different tests blueprints.

# The plugins storage dir is mounted as a writable shared directory
# between all containers and the host machine. Most mock plugins make
# use of a utility method in
# integration_tests_plugins.utils.update_storage to save state that the
# test can later read.
resources = [{
    'src': self.plugins_storage_dir,
    'dst': '/opt/integration-plugin-storage',
    'write': True
}]
# Import only for the sake of finding the module path on the file
# system
import integration_tests_plugins
import fasteners
plugins_dir = os.path.dirname(integration_tests_plugins.__file__)
fasteners_dir = os.path.dirname(fasteners.__file__)
# All code directories will be mapped to the management worker
# virtualenv and will also be included in the custom agent package
# created in the test suite setup
code_directories = [
    # Plugins import integration_tests_plugins.utils.update_storage
    # all over the place
    plugins_dir,
    # integration_tests_plugins.utils.update_storage makes use of the
    # fasteners library
    fasteners_dir
]
# All plugins under integration_tests_plugins are mapped. These are
# mostly used as operations and workflows mapped in the different tests
# blueprints.
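
# Purely illustrative sketch of what mapping the code directories could look
# like, reusing the resources format from the snippets above; the destination
# path is hypothetical and the real wiring happens in the test suite setup:
for code_dir in code_directories:
    resources.append({
        'src': code_dir,
        'dst': '/opt/code/' + os.path.basename(code_dir),  # hypothetical mount point
        'write': False
    })
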
print("Deletion unsuccessful")
else:
print("Download unsuccessful")
zipfileObj = zipfile.ZipFile('/local/incomingData.zip', 'r', compression = zipfile.ZIP_DEFLATED)
# We are extracting to the file to incoming_dir in container
zipfileObj.extractall(incoming_dir)
# Create outgoing_dir directory as the plugin container will output data there after processing.
if not os.path.exists(outgoing_dir):
os.makedirs(outgoing_dir)
if __name__ == "__main__":
incoming_dir = os.environ.get("INCOMING_DIR")
# The init-storage container in all the pods should acquire the lock
with fasteners.InterProcessLock("/share/.lockfile"):
# If "/share/.download-failed" exists, exit with an error code immediately
if os.path.exists("/share/.download-failed"):
print("Previous pod failed to download the data. Exiting with failure...")
exit(1)
# If there is some data in incoming_dir but "/share/.download-succeeded" doesn't exist, it is a failure case
# Exit with error code immediately
if os.path.exists(incoming_dir) and len(os.listdir(incoming_dir)) > 0 and not os.path.exists('/share/.download-succeeded'):
print("Some data was downloaded, but '/share/.download-succeeded' file doesn't exist. Exiting with failure...")
exit(1)
# Download the data if "/share/.download-succeeded" does not exist
if not os.path.exists('/share/.download-succeeded'):
try:
print("Lock acquired. Downloading data from Swift...")
getData(containerName=os.environ.get('SWIFT_KEY'), in_dir=incoming_dir, out_dir=os.environ.get('OUTGOING_DIR'))
os.mknod('/local/.download-pod')
except Exception as err:
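                # The original snippet is cut off here; a plausible, assumed
                # completion based on the failure-marker checks above:
                print("Download failed: {0}".format(err))
                os.mknod('/share/.download-failed')  # signal other pods to fail fast (assumption)
                exit(1)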

@interprocess_locked(interprocess_lock_path)
def new(frontend_scan_id, user, session):
    try:
        scan = Scan.get_scan(frontend_scan_id, user.id, session)
    except IrmaDatabaseResultNotFound:
        scan = Scan(frontend_scan_id, user.id)
        session.add(scan)
        session.commit()
    log.debug("scanid %s: user_id %s id %s",
              frontend_scan_id, user.id, scan.id)
    return scan
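
# The @interprocess_locked usage in these snippets assumes a module-level lock
# path roughly like the following sketch; the exact path is an assumption:
from fasteners import interprocess_locked

interprocess_lock_path = '/var/run/lock/scan.lock'  # hypothetical path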

@interprocess_locked(interprocess_lock_path)
def is_finished(scanid):
    with session_transaction() as session:
        scan = Scan.load_from_ext_id(scanid, session=session)
        if scan.finished() and scan.status != IrmaScanStatus.finished:
            scan.set_status(IrmaScanStatus.finished)
            session.commit()
            # launch flush celery task on brain
            log.debug("scanid: %s calling scan_flush", scan.external_id)
            celery_brain.scan_flush(scan.external_id)

@interprocess_locked(interprocess_lock_path)
def is_finished(scan_id):
    with session_transaction() as session:
        scan = Scan.load_from_ext_id(scan_id, session)
        log.debug("scan %s: is_finished %d/%d", scan_id,
                  scan.probes_finished, scan.probes_total)
        if scan.finished() and scan.status != IrmaScanStatus.finished:
            # call the finished hook for each file
            for file_ext in scan.files_ext:
                file_ext.hook_finished()
            scan.set_status(IrmaScanStatus.finished)
            session.commit()
            # launch flush celery task on brain
            log.debug("scan %s: calling scan_flush", scan.external_id)
            celery_brain.scan_flush(scan.external_id)

def _submit(self, report, test_cases):
    # prepare data for submission as CrashInfo
    crash_info = self.create_crash_info(report, self.target_binary)
    # search for a cached signature match; if the signature is already in
    # the cache and marked as frequent, don't bother submitting
    with fasteners.process_lock.InterProcessLock(
            os.path.join(tempfile.gettempdir(), "fm_sigcache.lock")):
        collector = Collector()
        cache_sig_file, cache_metadata = collector.search(crash_info)
        if cache_metadata is not None:
            if cache_metadata["frequent"]:
                log.info("Frequent crash matched existing signature: %s",
                         cache_metadata["shortDescription"])
                if not self.force_report:
                    return
            elif "bug__id" in cache_metadata:
                log.info("Crash matched existing signature (bug %s): %s",
                         cache_metadata["bug__id"],
                         cache_metadata["shortDescription"])
                # we will still report this one, but no more
                cache_metadata["frequent"] = True
            # there is already a signature, so initialize the count
            cache_metadata.setdefault("_grizzly_seen_count", 0)

class Git(git.cmd.Git):
    """Prevents asking for password for private repos"""
    env = {'GIT_ASKPASS': 'echo'}

    def __getattr__(self, item):
        def wrapper(*args, **kwargs):
            env = kwargs.pop('env', {})
            env.update(self.env)
            return super(Git, self).__getattr__(item)(*args, env=env, **kwargs)
        return wrapper
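
# Illustrative usage sketch; the URL is a placeholder. Any GitPython command
# called through this wrapper gets GIT_ASKPASS=echo injected, so a password
# prompt for a private repo fails fast instead of blocking for input:
Git().clone('https://example.com/private/repo.git', '/tmp/repo')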

class MsmProcessLock(InterProcessLock):
    def __init__(self):
        lock_path = '/tmp/msm_lock'
        if not exists(lock_path):
            lock_file = open(lock_path, 'w+')
            lock_file.close()
            chmod(lock_path, 0o777)
        super().__init__(lock_path)
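
# Minimal usage sketch, assuming `from fasteners import InterProcessLock`,
# `from os.path import exists`, and `from os import chmod` at module level:
with MsmProcessLock():
    pass  # only one msm process at a time runs this section
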
# The cached_property class defined below was copied from the
# PythonDecoratorLibrary at:
# https://wiki.python.org/moin/PythonDecoratorLibrary/#Cached_Properties
#
# © 2011 Christopher Arndt, MIT License
#
class cached_property(object):
"""Decorator for read-only properties evaluated only once within TTL period.