# Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
try:
app.submit(resubmit=True)
print("Successfully re-submitted %s; use the 'gstat' command"
" to monitor its progress." % app)
self.session.store.replace(jobid, app)
except Exception, ex:
failed += 1
self.log.error("Failed resubmission of job '%s': %s: %s",
jobid, ex.__class__.__name__, str(ex))
# exit code is practically limited to 7 bits ...
return min(failed, 126)
class cmd_gstat(_BaseCmd):
"""
Print job state.
"""
verbose_logging_threshold = 1
def setup_options(self):
self.add_param("-l", "--state",
action="store",
dest="states",
metavar="STATE",
default=None,
help="Only report about jobs in the given state."
" Multiple states are allowed: separate them with"
" commas.")
self.add_param("-L", "--lifetimes", "--print-lifetimes",
nargs='?',
output_if_exists('auth', "Authorization name")
output_if_exists('updated', "Accessible?")
output_if_exists('ncores', "Total number of cores")
output_if_exists('queued', "Total queued jobs")
output_if_exists('user_queued', "Own queued jobs")
output_if_exists('user_run', "Own running jobs")
# output_if_exists('free_slots', "Free job slots")
output_if_exists('max_cores_per_job', "Max cores per job")
output_if_exists('max_memory_per_core', "Max memory per core")
output_if_exists('max_walltime', "Max walltime per job")
output_if_exists('applications', "Supported applications")
print(table)
print('')
class cmd_gsession(_BaseCmd):
"""
`gsession` get info on a session.
Usage:
gsession `command` [options] SESSION_DIR
commands are listed below, under `subcommands`.
To get detailed info on a specific command, run:
gsession `command` --help
"""
# Setup methods
if submission_time is None:
# Jobs run by the ShellCmd backend transition directly to
# RUNNING; use that timestamp if available.
#
# Since jobs in NEW state will not have any timestamp
# at all, set the default to 0.0.
submission_time = job.execution.timestamp.get(
Run.State.RUNNING, 0.0)
if (submission_time <= end
and submission_time >= start):
matching_jobs.append(job)
return matching_jobs
class cmd_gcloud(_BaseCmd):
"""
`gcloud` manage VMs created by the EC2 backend
Usage:
gcloud `command` [options]
commands are listed below, under `subcommands`.
To get detailed info on a specific command, run:
gcloud `command` --help
"""
def _add_subcmd(self, name, func, help=None):
subparser = self.subparsers.add_parser(name, help=help)
self.log.error(
"Task '%s' (of class '%s') has no defined output/error"
" streams. Ignoring.",
app.persistent_id,
app.__class__.__name__)
failed += 1
except Exception, ex:
print("Failed while reading content of %s for job '%s': %s"
% (stream, jobid, str(ex)))
failed += 1
# exit code is practically limited to 7 bits ...
return min(failed, 126)
class cmd_gservers(_BaseCmd):
"""
List status of computational resources.
"""
def setup_options(self):
    """
    Define `gservers`-specific command-line options.

    `-n` suppresses the (default) refresh of resource state before
    printing; `-p` restricts output to a chosen subset of attributes.
    """
    # Resource statuses are refreshed by default; `-n` prints only
    # what is already recorded in the local database.
    self.add_param(
        "-n", "--no-update",
        action="store_false", dest="update", default=True,
        help="Do not update resource statuses;"
        " only print what's in the local database.")
    # Limit the printed attributes to a user-supplied list.
    self.add_param(
        "-p", "--print",
        action="store", dest="keys", metavar="LIST", default=None,
        help="Only print resource attributes whose name appears in"
        " this comma-separated list. (Attribute name is as given in"
        " the configuration file, or listed in the middle column"
        " in `gservers` output.)")
self.params.lifetimes, delimiter='\t')
else:
lifetimes_csv = csv.writer(self.params.lifetimes)
lifetimes_csv.writerows(lifetimes_rows)
# since `_get_jobs` swallows any exception raised by invalid
# job IDs or corrupted files, let us determine the number of
# failures by counting the number of times we actually run
# this loop and then subtract from the number of times we
# *should* have run, i.e., the number of arguments we were passed.
failed = len(self.params.args) - tot
# exit code is practically limited to 7 bits ...
return min(failed, 126)
class cmd_gget(_BaseCmd):
"""
Retrieve output files of a job.
Output files can only be retrieved once a job has reached the
'RUNNING' state; this command will print an error message if
no output files are available.
Output files can be retrieved multiple times until a job reaches
'TERMINATED' state: after that, the remote storage will be
released once the output files have been fetched.
"""
def setup_options(self):
self.add_param("-A",
action="store_true",
dest="all",
default=False,
app.kill()
self.session.store.replace(jobid, app)
# or shall we simply return an ack message ?
print("Sent request to cancel job '%s'." % jobid)
except Exception, ex:
print("Failed canceling job '%s': %s" % (jobid, str(ex)))
failed += 1
continue
# exit code is practically limited to 7 bits ...
return min(failed, 126)
class cmd_gtail(_BaseCmd):
"""
Display the last lines from a job's standard output or error stream.
Optionally, keep running and displaying the last part of the file
as more lines are written to the given stream.
"""
def setup_args(self):
    """
    Override `GC3UtilsScript`:class: `setup_args` method: `gtail`
    operates on exactly one job, so restrict the positional
    arguments to a single JOBID (presumably the inherited default
    accepts several job IDs -- TODO confirm against the base class).
    """
    self.add_param('args',
                   nargs=1,
                   metavar='JOBID',
                   help="Job ID string identifying the single job to"
                   " operate upon.")
else:
# if jobid is not a toplevel job Session.remove()
# will raise an error.
self.session.store.remove(jobid)
self.log.info("Removed job '%s'", jobid)
except:
failed += 1
self.log.error("Failed removing '%s' from persistency layer."
" option '-f' harmless" % jobid)
continue
# exit code is practically limited to 7 bits ...
return min(failed, 126)
class cmd_ginfo(_BaseCmd):
"""
Print detailed information about a job.
A complete dump of all the information known about jobs listed on
the command line is printed; this will only make sense if you know
GC3Libs internals.
"""
verbose_logging_threshold = 2
def setup_options(self):
self.add_param("-c", "--csv", action="store_true", dest="csv",
default=False,
help="Print attributes in CSV format,"
" e.g., for generating files that can be"
" read by a spreadsheet program."
" '%s'" % (app_download_dir,))
else:
print("A snapshot of job results was successfully"
" retrieved in '%s'" % (app_download_dir,))
except Exception, ex:
print("Failed retrieving results of job '%s': %s"
% (jobid, str(ex)))
failed += 1
continue
# exit code is practically limited to 7 bits ...
return min(failed, 126)
class cmd_gkill(_BaseCmd):
"""
Cancel a submitted job. Given a list of jobs, try to cancel each
one of them; exit with code 0 if all jobs were cancelled
successfully, and 1 if some job was not.
The command will print an error message if a job cannot be
canceled because it's in NEW or TERMINATED state, or if some other
error occurred.
"""
def setup_options(self):
    """
    Define `gkill`-specific command-line options.

    `-A` cancels every job stored in the session instead of the
    job IDs given on the command line.
    """
    # Fix: help text previously read "Remove all stored jobs. USE
    # WITH CAUTION!", apparently copy-pasted from the job-removal
    # command; `gkill -A` cancels jobs, it does not remove them.
    self.add_param("-A", action="store_true", dest="all", default=False,
                   help="Kill all stored jobs. USE WITH CAUTION!")
def main(self):
try:
task_queue.append(child)
except AttributeError:
# Application class does not have a `tasks` attribute
pass
timestamps.sort(cmp=lambda x, y: cmp(x[0], y[0]))
for entry in timestamps:
print "%s %s: %s" % (
time.strftime(
"%b %d %H:%M:%S", time.localtime(entry[0])
),
str(entry[1]),
entry[2])
class cmd_gselect(_BaseCmd):
"""
Print IDs of jobs that match the specified criteria.
The criteria specified by command-line options will be
AND'ed together, i.e., a job must satisfy all of them
in order to be selected.
"""
def setup_args(self):
    """
    Accept no positional arguments.

    Unlike most subcommands, `gselect` takes no job IDs on the
    command line: the jobs to report are chosen solely by the
    selection criteria given as options.
    """
    pass
def setup_options(self):
self.add_param(
'--error-message', '--errmsg', metavar='REGEXP',
help=("Select jobs such that a line in their error output (STDERR)"
" file matches the given regular expression pattern."),
print(str(app.persistent_id))
if self.params.verbose == 0:
utils.prettyprint(app.execution, indent=4,
width=width, only_keys=only_keys)
else:
# with `-v` and above, dump the whole `Application` object
utils.prettyprint(app, indent=4, width=width,
only_keys=only_keys)
if self.params.tabular:
print(table)
failed = len(self.params.args) - ok
# exit code is practically limited to 7 bits ...
return min(failed, 126)
class cmd_gresub(_BaseCmd):
"""
Resubmit an already-submitted job with (possibly) different parameters.
If you resubmit a job that is not in terminal state, the existing job
is canceled before re-submission.
"""
def setup_options(self):
self.add_param("-r", "--resource",
action="store",
dest="resource_name",
metavar="NAME",
default=None,
help='Select execution resource by name')
self.add_param("-c", "--cores",
action="store",