# Excerpt: rendering Flink job status lines (with dashboard links, exception
# details, and pod status) into an indented text report.
for job in unique_jobs:
    job_id = job["jid"]
    if verbose:
        fmt = """    {job_name: <{allowed_max_job_name_length}.{allowed_max_job_name_length}} {state: <11} {job_id} {start_time}
        {dashboard_url}"""
    else:
        fmt = "    {job_name: <{allowed_max_job_name_length}.{allowed_max_job_name_length}} {state: <11} {start_time}"
    start_time = datetime.fromtimestamp(int(job["start-time"]) // 1000)
    output.append(
        fmt.format(
            job_id=job_id,
            job_name=get_flink_job_name(job),
            allowed_max_job_name_length=allowed_max_job_name_length,
            state=(job.get("state") or "unknown"),
            start_time=f"{str(start_time)} ({humanize.naturaltime(start_time)})",
            dashboard_url=PaastaColors.grey(f"{dashboard_url}/#/jobs/{job_id}"),
        )
    )
    if verbose and job_id in status.exceptions:
        exceptions = status.exceptions[job_id]
        root_exception = exceptions["root-exception"]
        if root_exception is not None:
            output.append(f"    Exception: {root_exception}")
            ts = exceptions["timestamp"]
            if ts is not None:
                # A variant of this code shifts the UTC epoch to local time first:
                # exc_ts = datetime_from_utc_to_local(datetime.utcfromtimestamp(int(ts) // 1000))
                exc_ts = datetime.fromtimestamp(int(ts) // 1000)
                output.append(
                    f"      {str(exc_ts)} ({humanize.naturaltime(exc_ts)})"
                )
if verbose and len(status.pod_status) > 0:
    output.append("    Pods:")
    rows: List[Union[str, Tuple[str, str, str]]] = [("Pod Name", "Host", "Phase")]
    for pod in status.pod_status:
        rows.append((pod["name"], pod["host"], pod["phase"]))
        if pod["reason"] != "":
            rows.append(PaastaColors.grey(f"  {pod['reason']}: {pod['message']}"))
    pods_table = format_table(rows)
    output.extend([f"    {line}" for line in pods_table])
return 0

# Per-backend status row construction (the loop header and the first branch of
# the status check are not shown in this excerpt).
    elif backend.status == "DOWN":
        status = PaastaColors.red(backend.status)
    elif backend.status == "MAINT":
        status = PaastaColors.grey(backend.status)
    else:
        status = PaastaColors.yellow(backend.status)
    if backend.check_duration is None:
        check_duration = ""
    else:
        check_duration = str(backend.check_duration)
    row = (
        f"{backend.hostname}:{backend.port}",
        f"{backend.check_status}/{backend.check_code} in {check_duration}ms",
        humanize.naturaltime(timedelta(seconds=backend.last_change)),
        status,
    )
    if not backend.has_associated_task:
        row = tuple(
            PaastaColors.grey(remove_ansi_escape_sequences(col)) for col in row
        )
    rows.append(row)
return format_table(rows)

# Excerpt from a message-rate ("speedo") monitor: periodically log message and
# byte throughput, and warn when message lag exceeds the configured maximum.
if parent.msg_speedo_interval > now - parent.msg_speedo_last:
    return True

lag = now - msgtime
logger.info(
    "speedo: %3d messages received: %5.2g msg/s, %s bytes/s, lag: %4.2g s" % (
        parent.msg_speedo_msgcount,
        parent.msg_speedo_msgcount / (now - parent.msg_speedo_last),
        humanize.naturalsize(parent.msg_speedo_bytecount / (now - parent.msg_speedo_last), binary=True, gnu=True),
        lag,
    )
)

# Warn when the newest message is older than the configured maximum lag (seconds).
if lag > parent.msg_speedo_maxlag:
    logger.warning(
        "speedo: Excessive lag! Messages posted %s"
        % humanize.naturaltime(datetime.timedelta(seconds=lag))
    )

parent.msg_speedo_last = now
parent.msg_speedo_msgcount = 0
parent.msg_speedo_bytecount = 0
return True
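
# Illustrative sketch (the rate value is made up): the naturalsize() flags used in
# the speedo log line above select 1024-based units (binary=True) and GNU-style
# single-letter suffixes (gnu=True), keeping the rate column compact.
import humanize

rate = 123_456_789  # bytes per second, demonstration value
print(humanize.naturalsize(rate))                         # decimal units, e.g. "123.5 MB"
print(humanize.naturalsize(rate, binary=True))            # 1024-based units, e.g. "117.7 MiB"
print(humanize.naturalsize(rate, binary=True, gnu=True))  # GNU short form, e.g. "117.7M"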

# Building one row per node of a sequencer maintenance table (loop header not
# shown in this excerpt).
    node_index = nv.node_index
    node_name = nv.node_name
    location = nv.location
    # pyre-ignore
    target_state = mv.sequencer_target_state.name
    mnt_status = mv.get_sequencer_maintenance_status(n)
    current_state = colored(
        # pyre-ignore
        nv.sequencer_state.state.name,
        _color(mnt_status),
    )
    maintenance_status = colored(mnt_status.name, _color(mnt_status))
    last_updated_at = mv.get_sequencer_last_updated_at(n)
    if last_updated_at:
        last_updated = f"{last_updated_at} ({naturaltime(last_updated_at)})"
    else:
        last_updated = "-"
    tbl.append(
        [
            node_index,
            node_name,
            location,
            target_state,
            current_state,
            maintenance_status,
            last_updated,
        ]
    )
return "Sequencer Maintenances:\n{}".format(
    indent(tabulate(tbl, headers=headers, tablefmt="plain"), prefix="  ")
)

# Event handler for guild member joins: log the action with the account's
# humanized age and a marker for recently created accounts.
def on_guild_member_add(self, event):
    created = humanize.naturaltime(datetime.utcnow() - to_datetime(event.user.id))
    new = (
        event.config.new_member_threshold and
        (time.time() - to_unix(event.user.id)) < event.config.new_member_threshold
    )
    self.log_action(Actions.GUILD_MEMBER_ADD, event, new=' :new:' if new else '', created=created)
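
# Illustrative sketch (the age below is made up): naturaltime() also accepts a
# timedelta, which is what the datetime.utcnow() - to_datetime(...) subtraction
# above produces, so the account age reads as an "... ago" phrase.
from datetime import timedelta

import humanize

account_age = timedelta(days=2, hours=3)
print(humanize.naturaltime(account_age))  # "2 days ago"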

# Render an MBTiles tileset, optimize and upload it, reporting elapsed times and
# the final file size.
tilelive_cmd = []
if msg['type'] == 'pyramid':
    tilelive_cmd = render_pyramid(msg, source, sink)
elif msg['type'] == 'list':
    tilelive_cmd = render_list(msg, source, sink)
else:
    raise ValueError("Message must be either of type pyramid or list")

render_timeout = int(os.getenv('RENDER_TIMEOUT', 5 * 60))
_, render_time = timing(subprocess.check_call, tilelive_cmd,
                        timeout=render_timeout)
print('Render MBTiles: {}'.format(naturaltime(render_time)))
_, optimize_time = timing(optimize_mbtiles, mbtiles_file)
print('Optimize MBTiles: {}'.format(naturaltime(optimize_time)))
_, upload_time = timing(upload_mbtiles, bucket, mbtiles_file)
print('Upload MBTiles: {}'.format(naturaltime(upload_time)))

download_link = s3_url(mbtiles_file)
print('Uploaded {} to {}'.format(
    naturalsize(os.path.getsize(mbtiles_file)),
    download_link
))
os.remove(mbtiles_file)
return create_result_message(task_id, download_link, msg)

def get_first_status_timestamp(task):
    """Gets the first status timestamp from a task and returns a human
    readable string with the local time and a humanized duration:
    ``2015-01-30T08:45 (an hour ago)``
    """
    try:
        start_time_string = task['statuses'][0]['timestamp']
        start_time = datetime.datetime.fromtimestamp(float(start_time_string))
        return "%s (%s)" % (start_time.strftime("%Y-%m-%dT%H:%M"), humanize.naturaltime(start_time))
    except (IndexError, SlaveDoesNotExist):
        return "Unknown"

# Build one table row per ReplicaSet with a colored readiness ratio and a
# humanized creation time.
for replicaset in replicasets:
    local_created_datetime = datetime.fromtimestamp(replicaset.create_timestamp)

    replica_status = f"{replicaset.ready_replicas}/{replicaset.replicas}"
    if replicaset.ready_replicas >= replicaset.replicas:
        replica_status = PaastaColors.green(replica_status)
    else:
        replica_status = PaastaColors.red(replica_status)

    rows.append(
        (
            replicaset.name,
            replica_status,
            "{} ({})".format(
                local_created_datetime.strftime("%Y-%m-%dT%H:%M"),
                humanize.naturaltime(local_created_datetime),
            ),
        )
    )
return format_table(rows)