Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
def _report(self):
    """Print how long the run took: humanized delta plus raw seconds."""
    elapsed = self.end - self.start
    human = humanize.time.naturaldelta(elapsed)
    print("Finished in {} ({} seconds)".format(human, elapsed))
# NOTE(review): orphaned fragment — the opening `try:` and the enclosing
# function header are outside this view, and indentation has been flattened
# by the paste.  Code kept byte-identical; comments only.
click.secho(str(e), err=True, fg="red")
else:
self.echo_via_pager("\n".join(output))
except KeyboardInterrupt:
pass
# Show per-query timing when \timing is enabled.
if self.pgspecial.timing_enabled:
# Only add humanized time display if > 1 second
if query.total_time > 1:
print(
"Time: %0.03fs (%s), executed in: %0.03fs (%s)"
% (
query.total_time,
humanize.time.naturaldelta(query.total_time),
query.execution_time,
humanize.time.naturaldelta(query.execution_time),
)
)
else:
print("Time: %0.03fs" % query.total_time)
# Check if we need to update completions, in order of most
# to least drastic changes
if query.db_changed:
# Database changed: rebuild completions from scratch under the lock.
with self._completer_lock:
self.completer.reset_completions()
self.refresh_completions(persist_priorities="keywords")
elif query.meta_changed:
self.refresh_completions(persist_priorities="all")
elif query.path_changed:
logger.debug("Refreshing search path")
# NOTE(review): the body of this `with` is cut off at the fragment
# boundary — presumably it updated the completer's search path; confirm
# against the original file.
with self._completer_lock:
# NOTE(review): orphaned fragment — the `try:` and the condition selecting
# the output-file branch are outside this view; indentation flattened.
# Code kept byte-identical; comments only.
# Append the query text and its output to the configured output file.
with open(self.output_file, 'a', encoding='utf-8') as f:
click.echo(text, file=f)
click.echo('\n'.join(output), file=f)
click.echo('', file=f) # extra newline
# Writing to the output file failed; report the error and carry on.
except IOError as e:
click.secho(str(e), err=True, fg='red')
else:
# Presumably the no-output-file path: page the result to the terminal
# — confirm against the missing `try:` header.
click.echo_via_pager('\n'.join(output))
except KeyboardInterrupt:
pass
if self.pgspecial.timing_enabled:
# Only add humanized time display if > 1 second
if query.total_time > 1:
print('Time: %0.03fs (%s)' % (query.total_time,
humanize.time.naturaldelta(query.total_time)))
else:
print('Time: %0.03fs' % query.total_time)
# Check if we need to update completions, in order of most
# to least drastic changes
if query.db_changed:
# Database changed: rebuild completions from scratch under the lock.
with self._completer_lock:
self.completer.reset_completions()
self.refresh_completions(persist_priorities='keywords')
elif query.meta_changed:
self.refresh_completions(persist_priorities='all')
elif query.path_changed:
logger.debug('Refreshing search path')
# Push the current search_path into the completer under the lock.
with self._completer_lock:
self.completer.set_search_path(
self.pgexecute.search_path())
# NOTE(review): orphaned fragment — cut mid-`try` at the top (the `with
# open(...)` writing to `f` is outside this view) and cut after the final
# `elif` at the bottom; indentation flattened.  Code kept byte-identical.
click.echo("", file=f) # extra newline
# Writing to the output file failed; report the error and carry on.
except IOError as e:
click.secho(str(e), err=True, fg="red")
else:
self.echo_via_pager("\n".join(output))
except KeyboardInterrupt:
pass
if self.pgspecial.timing_enabled:
# Only add humanized time display if > 1 second
if query.total_time > 1:
print(
"Time: %0.03fs (%s), executed in: %0.03fs (%s)"
% (
query.total_time,
humanize.time.naturaldelta(query.total_time),
query.execution_time,
humanize.time.naturaldelta(query.execution_time),
)
)
else:
print("Time: %0.03fs" % query.total_time)
# Check if we need to update completions, in order of most
# to least drastic changes
if query.db_changed:
with self._completer_lock:
self.completer.reset_completions()
self.refresh_completions(persist_priorities="keywords")
elif query.meta_changed:
self.refresh_completions(persist_priorities="all")
# NOTE(review): this branch's body is cut off at the fragment boundary.
elif query.path_changed:
def slang_date(self, locale="en"):
    """Return a human slang representation of the date.

    Falls back to a short formatted date ("DD MMM", plus the year when
    the date is at least a year away) if no slang translation exists
    for the given locale.

    Keyword Arguments:
    locale -- locale to translate to, e.g. 'fr' for french.
              (default: 'en' - English)
    """
    # Fix: the original docstring opened with four quotes (""""Returns...),
    # leaving a stray leading quote in the rendered docstring.
    dt = pendulum.instance(self.datetime())
    try:
        # Prefer a canned slang phrase ("yesterday", "next week", ...).
        return _translate(dt, locale)
    except KeyError:
        # No slang phrase for this delta; fall through to a short date.
        pass
    delta = humanize.time.abs_timedelta(
        timedelta(seconds=(self.epoch - now().epoch))
    )
    format_string = "DD MMM"
    if delta.days >= 365:
        # Only show the year for dates a year or more away.
        format_string += " YYYY"
    return dt.format(format_string, locale=locale).title()
# NOTE(review): orphaned fragment (mssql-cli-style handler, per the
# MssqlCompleter reference below) — the enclosing try/except and function
# header are outside this view; indentation flattened.  Kept byte-identical.
# Issue where Ctrl+C propagates to sql tools service process and kills it,
# so that query/cancel request can't be sent.
# Right now the sql_tools_service process is killed and we restart
# it with a new connection.
click.secho(u'Cancelling query...', err=True, fg='red')
self.reset()
logger.debug("cancelled query, sql: %r", text)
click.secho("Query cancelled.", err=True, fg='red')
except NotImplementedError:
click.secho('Not Yet Implemented.', fg="yellow")
else:
# Success path: print timing, humanized when the query took > 1 second.
if query.total_time > 1:
# pylint: disable=no-member
print('Time: %0.03fs (%s)' % (query.total_time,
humanize.time.naturaldelta(query.total_time)))
else:
print('Time: %0.03fs' % query.total_time)
# Check if we need to update completions, in order of most
# to least drastic changes
if query.db_changed:
with self._completer_lock:
self.completer.reset_completions()
self.refresh_completions(persist_priorities='keywords')
elif query.meta_changed:
self.refresh_completions(persist_priorities='all')
if not query.contains_secure_statement:
# Allow MssqlCompleter to learn user's preferred keywords, etc.
with self._completer_lock:
self.completer.extend_query_history(text)
# NOTE(review): orphaned Python 2 fragment (`print >>`, print statements);
# the enclosing function/loop is outside this view.  Kept byte-identical.
# Upload the build log and record its link on the task info.
logname = info.get('logname')
key = pathjoin(str(job_id), osarch, logname)
#print 'Log: ', logname
info['loglink'] = upload_file(key, pathjoin(workspace, logname))
# Signal completion to the log buffer and stop the reader thread.
print >>bufio, '==DONE=='
running = False
t.join()
# NOTE(review): the open() handle is never closed — should use `with`.
json.dump(out, open('sample.json', 'w'))
# Commit the task result back to the service.
reply = rpost('/task/commit', data=json.dumps(out))
print 'commit reply:', reply
# Short module-level alias for humanize's delta formatter.
naturaldelta = humanize.time.naturaldelta
# Shared job queue consumed by job_manager() (Python 2 `Queue` module).
que = Queue.Queue()
def job_manager():
    """Worker loop: pull build jobs off the queue forever.

    Each job is a (job_id, reponame, tag) tuple.  A failed build is
    reported back to the task service with its traceback; the job's
    workspace directory is removed after every attempt, success or not.
    """
    while True:
        job_id, reponame, tag = que.get()
        try:
            docker_build(job_id, reponame, tag)
        except Exception:
            # Ship the full traceback back to the task service.
            failure = dict(id=job_id, status='error',
                           output=traceback.format_exc())
            rpost('/task/update', data=failure)
        # Clean up; refuse to rm obviously wrong paths.
        workspace = job_workspace(job_id)
        assert workspace != '/' and workspace != '/home'
        sh.rm('-fr', workspace)
def main():