Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
def on_selection_changed(self, selection, *args):
    # TreeView selection callback: resolves the selected row to a key,
    # re-emits 'key-selection-changed', formats the key's expiry for
    # display, and then walks the right-hand pane's children.
    log.debug('Selected new TreeView item %s = %s', selection, args)
    name, email, keyid = self.get_items_from_selection(selection)
    key = self.keysDict[keyid]
    self.emit('key-selection-changed', keyid)
    try:
        # key.expiry is normally a timestamp-like value (str/int/float).
        exp_date = datetime.fromtimestamp(float(key.expiry))
    except TypeError as e:
        # This might be the case when the key.expiry is already a timedate
        exp_date = key.expiry
    except ValueError as e:
        # This happens when converting an empty string to a datetime.
        exp_date = None
    if exp_date is None:
        expiry = "No expiration date"
    else:
        expiry = "{:%Y-%m-%d %H:%M:%S}".format(exp_date)
    pane = self.right_pane
    for child in pane.get_children():
        # Ouch, this is not very efficient.
        # But this deals with the fact that the first
        # (truncated in this view: the loop body continues past this chunk)
# --- Kodi background service loop for plugin.video.iptv.recorder ---
monitor = xbmc.Monitor()
# NOTE(review): logged at LOGERROR although the message is informational —
# presumably to stay visible at Kodi's default log level; confirm intent.
xbmc.log("[plugin.video.iptv.recorder] service started...", xbmc.LOGERROR)
if ADDON.getSetting('service.startup') == 'true':
    # Optional delayed run once at Kodi startup, then record the run time.
    time.sleep(int(ADDON.getSetting('service.delay.seconds')))
    Service()
    ADDON.setSetting('last.update', str(time.time()))
while not monitor.abortRequested():
    if ADDON.getSetting('service.type') == '1':
        # Interval mode: next run is 'service.interval' hours after the
        # stored 'last.update' timestamp.
        interval = int(ADDON.getSetting('service.interval'))
        waitTime = 3600 * interval
        # Default "0.0" forces an immediate run when no update was recorded.
        ts = ADDON.getSetting('last.update') or "0.0"
        lastTime = datetime.datetime.fromtimestamp(float(ts))
        now = datetime.datetime.now()
        nextTime = lastTime + datetime.timedelta(seconds=waitTime)
        td = nextTime - now
        # Total seconds remaining; timedelta.seconds alone ignores the days
        # component (and is wrong for negative deltas), hence the addition.
        timeLeft = td.seconds + (td.days * 24 * 3600)
        xbmc.log("[plugin.video.iptv.recorder] Service waiting for interval %s" % waitTime, xbmc.LOGERROR)
    elif ADDON.getSetting('service.type') == '2':
        # Fixed-time mode: run daily at the HH:MM stored in 'service.time'.
        next_time = ADDON.getSetting('service.time')
        if next_time:
            hms = next_time.split(':')
            hour = hms[0]
            minute = hms[1]
            now = datetime.datetime.now()
            next_time = now.replace(hour=int(hour),minute=int(minute),second=0,microsecond=0)
            if next_time < now:
                # Today's slot already passed — schedule for tomorrow.
                next_time = next_time + datetime.timedelta(hours=24)
            # (truncated in this view: the loop presumably waits before re-checking)
def _utc_to_cli_date(utc_date):
if utc_date is None:
return None
date = datetime.datetime.fromtimestamp(utc_date)
return date.strftime(CLI_DATE_FORMAT)
    break
else:
    # for/else: loop finished without break — no treeinfo file was found.
    treecfg = None
if not treecfg:
    log.info("No treeinfo file found; assume no distributions to import")
    return
treeinfo = parse_treeinfo(treecfg)
# Handle distributions that are part of repo syncs
# Distribution id/description: "ks-<family>-<variant>-<version>-<arch>".
id = description = "ks-%s-%s-%s-%s" % (treeinfo['family'], treeinfo['variant'],
treeinfo['version'], treeinfo['arch'] or "noarch")
distro_path = "%s/%s" % (pulp.server.util.top_distribution_location(), id)
files = pulp.server.util.listdir(distro_path) or []
timestamp = None
if treeinfo['timestamp']:
    # treeinfo stores the timestamp as a string; convert to a datetime.
    timestamp = datetime.datetime.fromtimestamp(float(treeinfo['timestamp']))
distro = self.distro_api.create(id, description, distro_path, \
family=treeinfo['family'], variant=treeinfo['variant'], \
version=treeinfo['version'], timestamp=timestamp, files=files,\
arch=treeinfo['arch'], repoids=[repo['id']])
if distro['id'] not in repo['distributionid']:
    # First time this repo references the distribution — link and log it.
    repo['distributionid'].append(distro['id'])
    log.info("Created a distributionID %s" % distro['id'])
if not repo['publish']:
    # the repo is not published, dont expose the repo yet
    return
# Expose the distribution under the published "ks" tree.
distro_path = os.path.join(constants.LOCAL_STORAGE, "published", "ks")
if not os.path.isdir(distro_path):
    os.mkdir(distro_path)
source_path = os.path.join(pulp.server.util.top_repos_location(),
repo["relative_path"])
link_path = os.path.join(distro_path, repo["relative_path"])
def unix2bluetime(unix_time):
    """Format a Unix timestamp as "YYYYMMDDTHHMMSS" (local time)."""
    return datetime.fromtimestamp(unix_time).strftime("%Y%m%dT%H%M%S")
# Avoid storing fake languages
return "Invalid language", 400
lang_contents = PER_LANG.get(lang)
if lang_contents is None:
#
# If the language has not been previously calculated, it is calculated as follows:
# - first, check the modified date of the locales.json and messages.mo file
# - then, generate the file using render_template
# - store it in the local cache, and then check the etag and so on
#
fname = data_filename('weblab/core/templates/webclient/locales.json')
try:
modification_time = os.stat(fname).st_mtime
last_modified = datetime.datetime.fromtimestamp(modification_time)
except Exception as e:
print("Could not calculate the time for %s" % fname)
traceback.print_exc()
last_modified = datetime.datetime.now()
messages_directory = data_filename('weblab/core/translations')
messages_file = data_filename('weblab/core/translations/{0}/LC_MESSAGES/messages.mo'.format(lang))
if os.path.exists(messages_file):
try:
messages_modification_time = os.stat(fname).st_mtime
messages_last_modified = datetime.datetime.fromtimestamp(messages_modification_time)
except Exception as e:
messages_last_modified = datetime.datetime.now()
last_modified = max(last_modified, messages_last_modified)
def do_ls(self, line, display = True):
    """List the current directory inode's entries.

    Rebuilds self.completion as (file_name, is_directory) tuples for tab
    completion and, when *display* is True, prints each entry via
    INODE.displayName().
    """
    self.completion = []
    for record in self.currentINode.walk():
        node = INODE(self.volume)
        node.FileAttributes = record['FileAttributes']
        node.FileSize = record['DataSize']
        change_ts = getUnixTime(record['LastDataChangeTime'])
        node.LastDataChangeTime = datetime.fromtimestamp(change_ts)
        # NTFS stores names as UTF-16LE byte strings.
        node.FileName = record['FileName'].decode('utf-16le')
        if display is True:
            node.displayName()
        self.completion.append((node.FileName, node.isDirectory()))
    # (fragment: inside a per-target try block of a Python 2 script)
    if success:
        # if the command was successful, write out the cleanup file
        # update the cleanup script: "host username password UPLOAD_TRIGGER processname exename wmis/winexe"
        # NOTE(review): plaintext credentials are written into the cleanup
        # script below — consider restricting permissions on the output file.
        if triggerMethod == "wmis":
            cleanup += target + " " + username + " " + password + " UPLOAD_TRIGGER wmis " + uploadFileName + " " + uploadFileName + "\n"
        else:
            cleanup += target + " " + username + " " + password + " UPLOAD_TRIGGER winexe " + uploadFileName + " " + uploadFileName + "\n"
except Exception as e:
    # Best-effort: report the failing target/credentials and keep going.
    print "Exception:",e
    print color(" [!] Error on "+str(target)+" with credentials "+str(username)+":"+str(password), warning="True")
print ""
# only write out our cleanup script if there were some results
if cleanup != "":
    # Timestamped resource-script name, e.g. "05.31.2014.120000.rc".
    cleanupFileNameBase = datetime.datetime.fromtimestamp(time.time()).strftime('%m.%d.%Y.%H%M%S') + ".rc"
    cleanupFileName = os.path.join(settings.CATAPULT_RESOURCE_PATH, cleanupFileNameBase)
    cleanupFile = open(cleanupFileName, 'w')
    cleanupFile.write(cleanup)
    cleanupFile.close()
    print "\n [*] Cleanup script written to " + cleanupFileNameBase
    print " [*] run with \"./Veil-Catapult.py -r " + cleanupFileName + "\"\n"
print color("\n [*] Payload upload and triggering complete!\n")
def onMessageSent(self, jid, messageId):
    """Echo a just-sent message to the console, then reprint the prompt.

    self.sentCache[messageId] holds (timestamp, text) for the message;
    *jid* is accepted for the callback signature but not used here.
    """
    sent_at = self.sentCache[messageId][0]
    body = self.sentCache[messageId][1]
    stamp = datetime.datetime.fromtimestamp(sent_at).strftime('%d-%m-%Y %H:%M')
    print("%s [%s]:%s" % (self.username, stamp, body))
    print(self.getPrompt())
def _changesetForRevision(self, repo, revision):
    """Build a tailor/vcpx changeset from a Mercurial revision.

    Reads the changelog entry for *revision*, converts its (timestamp,
    tz-offset) pair into an offset-aware datetime, and gathers parent
    manifests so added files can be detected.
    (Truncated in this view: entry construction and the return are not
    visible in this chunk.)
    """
    from datetime import datetime
    from vcpx.changes import Changeset, ChangesetEntry
    from vcpx.tzinfo import FixedOffset
    entries = []
    node = self._getNode(repo, revision)
    parents = repo.changelog.parents(node)
    nodecontent = repo.changelog.read(node)
    # hg 0.9.5+ returns a tuple of six elements, last seems useless for us
    (manifest, user, date, files, message) = nodecontent[:5]
    # hg stores (unix timestamp, offset-from-UTC in seconds).
    dt, tz = date
    date = datetime.fromtimestamp(dt, FixedOffset(-tz/60)) # note the minus sign!
    manifest = repo.manifest.read(manifest)
    # To find adds, we get the manifests of any parents. If a file doesn't
    # occur there, it's new.
    pms = {}
    for parent in repo.changelog.parents(node):
        pms.update(repo.manifest.read(repo.changelog.read(parent)[0]))
    # if files contains only '.hgtags', this is probably a tag cset.
    # Tailor appears to only support tagging the current version, so only
    # pass on tags that are for the immediate parents of the current node
    tags = None
    if files == ['.hgtags']:
        tags = [tag for (tag, tagnode) in repo.tags().iteritems()
                if tagnode in parents]