Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
return make_year_array(dates)
except ValueError:
pass
try:
dates = str(dates).upper() # Ambry tends to lowercase things
parts = dates.replace('E', '/').split('/') # / is in std; ambry uses 'e' to be compat with urls.
rparts = []
for p in parts:
try:
rparts.append(isodate.parse_date(p))
except isodate.isoerror.ISO8601Error:
try:
rparts.append(isodate.parse_duration(p))
except:
raise
types = tuple(type(x) for x in rparts)
if types == (datetime.date, isodate.duration.Duration):
start = rparts[0].year
end = start + int(rparts[1].years)
elif types == (isodate.duration.Duration, datetime.date):
end = rparts[1].year + 1
start = end - int(rparts[0].years)
elif types == (datetime.date, datetime.date):
start = rparts[0].year
end = rparts[1].year + 1
else:
def return_valid_duration_create(update_value):
    """Validate a duration value supplied to a create operation.

    Accepts either an ISO8601 duration string or a ``days:minutes:seconds``
    string and checks it against the maximum representable duration
    (mirrors .NET ``TimeSpan.MaxValue``: 10675199 days, 10085 minutes,
    477581 seconds).

    :param update_value: duration string to validate, or ``None``.
    :return: the original string (ISO8601 form), a ``datetime.timedelta``
        (``d:m:s`` form), or ``None`` when the value is absent, matches
        neither format, or exceeds the supported range.
    """
    from datetime import timedelta
    from isodate import parse_duration

    if update_value is None:
        return None

    # Single source of truth for the upper bound; the original repeated the
    # literal four times and one copy had days=10675198 (off by one).
    max_duration = timedelta(days=10675199, minutes=10085, seconds=477581)

    # NOTE(review): iso8601pattern / timedeltapattern are module-level
    # regexes defined elsewhere in this file — confirm before reuse.
    if iso8601pattern.match(update_value):
        if parse_duration(update_value) < max_duration:
            return update_value
        return None

    if timedeltapattern.match(update_value):
        day, minute, seconds = update_value.split(":")
        value = timedelta(days=int(day), minutes=int(minute), seconds=int(seconds))
        if value < max_duration:
            return value
        return None
delta_string: str
) -> List[Union[timedelta, isodate.Duration]]:
"""q§Parse the given string into a list of ``timedelta`` instances.
"""
if delta_string is None:
raise DeltasParseError(
f'Delta string is None',
)
deltas = []
for item in delta_string.split(' '):
item = item.strip()
if not item:
continue
try:
deltas.append(isodate.parse_duration(item))
except ValueError as exc:
raise DeltasParseError(
f'Could not parse duration: {item!r}',
error=exc,
item=item,
deltas=deltas,
delta_string=delta_string,
) from exc
if deltas and len(deltas) < 2:
raise DeltasParseError(
'At least two deltas are required',
deltas=deltas,
delta_string=delta_string,
)
'id': ','.join(query_ids),
'part': 'contentDetails'
}).text
duration = json.loads(api_request)
selected_video = [{}, -100]
for video in search['items']:
video = objectify(video)
if (video.id.kind == 'youtube#video'):
for ytv in duration['items']:
ytv = objectify(ytv)
try:
if (ytv.id == video.id.videoId):
video.duration = ytv.contentDetails.duration
video.duration = isodate.parse_duration(video.duration).total_seconds()
break
except:
video.duration = 0
video_points = self.attribute_meta_points(video)
if (video_points > selected_video[1]):
selected_video = [video, video_points]
if (selected_video[1] >= 3 or self.download_low_score):
self.console.success('Video
# DocumentFragments, and the xml parser Documents, letting this
# decide what datatype to use makes roundtripping easier, but it a
# bit random
(xml.dom.minidom.DocumentFragment, (_writeXML, _RDF_HTMLLITERAL))
]
XSDToPython = {
None: None, # plain literals map directly to value space
URIRef(_XSD_PFX + 'time'): parse_time,
URIRef(_XSD_PFX + 'date'): parse_date,
URIRef(_XSD_PFX + 'gYear'): parse_date,
URIRef(_XSD_PFX + 'gYearMonth'): parse_date,
URIRef(_XSD_PFX + 'dateTime'): parse_datetime,
URIRef(_XSD_PFX + 'duration'): parse_duration,
URIRef(_XSD_PFX + 'dayTimeDuration'): parse_duration,
URIRef(_XSD_PFX + 'yearMonthDuration'): parse_duration,
URIRef(_XSD_PFX + 'string'): None,
URIRef(_XSD_PFX + 'normalizedString'): None,
URIRef(_XSD_PFX + 'token'): None,
URIRef(_XSD_PFX + 'language'): None,
URIRef(_XSD_PFX + 'boolean'): lambda i: i.lower() == 'true',
URIRef(_XSD_PFX + 'decimal'): Decimal,
URIRef(_XSD_PFX + 'integer'): long_type,
URIRef(_XSD_PFX + 'nonPositiveInteger'): int,
URIRef(_XSD_PFX + 'long'): long_type,
URIRef(_XSD_PFX + 'nonNegativeInteger'): int,
URIRef(_XSD_PFX + 'negativeInteger'): int,
URIRef(_XSD_PFX + 'int'): long_type,
URIRef(_XSD_PFX + 'unsignedLong'): long_type,
URIRef(_XSD_PFX + 'positiveInteger'): int,
URIRef(_XSD_PFX + 'short'): int,
URIRef(_XSD_PFX + 'unsignedInt'): long_type,
def genDateRange(startDate, endDate, interval):
    """Generate ISO8601 datetime strings from startDate to endDate, inclusive.

    :param startDate: ISO8601 datetime string for the first value.
    :param endDate: ISO8601 datetime string upper bound (inclusive).
    :param interval: ISO8601 duration string used as the step.
    :return: list of ISO8601-formatted datetime strings.
    """
    import isodate  # https://github.com/gweis/isodate

    dates = []
    current = isodate.parse_datetime(startDate)
    stop = isodate.parse_datetime(endDate)
    # Fixed: the original bound this loop's formatted value to a local named
    # `datetime`, shadowing the stdlib module/class name within the function.
    step = isodate.parse_duration(interval)
    # NOTE(review): a zero or negative `interval` would loop forever here —
    # callers are assumed to pass a positive duration; confirm upstream.
    while current <= stop:
        dates.append(isodate.datetime_isoformat(current))
        current = current + step
    return dates
def parse_iso8601_duration(duration_str):
    """
    Parse an iso8601 duration string.
    @type duration_str: str
    @param duration_str: iso8601 duration string to parse
    @rtype: isodate.Duration or datetime.timedelta instance
    @raise isodate.ISO8601Error: if the string cannot be parsed
    """
    try:
        return isodate.parse_duration(duration_str)
    except (ValueError, isodate.ISO8601Error) as exc:
        msg = _('Malformed ISO8601 duration string: %(d)s') % {'d': duration_str}
        # Python 3 exception chaining replaces the Python-2-only
        # `raise E(msg), None, sys.exc_info()[2]` form (a SyntaxError on
        # py3); `from exc` preserves the original parse error and traceback.
        raise isodate.ISO8601Error(msg) from exc
fout = open(output_path, 'w')
fout.write('{0}\t{1}\t{2}\t{3}\t{4}\t{5}\t{6}\t{7}\t{8}\t{9}\t{10}\t{11}\t{12}\t{13}\n'
.format('id', 'publish', 'duration', 'definition', 'category', 'detect_lang', 'channel', 'topics',
'view30', 'watch30', 'wp30', 'days', 'daily_view', 'daily_watch'))
with open(input_path, 'r') as fin:
for line in fin:
# skip if data is corrupted or reading duration fails
try:
video = json.loads(line.rstrip())
except:
continue
vid = video['id']
published_at = video['snippet']['publishedAt'][:10]
duration = isodate.parse_duration(video['contentDetails']['duration']).seconds
definition = [0, 1][video['contentDetails']['definition'] == 'hd']
category = video['snippet']['categoryId']
detect_lang = video['snippet']['detectLang']
channel = video['snippet']['channelId']
# freebase topic information
if 'topicDetails' in video:
if 'topicIds' in video['topicDetails']:
topic_ids = set(video['topicDetails']['topicIds'])
else:
topic_ids = set()
if 'relevantTopicIds' in video['topicDetails']:
relevant_topic_ids = set(video['topicDetails']['relevantTopicIds'])
else:
relevant_topic_ids = set()
topics_set = topic_ids.union(relevant_topic_ids)
start = request.GET.get("start", None)
end = request.GET.get("end", None)
interval = request.GET.get("interval", None)
try:
if start:
start = parse_datetime(start)
else:
discussion = request.context._instance
start = discussion.creation_date
# TODO: Round down at day/week/month according to interval
if end:
end = parse_datetime(end)
else:
end = datetime.now()
if interval:
interval = isodate.parse_duration(interval)
else:
interval = end - start + timedelta(seconds=1)
except isodate.ISO8601Error as e:
raise HTTPBadRequest(e)
return (start, end, interval)
aggfun = requestObject.getElementsByTagName('aggregateFunction')
aggnodata = requestObject.getElementsByTagName('aggregateNodata')
if len(aggint)==1 and len(aggfun)==1:
#-----------------------
# -- aggregate_interval
#-----------------------
# Check on the eventTime parameter: it must be only one interval: 2010-01-01T00:00:00+00/2011-01-01T00:00:01+00
exeMsg = "Using aggregate functions, the event time must exist with an interval composed by a begin and an end date (ISO8601)"
if self.eventTime == None or len(self.eventTime)!=1 or len(self.eventTime[0])!=2:
raise sosException.SOSException(2,exeMsg)
val = aggint[0].firstChild
if val.nodeType == val.TEXT_NODE:
self.aggregate_interval = str(val.data)
try:
iso.parse_duration(self.aggregate_interval)
except Exception as ex:
raise sosException.SOSException(2,"Parameter \"aggregate_interval\" sent with invalid format (check ISO8601 duration spec): %s" % ex)
else:
err_txt = "cannot get ISO8601 duration value in \"aggregateInterval\""
raise sosException.SOSException(1,err_txt)
#-----------------------
# -- aggregate_function
#-----------------------
val = aggfun[0].firstChild
if val.nodeType == val.TEXT_NODE:
self.aggregate_function = str(val.data)
if not (self.aggregate_function.upper() in ["AVG","COUNT","MAX","MIN","SUM"]):
raise sosException.SOSException(2,"Available aggregation functions: avg, count, max, min, sum.")
#-----------------------------------