# Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
# NOTE(review): this span reads like the interior of an msrest serialization
# unit test — model-class attributes (_validation/_attribute_map) followed by
# test-method statements that reference `self` — whose class/method headers
# are outside this chunk. Indentation is restored conservatively; confirm
# the real nesting against the full file before further edits.
_validation = {}
# Wire map: attribute name -> JSON key and msrest type token. 'iso-8601' is
# a datetime; '[iso-8601]' a list and '{iso-8601}' a dict of datetimes;
# 'ComplexId' a nested model — presumably msrest conventions, TODO confirm.
_attribute_map = {'p1': {'key': 'p1', 'type': 'str'},
                  'p2': {'key': 'p2', 'type': 'str'},
                  'top_date': {'key': 'top_date', 'type': 'iso-8601'},
                  'top_dates': {'key': 'top_dates', 'type': '[iso-8601]'},
                  'insider': {'key': 'insider', 'type': '{iso-8601}'},
                  'top_complex': {'key': 'top_complex', 'type': 'ComplexId'}}
p1 = 'value1'
p2 = 'value2'
top_date = isodate.parse_datetime('2014-01-01T00:00:00')
top_dates = [isodate.parse_datetime('1900-01-01T00:00:00'), isodate.parse_datetime('1901-01-01T00:00:00')]
insider = {
    'k1': isodate.parse_datetime('2015-01-01T00:00:00'),
    'k2': isodate.parse_datetime('2016-01-01T00:00:00'),
    'k3': isodate.parse_datetime('2017-01-01T00:00:00')}
top_complex = ComplexId()
# Serialize the populated model; `self.s` is presumably the Serializer under
# test — defined outside this chunk, TODO confirm.
message = self.s._serialize(ComplexJson())
# NOTE(review): the expected-output literal below is truncated in this chunk —
# the closing braces for 'insider' and the outer dict are missing.
output = {
    'p1': 'value1',
    'p2': 'value2',
    'top_date': '2014-01-01T00:00:00.000Z',
    'top_dates': [
        '1900-01-01T00:00:00.000Z',
        '1901-01-01T00:00:00.000Z'
    ],
    'insider': {
        'k1': '2015-01-01T00:00:00.000Z',
        'k2': '2016-01-01T00:00:00.000Z',
        'k3': '2017-01-01T00:00:00.000Z'
# NOTE(review): interior of an integration test exercising date/datetime/
# duration/byte round-trips against a test server; `self`, `client`, the
# wrapper models, and UTC() are defined outside this visible chunk.
# GET primitive/date
dateResult = client.primitive.get_date()
self.assertEqual(isodate.parse_date("0001-01-01"), dateResult.field)
self.assertEqual(isodate.parse_date("2016-02-29"), dateResult.leap)
# FIX: instantiate the wrapper model ( `DateWrapper()` , not the bare class):
# assigning attributes on the class object would mutate shared class state
# instead of populating the request instance being serialized.
dateRequest = DateWrapper()
dateRequest.field = isodate.parse_date('0001-01-01')
dateRequest.leap = isodate.parse_date('2016-02-29')
client.primitive.put_date(dateRequest)
# GET primitive/datetime
datetimeResult = client.primitive.get_date_time()
min_date = datetime.datetime.min
min_date = min_date.replace(tzinfo=UTC())
self.assertEqual(min_date, datetimeResult.field)
datetime_request = DatetimeWrapper(
    field=isodate.parse_datetime("0001-01-01T00:00:00Z"),
    now=isodate.parse_datetime("2015-05-18T18:38:00Z"))
client.primitive.put_date_time(datetime_request)
# GET primitive/datetimerfc1123
datetimeRfc1123Result = client.primitive.get_date_time_rfc1123()
self.assertEqual(min_date, datetimeRfc1123Result.field)
datetime_request = Datetimerfc1123Wrapper(
    field=isodate.parse_datetime("0001-01-01T00:00:00Z"),
    now=isodate.parse_datetime("2015-05-18T11:38:00Z"))
client.primitive.put_date_time_rfc1123(datetime_request)
# GET primitive/duration
#TimeSpan expectedDuration = new TimeSpan(123, 22, 14, 12, 11);
durationResult = client.primitive.get_duration()  # FIX: dropped stray trailing semicolon
self.assertEqual(123, durationResult.field.days)
# NOTE(review): the section below largely repeats the datetime/rfc1123 calls
# above — looks like an accidental paste duplication; kept as-is because the
# calls have server-visible side effects. TODO confirm and dedupe.
# FIX: was `datetime.min`; this block accesses the class as
# `datetime.datetime` (see the identical statement earlier), so the bare
# module object has no `.min`.
min_date = datetime.datetime.min
min_date = min_date.replace(tzinfo=UTC())
self.assertEqual(min_date, datetimeResult.field)
datetime_request = DatetimeWrapper(
    field=isodate.parse_datetime("0001-01-01T00:00:00Z"),
    now=isodate.parse_datetime("2015-05-18T18:38:00Z"))
client.primitive.put_date_time(datetime_request)
# GET primitive/datetimerfc1123
datetimeRfc1123Result = client.primitive.get_date_time_rfc1123()
self.assertEqual(min_date, datetimeRfc1123Result.field)
datetime_request = Datetimerfc1123Wrapper(
    field=isodate.parse_datetime("0001-01-01T00:00:00Z"),
    now=isodate.parse_datetime("2015-05-18T11:38:00Z"))
client.primitive.put_date_time_rfc1123(datetime_request)
# GET primitive/duration
expected = timedelta(days=123, hours=22, minutes=14, seconds=12, milliseconds=11)
self.assertEqual(expected, client.primitive.get_duration().field)
client.primitive.put_duration(expected)
# GET primitive/byte
byteResult = client.primitive.get_byte()
valid_bytes = bytearray([0x0FF, 0x0FE, 0x0FD, 0x0FC, 0x000, 0x0FA, 0x0F9, 0x0F8, 0x0F7, 0x0F6])
self.assertEqual(valid_bytes, byteResult.field)
# PUT primitive/byte
client.primitive.put_byte(valid_bytes)
def getDSBeginPosition(self):
    """Return the start datetime of the first output's interval constraint.

    Parses ``describe['outputs'][0]['constraint']['interval'][0]`` with
    ``iso.parse_datetime``; returns None when the first output declares no
    'constraint' entry.
    """
    first_output = self.describe['outputs'][0]
    if 'constraint' not in first_output:
        return None
    return iso.parse_datetime(first_output['constraint']['interval'][0])
# NOTE(review): interior of a log filter/aggregation routine — the enclosing
# def and the definitions of scribe_reader_ctx, parser_fn, filter_fn,
# start_time, end_time and aggregated_logs lie outside this chunk.
# Indentation restored; confirm nesting against the full file.
print("STARTING FILTER_AND_AGG")
print(scribe_reader_ctx, scribe_env, stream_name,
      levels, service, components, clusters, instances)
with scribe_reader_ctx as scribe_reader:
    try:
        for line in scribe_reader:
            # Optional per-line transform applied before filtering.
            if parser_fn:
                line = parser_fn(line, clusters, service)
            if filter_fn:
                if filter_fn(
                    line, levels, service, components, clusters,
                    instances, start_time=start_time, end_time=end_time,
                ):
                    try:
                        parsed_line = json.loads(line)
                        timestamp = isodate.parse_datetime(parsed_line.get('timestamp'))
                        # Naive timestamps are assumed UTC so all sort keys
                        # are comparable — TODO confirm producer emits UTC.
                        if not timestamp.tzinfo:
                            timestamp = pytz.utc.localize(timestamp)
                    except ValueError:
                        # Unparseable JSON/timestamp: sort such lines first.
                        timestamp = pytz.utc.localize(datetime.datetime.min)
                    line = {'raw_line': line, 'sort_key': timestamp}
                    print("AGGREGATING log %s from scribereader %s" % (line, scribereader))
                    aggregated_logs.append(line)
    except StreamTailerSetupError as e:
        # An empty stream is best-effort tolerated; anything else re-raises.
        if 'No data in stream' in str(e):
            log.warning("Scribe stream %s is empty on %s" % (stream_name, scribe_env))
            log.warning("Don't Panic! This may or may not be a problem depending on if you expect there to be")
            log.warning("output within this stream.")
        else:
            raise
# NOTE(review): jsonschema-style format-checker setup. The `try:` that pairs
# with the first `except ImportError` (guarding `import rfc3987`) was lost
# from this chunk, so the fragment is syntactically incomplete as captured.
import rfc3987
except ImportError:
    pass
else:
    # "uri" format is only available when rfc3987 is installed.
    @FormatChecker.cls_checks("uri", raises=ValueError)
    def is_uri(instance):
        # rfc3987.parse raises ValueError on malformed input, which the
        # checker machinery treats as a validation failure.
        return rfc3987.parse(instance, rule="URI_reference")
try:
    import isodate
except ImportError:
    pass
else:
    # "date-time" format delegates directly to isodate's ISO-8601 parser.
    FormatChecker.cls_checks("date-time",
                             raises=(ValueError, isodate.ISO8601Error))(isodate.parse_datetime)
draft4_format_checker = FormatChecker()
draft3_format_checker = FormatChecker()
# Draft-3 "ip-address": socket.inet_aton raises socket.error on bad input.
draft3_format_checker.checks("ip-address",
                             raises=socket.error)(socket.inet_aton)
draft3_format_checker.checks("host-name")(is_host_name)
@draft3_format_checker.checks("date", raises=ValueError)
def is_date(instance):
    return datetime.datetime.strptime(instance, "%Y-%m-%d")
@draft3_format_checker.checks("time", raises=ValueError)
def is_time(instance):
    """NOTE(review): body truncated in this chunk — upstream jsonschema
    presumably parses with strptime "%H:%M:%S"; confirm before restoring."""
def date(self):
    """Return the record's 'WARC-Date' header parsed as a datetime."""
    raw_value = self.header.fields['WARC-Date']
    return isodate.parse_datetime(raw_value)
def prettify_timestamp(timestamp):
    """Returns more human-friendly form of 'timestamp' without microseconds and
    in local time.
    """
    parsed = isodate.parse_datetime(timestamp)
    localized = datetime_from_utc_to_local(parsed)
    return localized.strftime("%Y-%m-%d %H:%M:%S")
def job_is_stuck(last_run_iso_time, interval_in_seconds, client, job_name):
    """Considers that the job is stuck when it hasn't run on time

    :param last_run_iso_time: ISO date and time of the last job run as a string
    :param interval_in_seconds: the job interval in seconds
    :param client: configured Chronos client
    :param job_name: Chronos job name
    :returns: True or False
    """
    # No last run or no interval: nothing to compare against, so not stuck.
    if last_run_iso_time is None or interval_in_seconds is None:
        return False
    dt_next_run = isodate.parse_datetime(last_run_iso_time) + timedelta(
        seconds=interval_in_seconds
    )
    dt_now_utc = datetime.now(pytz.utc)
    # The next scheduled run is still in the future -> on time.
    if dt_next_run >= dt_now_utc:
        return False
    try:
        # Allow for the job's typical runtime (99th-percentile from Chronos
        # stats), capped at one interval, before declaring it stuck.
        expected_runtime = min(
            int(client.job_stat(job_name)["histogram"]["99thPercentile"]),
            interval_in_seconds,
        )
    except KeyError:
        log.debug(
            "Can't get 99thPercentile for %s. "
            "Assuming a runtime of %d seconds." % (job_name, interval_in_seconds)
        )
        expected_runtime = interval_in_seconds
    # NOTE(review): chunk truncated here — the final comparison that uses
    # expected_runtime (and the documented True return) is not visible.
# NOTE(review): interior of an ISO-8601-ish datetime parsing fallback — the
# enclosing function header and the definitions of `s` and `_strptime` are
# outside this chunk; the `try:` two lines into the except-branch has no
# matching `except` within the visible span.
fmt = "%Y-%m-%dT%H:%M:%S"
try:
    # Fast path: plain naive timestamp with no fractional seconds.
    return _strptime(s, fmt)
except ValueError:
    try:
        # strip zulu timezone suffix or utc offset
        if s[-1] == "Z" or (s[-3] == ":" and s[-6] in (' ', '-', '+')):
            # Prefer a real TZ-aware parser when one is installed; each
            # candidate is optional, so ImportError just falls through.
            try:
                import iso8601
                return iso8601.parse_date(s)
            except ImportError:
                pass
            try:
                import isodate
                return isodate.parse_datetime(s)
            except ImportError:
                pass
            try:
                import dateutil.parser
                return dateutil.parser.parse(s)
            except ImportError:
                pass
            warnings.warn('removing unsupported "Z" suffix or UTC offset. Install `iso8601`, `isodate` or `python-dateutil` package to support it', RuntimeWarning)
            # Drop "Z" (1 char) or "+HH:MM"/"-HH:MM" (6 chars), parse naive.
            s = s[:-1] if s[-1] == "Z" else s[:-6]
        # parse microseconds
        try:
            return _strptime(s, fmt + ".%f")
        except:
            # NOTE(review): bare except — presumably intended ValueError;
            # kept byte-identical in this doc-only pass.
            return _strptime(s, fmt)