def test_str(self):
    value = astimezone('Europe/Helsinki')
    assert isinstance(value, tzinfo)
def test_tz(self):
    tz = pytz.timezone('Europe/Helsinki')
    value = astimezone(tz)
    assert tz is value
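
Taken together, the two tests above cover both input types astimezone accepts. A minimal, self-contained sketch of that behaviour, assuming APScheduler's astimezone helper and pytz are importable as shown:

import pytz
from datetime import tzinfo
from apscheduler.util import astimezone

# A zone name is resolved to a concrete tzinfo instance.
value = astimezone('Europe/Helsinki')
assert isinstance(value, tzinfo)

# An object that is already a tzinfo is returned as-is.
tz = pytz.timezone('Europe/Helsinki')
assert astimezone(tz) is tz

# None passes through unchanged, which is why callers below can write
# astimezone(...) or get_localzone() to fall back to the local zone.
assert astimezone(None) is None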
def __init__(self, weeks=0, days=0, hours=0, minutes=0, seconds=0, start_date=None, end_date=None, timezone=None):
    self.interval = timedelta(weeks=weeks, days=days, hours=hours, minutes=minutes, seconds=seconds)
    self.interval_length = timedelta_seconds(self.interval)
    if self.interval_length == 0:
        self.interval = timedelta(seconds=1)
        self.interval_length = 1

    if timezone:
        self.timezone = astimezone(timezone)
    elif start_date and start_date.tzinfo:
        self.timezone = start_date.tzinfo
    elif end_date and end_date.tzinfo:
        self.timezone = end_date.tzinfo
    else:
        self.timezone = get_localzone()

    start_date = start_date or (datetime.now(self.timezone) + self.interval)
    self.start_date = convert_to_datetime(start_date, self.timezone, 'start_date')
    self.end_date = convert_to_datetime(end_date, self.timezone, 'end_date')
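
A small usage sketch of the constructor above; the import path is APScheduler's usual location for IntervalTrigger, and the one-second fallback comes from the zero-interval branch:

from datetime import timedelta
from apscheduler.triggers.interval import IntervalTrigger

# Explicit timezone name, normalized through astimezone().
every_five_minutes = IntervalTrigger(minutes=5, timezone='Europe/Helsinki')

# With no fields at all, the zero-length interval is coerced to one second
# and the timezone falls back to get_localzone().
degenerate = IntervalTrigger()
assert degenerate.interval == timedelta(seconds=1)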
def _add_job_to_scheduler(self, trigger):
    trigger_type_ref = trigger['type']
    trigger_type = TIMER_TRIGGER_TYPES[trigger_type_ref]
    try:
        jsonschema.validate(trigger['parameters'],
                            trigger_type['parameters_schema'])
    except jsonschema.ValidationError as e:
        LOG.error('Exception scheduling timer: %s, %s',
                  trigger['parameters'], e, exc_info=True)
        raise  # Or should we just return?

    time_spec = trigger['parameters']
    time_zone = aps_utils.astimezone(trigger['parameters'].get('timezone'))

    time_type = None

    if trigger_type['name'] == 'st2.IntervalTimer':
        unit = time_spec.get('unit', None)
        value = time_spec.get('delta', None)
        time_type = IntervalTrigger(**{unit: value, 'timezone': time_zone})
    elif trigger_type['name'] == 'st2.DateTimer':
        # Raises an exception if date string isn't a valid one.
        dat = date_parser.parse(time_spec.get('date', None))
        time_type = DateTrigger(dat, timezone=time_zone)
    elif trigger_type['name'] == 'st2.CronTimer':
        cron = time_spec.copy()
        cron['timezone'] = time_zone
        time_type = CronTrigger(**cron)
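
For illustration, hypothetical parameter payloads that would drive each branch above; the key names ('unit', 'delta', 'date', and the cron fields) are inferred from how time_spec is read in this snippet, not taken from the st2 parameter schemas themselves:

interval_spec = {'unit': 'seconds', 'delta': 30, 'timezone': 'UTC'}
# -> IntervalTrigger(seconds=30, timezone=<UTC>)

date_spec = {'date': '2024-01-01 09:00:00', 'timezone': 'Europe/Helsinki'}
# -> DateTrigger(datetime(2024, 1, 1, 9, 0), timezone=<Europe/Helsinki>)

cron_spec = {'minute': '*/15', 'hour': '9-17', 'timezone': 'UTC'}
# -> CronTrigger(minute='*/15', hour='9-17', timezone=<UTC>)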
def __init__(self, year=None, month=None, day=None, week=None, day_of_week=None, hour=None,
             minute=None, second=None, start_date=None, end_date=None, timezone=None,
             jitter=None):
    if timezone:
        self.timezone = astimezone(timezone)
    elif isinstance(start_date, datetime) and start_date.tzinfo:
        self.timezone = start_date.tzinfo
    elif isinstance(end_date, datetime) and end_date.tzinfo:
        self.timezone = end_date.tzinfo
    else:
        self.timezone = get_localzone()

    self.start_date = convert_to_datetime(start_date, self.timezone, 'start_date')
    self.end_date = convert_to_datetime(end_date, self.timezone, 'end_date')

    self.jitter = jitter

    values = dict((key, value) for (key, value) in locals().items()
                  if key in self.FIELD_NAMES and value is not None)
    self.fields = []
    assign_defaults = False
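
A hedged usage sketch of this constructor, using APScheduler's usual import path for CronTrigger; jitter randomizes each computed fire time by up to that many seconds:

from apscheduler.triggers.cron import CronTrigger

# Weekdays at 17:30 Helsinki time, with each run shifted by at most 30 seconds.
trigger = CronTrigger(day_of_week='mon-fri', hour=17, minute=30,
                      timezone='Europe/Helsinki', jitter=30)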
trigger_type_ref = trigger['type']
trigger_type = TIMER_TRIGGER_TYPES[trigger_type_ref]
try:
    util_schema.validate(
        instance=trigger['parameters'],
        schema=trigger_type['parameters_schema'],
        cls=util_schema.CustomValidator,
        use_default=True,
        allow_default_none=True,
    )
except jsonschema.ValidationError as e:
    LOG.error('Exception scheduling timer: %s, %s', trigger['parameters'], e, exc_info=True)
    raise  # Or should we just return?

time_spec = trigger['parameters']
time_zone = aps_utils.astimezone(trigger['parameters'].get('timezone'))

time_type = None

if trigger_type['name'] == 'st2.IntervalTimer':
    unit = time_spec.get('unit', None)
    value = time_spec.get('delta', None)
    time_type = IntervalTrigger(**{unit: value, 'timezone': time_zone})
elif trigger_type['name'] == 'st2.DateTimer':
    # Raises an exception if date string isn't a valid one.
    dat = date_parser.parse(time_spec.get('date', None))
    time_type = DateTrigger(dat, timezone=time_zone)
elif trigger_type['name'] == 'st2.CronTimer':
    cron = time_spec.copy()
    cron['timezone'] = time_zone
    time_type = CronTrigger(**cron)
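
As a small aside, a sketch of what the st2.DateTimer branch does with its date string, assuming date_parser is dateutil's parser as the naming suggests:

from dateutil import parser as date_parser
from apscheduler.triggers.date import DateTrigger

dat = date_parser.parse('2024-01-01 09:00:00')   # raises ValueError if the string is not a date
time_type = DateTrigger(dat, timezone='Europe/Helsinki')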
def __init__(self, year=None, month=None, day=None, week=None, day_of_week=None, hour=None,
             minute=None, second=None, start_date=None, end_date=None, timezone=None):
    if timezone:
        self.timezone = astimezone(timezone)
    elif start_date and start_date.tzinfo:
        self.timezone = start_date.tzinfo
    elif end_date and end_date.tzinfo:
        self.timezone = end_date.tzinfo
    else:
        self.timezone = get_localzone()

    self.start_date = convert_to_datetime(start_date, self.timezone, 'start_date')
    self.end_date = convert_to_datetime(end_date, self.timezone, 'end_date')

    values = dict((key, value) for (key, value) in six.iteritems(locals())
                  if key in self.FIELD_NAMES and value is not None)
    self.fields = []
    assign_defaults = False
    for field_name in self.FIELD_NAMES:
        if field_name in values:
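
A hedged sketch of the timezone fallback chain above: with no explicit timezone argument, an aware start_date supplies the zone (pytz is assumed here only to build the aware datetime):

import pytz
from datetime import datetime
from apscheduler.triggers.cron import CronTrigger

start = pytz.timezone('Europe/Helsinki').localize(datetime(2024, 1, 1, 9, 0))
trigger = CronTrigger(hour=9, start_date=start)   # timezone inherited from start_date.tzinfo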
def _configure(self, config):
    # Set general options
    self._logger = maybe_ref(config.pop('logger', None)) or getLogger('apscheduler.scheduler')
    self.timezone = astimezone(config.pop('timezone', None)) or get_localzone()
    self.jobstore_retry_interval = float(config.pop('jobstore_retry_interval', 10))

    # Set the job defaults
    job_defaults = config.get('job_defaults', {})
    self._job_defaults = {
        'misfire_grace_time': asint(job_defaults.get('misfire_grace_time', 1)),
        'coalesce': asbool(job_defaults.get('coalesce', True)),
        'max_instances': asint(job_defaults.get('max_instances', 1))
    }

    # Configure executors
    self._executors.clear()
    for alias, value in config.get('executors', {}).items():
        if isinstance(value, BaseExecutor):
            self.add_executor(value, alias)
        elif isinstance(value, MutableMapping):
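
To illustrate what the scheduler reads here, a hypothetical config mapping; the key names come from the pop()/get() calls in the snippet, and the executor value shape follows the BaseExecutor/MutableMapping branch (the 'type'/'max_workers' keys are an assumption about the plugin-style dict form):

config = {
    'timezone': 'Europe/Helsinki',            # normalized through astimezone()
    'jobstore_retry_interval': 30,
    'job_defaults': {
        'misfire_grace_time': 60,
        'coalesce': False,
        'max_instances': 3,
    },
    'executors': {
        'default': {'type': 'threadpool', 'max_workers': 20},   # dict form, handled by the MutableMapping branch
    },
}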