How to use the apscheduler.jobstores.sqlalchemy.SQLAlchemyJobStore class in APScheduler

To help you get started, we’ve selected a few APScheduler examples that show popular ways SQLAlchemyJobStore is used in public projects.
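As a baseline, here is a minimal sketch of wiring SQLAlchemyJobStore into a scheduler; the sqlite URL, the job id and the tick function are placeholders, not taken from any of the projects below:

from apscheduler.schedulers.background import BackgroundScheduler
from apscheduler.jobstores.sqlalchemy import SQLAlchemyJobStore

def tick():
    print('tick')

# Jobs are persisted to the given database, so they survive process restarts.
scheduler = BackgroundScheduler(jobstores={'default': SQLAlchemyJobStore(url='sqlite:///jobs.sqlite')})
scheduler.add_job(tick, 'interval', seconds=10, id='tick', replace_existing=True)
scheduler.start()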


github agronholm / apscheduler / tests / testintegration.py
def make_jobstore():
    # SQLAlchemyJobStore is None when SQLAlchemy is not installed, so skip then.
    if not SQLAlchemyJobStore:
        raise SkipTest

    return SQLAlchemyJobStore(url='sqlite:///example.sqlite')
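The url argument is a standard SQLAlchemy database URL, so the store is not limited to SQLite; a sketch with placeholder PostgreSQL credentials:

from apscheduler.jobstores.sqlalchemy import SQLAlchemyJobStore

# 'user', 'password' and 'scheduler' are placeholders for real connection details.
store = SQLAlchemyJobStore(url='postgresql://user:password@localhost/scheduler')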
github agronholm / apscheduler / tests / testjobstores.py
def test_sqlalchemy_alternate_tablename():
    if not SQLAlchemyJobStore:
        raise SkipTest

    store = SQLAlchemyJobStore('sqlite:///', tablename='test_table')
    # The jobs table should be created under the custom name.
    eq_(store.jobs_t.name, 'test_table')
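Outside of tests, the same tablename parameter is useful to keep APScheduler's table from clashing with an application's own tables; a minimal sketch (my_app_jobs is an illustrative name):

from apscheduler.jobstores.sqlalchemy import SQLAlchemyJobStore

# Store jobs in a custom-named table instead of the default 'apscheduler_jobs'.
jobstores = {'default': SQLAlchemyJobStore(url='sqlite:///app.db', tablename='my_app_jobs')}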
github wylok / sparrow / module / produce.py
import datetime
import logging
import socket

import pytz
import redis
from flask import Flask
from flask_sqlalchemy import SQLAlchemy
from apscheduler.executors.pool import ThreadPoolExecutor, ProcessPoolExecutor
from apscheduler.schedulers.background import BackgroundScheduler
from apscheduler.jobstores.sqlalchemy import SQLAlchemyJobStore
# `Task` is the project's own task module; its import is omitted in this excerpt.
app = Flask(__name__)
DB = SQLAlchemy(app)
app.config.from_pyfile('../conf/redis.conf')
app.config.from_pyfile('../conf/sql.conf')
app.config.from_pyfile('../conf/task.conf')
logging.basicConfig()
logging.getLogger('apscheduler').setLevel(logging.DEBUG)
redis_host = app.config.get('REDIS_HOST')
redis_port = app.config.get('REDIS_PORT')
redis_password = app.config.get('REDIS_PASSWORD')
task_hosts = app.config.get('TASK_HOSTS')
RC = Redis = redis.StrictRedis(host=redis_host, port=redis_port, decode_responses=True)
HOST = socket.gethostbyname(socket.gethostname())
jobstores = {'default': SQLAlchemyJobStore(url=app.config.get('SQLALCHEMY_BINDS')['idc'])}
executors = {'default': ThreadPoolExecutor(50), 'processpool': ProcessPoolExecutor(8)}
job_defaults = {'coalesce': False, 'max_instances': 3, 'misfire_grace_time': 60}
scheduler = BackgroundScheduler(jobstores=jobstores, executors=executors, job_defaults=job_defaults, timezone=pytz.timezone('Asia/Shanghai'))
# Single-node background execution
def scheduler_tasks():
    date_time = datetime.datetime.now() + datetime.timedelta(minutes=1)
    run_date = date_time.strftime('%H:%M').split(':')
    scheduler.remove_all_jobs()
    # ==========================================================================
    #scheduler.add_job(Task.zabbix_counts_task, 'cron', second='0', minute=run_date[1], hour=run_date[0], id=Task.zabbix_counts_task.__name__, replace_existing=True)
    scheduler.add_job(Task.business_monitor_task, 'cron', second='0', minute='*', id=Task.business_monitor_task.__name__, replace_existing=True)
    scheduler.add_job(Task.es_log_status, 'cron', second='0', minute='*', id=Task.es_log_status.__name__, replace_existing=True)
    scheduler.add_job(Task.es_log_time, 'cron', second='0', minute='*', id=Task.es_log_time.__name__, replace_existing=True)
    scheduler.add_job(Task.business_data, 'cron', second='0', minute='*', id=Task.business_data.__name__, replace_existing=True)
    scheduler.add_job(Task.assets_infos, 'cron', second='0', minute='30', hour='4', id=Task.assets_infos.__name__, replace_existing=True)
    scheduler.add_job(Task.auto_discovery_task, 'cron', second='0', minute='0', hour='*/4', id=Task.auto_discovery_task.__name__, replace_existing=True)
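Because the jobs are persisted in the SQLAlchemy store, the snippet relies on stable id values plus replace_existing=True so that re-running scheduler_tasks() overwrites the stored jobs instead of raising a conflict. A reduced sketch of the pattern, reusing the scheduler above (nightly_report is a placeholder task):

def nightly_report():
    pass  # placeholder body

# Same id on every run: the stored job is replaced rather than duplicated.
scheduler.add_job(nightly_report, 'cron', hour='4', minute='30',
                  id=nightly_report.__name__, replace_existing=True)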
github easytrader / StrategyCeleryWebsite / strategyceleryapp / views.py
#running_jobs = []

    today = datetime.datetime.now()
    #return HttpResponse(html)
    return render_to_response('running_jobs.html', {"running_jobs": running_jobs, "username":username, "today":today},
                              context_instance=RequestContext(request))

# initialize apscheduler
import datetime
import threading

from apscheduler.schedulers.background import BackgroundScheduler
from pytz import utc
from apscheduler.jobstores.sqlalchemy import SQLAlchemyJobStore
from apscheduler.executors.pool import ThreadPoolExecutor, ProcessPoolExecutor

jobstores = {
    'default': SQLAlchemyJobStore(url='sqlite:///db.sqlite3')
}
executors = {
    'default': ThreadPoolExecutor(20),
    'processpool': ProcessPoolExecutor(5)
}
job_defaults = {
    'coalesce': False,
    'max_instances': 3
}
scheduler = BackgroundScheduler(jobstores=jobstores, executors=executors, job_defaults=job_defaults, timezone=utc)
scheduler.start()


lock = threading.Lock()

def job(strategy, userid, start_date):
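The excerpt stops at the job function's signature; a function with arguments like these would typically be scheduled with the args parameter, roughly as follows (a sketch reusing the scheduler above, with strategy, userid and start_date coming from the caller):

# Run once at start_date, forwarding the arguments to job().
scheduler.add_job(job, 'date', run_date=start_date,
                  args=[strategy, userid, start_date],
                  id='job_%s' % userid, replace_existing=True)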
github kylechenoO / AIOPS_PLATFORM / Scheduler / getinfo.py
import datetime
import time

import psutil
from pytz import utc
from apscheduler.schedulers.blocking import BlockingScheduler
from apscheduler.schedulers.background import BackgroundScheduler
from apscheduler.jobstores.sqlalchemy import SQLAlchemyJobStore
from apscheduler.executors.pool import ThreadPoolExecutor, ProcessPoolExecutor

def job():
    with open('./log.txt', 'a') as fp:
        now = datetime.datetime.now()
        boot_time = psutil.boot_time()
        result = '[{}][{}]\n'.format(now, boot_time)
        fp.write(result)


jobstores = {
    'default': SQLAlchemyJobStore(url='sqlite:///jobs.sqlite')
}
executors = {
    'default': ThreadPoolExecutor(20),
    'processpool': ProcessPoolExecutor(5)
}
job_defaults = {
    'coalesce': False,
    'max_instances': 3
}
sched = BackgroundScheduler(jobstores=jobstores, executors=executors, job_defaults=job_defaults, timezone=utc)
## sched = BlockingScheduler(jobstores=jobstores, executors=executors, job_defaults=job_defaults, timezone=utc)
## sched.add_job(job, 'cron', second='*/2')
sched.start()
while True:
    time.sleep(1)
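BackgroundScheduler does its work in daemon threads, so the while True: time.sleep(1) loop exists only to keep the main thread alive. A slightly more robust variant shuts the scheduler down cleanly on Ctrl-C (a sketch):

try:
    while True:
        time.sleep(1)
except (KeyboardInterrupt, SystemExit):
    # shutdown() waits for running jobs to finish by default.
    sched.shutdown()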
github my8100 / scrapydweb / scrapydweb / utils / scheduler.py
import logging

from apscheduler.events import EVENT_JOB_MAX_INSTANCES, EVENT_JOB_REMOVED
from apscheduler.executors.pool import ThreadPoolExecutor
from apscheduler.jobstores.memory import MemoryJobStore
from apscheduler.jobstores.sqlalchemy import SQLAlchemyJobStore
from apscheduler.schedulers.background import BackgroundScheduler

# TIMER_TASKS_HISTORY_LOG and APSCHEDULER_DATABASE_URI come from the project's
# settings; that import is omitted in this excerpt.
apscheduler_logger = logging.getLogger('apscheduler')  # the logger APScheduler logs to

# _handler = logging.StreamHandler()
# logging.FileHandler(filename, mode='a', encoding=None, delay=False)
_handler = logging.FileHandler(TIMER_TASKS_HISTORY_LOG, mode='a', encoding='utf-8')
_handler.setLevel(logging.WARNING)
_formatter = logging.Formatter(fmt="[%(asctime)s] %(levelname)s in %(name)s: %(message)s")
_handler.setFormatter(_formatter)
apscheduler_logger.addHandler(_handler)


# EVENT_JOB_REMOVED = 2 ** 10
# {'alias': None, 'code': 1024, 'job_id': '1', 'jobstore': 'default'}
# EVENT_JOB_MAX_INSTANCES = 2 ** 16
EVENT_MAP = {EVENT_JOB_MAX_INSTANCES: 'EVENT_JOB_MAX_INSTANCES', EVENT_JOB_REMOVED: 'EVENT_JOB_REMOVED'}

jobstores = {
    'default': SQLAlchemyJobStore(url=APSCHEDULER_DATABASE_URI),
    'memory': MemoryJobStore()
}
executors = {
    'default': ThreadPoolExecutor(20),
    # 'processpool': ProcessPoolExecutor(5)
}
job_defaults = {
    'coalesce': True,
    'max_instances': 1
}
# https://apscheduler.readthedocs.io/en/latest/userguide.html
# scheduler = BackgroundScheduler(jobstores=jobstores, executors=executors, job_defaults=job_defaults, timezone=utc)
scheduler = BackgroundScheduler(jobstores=jobstores, executors=executors, job_defaults=job_defaults)


# https://apscheduler.readthedocs.io/en/latest/userguide.html#scheduler-events
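The EVENT_MAP above suggests the module reacts to these two events; with APScheduler's public API that is done through add_listener, roughly like this (the logging body is illustrative):

def job_event_listener(event):
    # event.code matches the EVENT_* constants; event.job_id identifies the job.
    apscheduler_logger.warning('%s: job %s', EVENT_MAP.get(event.code, event.code), event.job_id)

scheduler.add_listener(job_event_listener, EVENT_JOB_MAX_INSTANCES | EVENT_JOB_REMOVED)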
github JeffVandrewJr / patron / config.py
    # Excerpt from a configuration class; imports of os, shelve, os.path.join
    # and SQLAlchemyJobStore are omitted here.
    if os.environ.get('SCHEDULER_MINUTE') is not None:
        SCHEDULER_MINUTE = int(os.environ.get('SCHEDULER_MINUTE'))
    else:
        SCHEDULER_MINUTE = None
    SECRET_KEY_LOCATION = os.environ.get('SECRET_KEY_LOCATION') or \
        join(basedir, 'key')
    with shelve.open(SECRET_KEY_LOCATION) as key:
        if key.get('key') is None:
            SECRET_KEY = os.urandom(24).hex()
            key['key'] = SECRET_KEY
        else:
            SECRET_KEY = key['key']
    SQLALCHEMY_DATABASE_URI = os.environ.get('DATABASE_URL') or \
        'sqlite:///' + join(basedir, 'app.db')
    SCHEDULER_JOBSTORES = {
        'default': SQLAlchemyJobStore(url=SQLALCHEMY_DATABASE_URI)
    }
    SQLALCHEMY_TRACK_MODIFICATIONS = False
    THEME = 'spacelab'
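SCHEDULER_JOBSTORES is the configuration key read by the Flask-APScheduler extension; assuming that extension is in use here, the class above would be consumed roughly like this (a sketch; Config names the class this excerpt comes from):

from flask import Flask
from flask_apscheduler import APScheduler

app = Flask(__name__)
app.config.from_object(Config)  # the configuration class shown above

scheduler = APScheduler()
scheduler.init_app(app)
scheduler.start()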
github EmmaOnThursday / next-book / app / generate_delivery_dates.py
"""Every day, update the database at 12am to add a delivery date to one rec for each user."""

import random
import datetime as dt
from server import app
from model import connect_to_db, db, Book, User, Recommendation, Subject, UserBook, BookSubject

from pytz import utc

from apscheduler.schedulers.background import BackgroundScheduler
from apscheduler.jobstores.sqlalchemy import SQLAlchemyJobStore
from apscheduler.executors.pool import ThreadPoolExecutor, ProcessPoolExecutor


jobstores = {
    'default': SQLAlchemyJobStore(url='postgresql:///nextbook')
}

executors = {
    'default': ThreadPoolExecutor(20),
    'processpool': ProcessPoolExecutor(5)
}

job_defaults = {
    'coalesce': False,
    'max_instances': 3
}

scheduler = BackgroundScheduler(jobstores=jobstores, executors=executors, job_defaults=job_defaults, timezone=utc)

def generate_recommendation_delivery_dates():
    active_users = User.query.filter(User.paused == 0).all()
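The module docstring says the update runs at 12am; with the scheduler configured above, that corresponds to a daily cron trigger along these lines (a sketch; the job id is illustrative):

# Run every day at midnight (UTC, per the scheduler's timezone).
scheduler.add_job(generate_recommendation_delivery_dates, 'cron', hour=0, minute=0,
                  id='generate_delivery_dates', replace_existing=True)
scheduler.start()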
github tomoncle / PythonStudy / scheduler_task / study_apscheduler / examples / executors / configure.py
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Time           : 17-8-9 09:56 AM
# @Author         : Tom.Lee
# @CopyRight      : 2016-2017
# @File           : job_configure.py
# @Product        : PyCharm


from apscheduler.executors.pool import ThreadPoolExecutor, ProcessPoolExecutor
from apscheduler.jobstores.sqlalchemy import SQLAlchemyJobStore
from apscheduler.schedulers.background import BackgroundScheduler
from pytz import utc

job_stores = {
    'default': SQLAlchemyJobStore(
        url='mysql+mysqldb://root:root@localhost:3306/djangoapp?charset=utf8')
}


executors = {
    'default': ThreadPoolExecutor(20),
    'processpool': ProcessPoolExecutor(5)
}


job_defaults = {
    'coalesce': False,
    'max_instances': 3
}

# UTC as the scheduler’s timezone
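The excerpt ends at the comment; given the imports and the dictionaries above, the construction presumably continues along these lines (a sketch):

scheduler = BackgroundScheduler(jobstores=job_stores, executors=executors,
                                job_defaults=job_defaults, timezone=utc)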
github mouday / SpiderAdmin / spideradmin / scheduler_app / controller.py
import logging

from apscheduler.jobstores.sqlalchemy import SQLAlchemyJobStore
from apscheduler.schedulers.background import BackgroundScheduler

# `formatter` and the `scheduler_logging` logger are defined earlier in the
# module; that part is omitted in this excerpt.
scheduler_logging_filename = "scheduler_logging.log"
file_handler = logging.FileHandler(scheduler_logging_filename)
file_handler.setFormatter(formatter)

stream_handler = logging.StreamHandler()
stream_handler.setFormatter(formatter)

scheduler_logging.addHandler(stream_handler)
scheduler_logging.addHandler(file_handler)

# ==============================================
# Scheduler service configuration
# ==============================================

jobstores = {
    'default': SQLAlchemyJobStore(url='sqlite:///db.sqlite')
}

# executors = {
#     'default': ThreadPoolExecutor(THREAD_NUM),
#     'processpool': ProcessPoolExecutor(PROCESS_NUM)
# }

job_defaults = {
    'coalesce': True,
    'max_instances': 1
}

scheduler = None


def start_scheduler():
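    # The excerpt ends at the function header. A plausible body, following
    # APScheduler's usual lazy-start pattern (an assumption, not the project's code):
    global scheduler
    if scheduler is None:
        # The executors dict is commented out above, so scheduler defaults apply.
        scheduler = BackgroundScheduler(jobstores=jobstores, job_defaults=job_defaults)
        scheduler.start()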