# render_to_response(..., context_instance=RequestContext(request)) was removed
# in Django 2.0; render() (from django.shortcuts) is the modern equivalent.
return render(request, 'running_jobs.html',
              {"running_jobs": running_jobs, "username": username, "today": today})
# Initialise APScheduler with a persistent job store and pooled executors
import threading
from apscheduler.schedulers.background import BackgroundScheduler
from pytz import utc
from apscheduler.jobstores.sqlalchemy import SQLAlchemyJobStore
from apscheduler.executors.pool import ThreadPoolExecutor, ProcessPoolExecutor
jobstores = {
'default': SQLAlchemyJobStore(url='sqlite:///db.sqlite3')
}
executors = {
'default': ThreadPoolExecutor(20),
'processpool': ProcessPoolExecutor(5)
}
job_defaults = {
'coalesce': False,
'max_instances': 3
}
scheduler = BackgroundScheduler(jobstores=jobstores, executors=executors, job_defaults=job_defaults, timezone=utc)
scheduler.start()
lock = threading.Lock()

def job(strategy, userid, start_date):
    # Serialise job bodies: hold the lock for the duration of the run
    # (the context manager guarantees release even if the job raises)
    with lock:
        print("job started: strategy=%s, user=%s, start=%s" % (strategy, userid, start_date))
import signac
from apscheduler.schedulers.blocking import BlockingScheduler
from apscheduler.jobstores.mongodb import MongoDBJobStore

def get_scheduler(num_procs=1):
    # Blocking scheduler persisting jobs in the signac project's MongoDB
    return BlockingScheduler(
        jobstores={
            'mongo': MongoDBJobStore(
                client=signac.common.host.get_client(),
                database=str(signac.get_project()),
                collection='apscheduler')},
        executors={
            'threadpool': ThreadPoolExecutor(),
            # 'processpool': ProcessPoolExecutor(max_workers=num_procs)
        })
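# Usage sketch for the factory above; the nightly job is illustrative. The
# jobstore must be named explicitly, since the Mongo store is registered as
# 'mongo' rather than 'default', and persistent stores need a named,
# importable callable (a lambda would not serialise). start() blocks on a
# BlockingScheduler.
def nightly_sync():
    print('nightly sync')

sched = get_scheduler()
sched.add_job(nightly_sync, 'cron', hour=0, jobstore='mongo')
sched.start()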
def __init__(self):
    # One-shot run date three seconds from now, plus a timestamp tag
    self.run_date = datetime.datetime.now() + datetime.timedelta(seconds=3)
    self.run_date = self.run_date.strftime('%Y-%m-%d %H:%M:%S')
    self.tt = time.strftime('%Y%m%d%H%M', time.localtime())
    self.scheduler = BackgroundScheduler()
    self.executors = {'default': ThreadPoolExecutor(20), 'processpool': ProcessPoolExecutor(5)}
    self.job_defaults = {'coalesce': False, 'max_instances': 3}
    self.scheduler.configure(timezone=pytz.timezone('Asia/Shanghai'),
                             job_defaults=self.job_defaults, executors=self.executors)

def Run(self):
    # Excerpt truncates here; starting the scheduler is the apparent intent
    self.scheduler.start()
def __init__(self, gconfig=None, **options):
    executors = {'default': ThreadPoolExecutor(5)}
    # timezone is a scheduler option, not a job default, so it belongs in the
    # super().__init__ call rather than in job_defaults (where APScheduler
    # would silently ignore it); zero-argument super() also avoids skipping
    # BackgroundScheduler's own __init__
    job_defaults = {'coalesce': False, 'max_instances': 3}
    super().__init__(executors=executors, job_defaults=job_defaults,
                     timezone=pytz.timezone('EST'))
    signal.connect('scheduler', 'process', handleScheduleSignals.doHandle)
def _setup_scheduler():
    """Construct Beer Garden's scheduler from the "scheduler" config section."""
job_stores = {"beer_garden": MongoJobStore()}
scheduler_config = beer_garden.config.get("scheduler")
executors = {"default": APThreadPoolExecutor(scheduler_config.max_workers)}
job_defaults = scheduler_config.job_defaults.to_dict()
return BackgroundScheduler(
jobstores=job_stores,
executors=executors,
job_defaults=job_defaults,
timezone=utc,
)
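# Usage sketch: build and start the scheduler. Persistent jobs must pass
# jobstore='beer_garden' explicitly, since the Mongo store above is not
# registered under the 'default' name (APScheduler auto-creates an in-memory
# 'default' store at start()).
scheduler = _setup_scheduler()
scheduler.start()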
def __init__(self, proxy: Proxy = None):
self.log = logging.getLogger('theonionbox')
self.proxy = proxy
self.documents = {}
executors = {
'default': ThreadPoolExecutor(50)
}
job_defaults = {
'coalesce': True,
'max_instances': 10
}
self.scheduler = BackgroundScheduler(logger=self.log, executors=executors, job_defaults=job_defaults)
self.scheduler.start()
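    def stop(self):
        # Sketch of a matching teardown (method name assumed; not shown in
        # the original excerpt): stop the scheduler when the box shuts down
        if self.scheduler.running:
            self.scheduler.shutdown(wait=False)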
import logging
import threading
from apscheduler.schedulers.background import BackgroundScheduler
from django_apscheduler.jobstores import DjangoJobStore, register_events
from apscheduler.executors.pool import ThreadPoolExecutor
from gerapy.server.core.utils import get_scrapyd
from gerapy.server.core.models import Task, Client
logger = logging.getLogger(__name__)
db_time_format = "%Y-%m-%d %H:%M:%S"
executors = {
'default': ThreadPoolExecutor(20)
}
scheduler = BackgroundScheduler(executors=executors)
scheduler.add_jobstore(DjangoJobStore(), "default")
def work_func(client, project, spider):
    # Look up the target client and schedule the spider run via scrapyd
    client_obj = Client.objects.get(id=client)
    scrapyd = get_scrapyd(client_obj)
    logger.warning("Run %s: %s on server %s", project, spider, client_obj.ip)
    try:
        job_id = scrapyd.schedule(project, spider)
        logger.warning("%s: %s; job: %s", project, spider, job_id)
    except Exception as err:
        logger.error("Please deploy the project to %s first (%s)", client_obj.ip, err)
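# A minimal sketch of wiring work_func to a schedule; the client id, project,
# spider name, and 30-minute cadence are placeholders, not Gerapy defaults.
register_events(scheduler)
scheduler.add_job(work_func, 'cron', args=[1, 'myproject', 'myspider'],
                  minute='*/30', id='task-1', replace_existing=True)
scheduler.start()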
def __init__(self):
    self.run_date = datetime.datetime.now() + datetime.timedelta(seconds=3)
    self.run_date = self.run_date.strftime('%Y-%m-%d %H:%M:%S')
    self.tm = time.strftime('%Y%m%d%H%M%S', time.localtime())
    self.scheduler = BackgroundScheduler()
    self.executors = {'default': ThreadPoolExecutor(10), 'processpool': ProcessPoolExecutor(5)}
    self.job_defaults = {'coalesce': False, 'max_instances': 1}
    self.scheduler.configure(timezone=pytz.timezone('Asia/Shanghai'),
                             job_defaults=self.job_defaults, executors=self.executors)

def Scheduler_mem(self, func, args=None):
    # Excerpt truncates here; a one-shot, memory-stored job at the
    # pre-computed run_date is the apparent intent
    self.scheduler.add_job(func, 'date', run_date=self.run_date, args=args)
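# Hypothetical usage of the wrapper above (the excerpt omits the class name,
# so "JobScheduler" is assumed): queue a one-shot job, then start the scheduler.
js = JobScheduler()
js.Scheduler_mem(print, args=['fired at ' + js.run_date])
js.scheduler.start()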
@property
def stats(self):
return [x.as_dict for x in self.metrics.values()]
@property
def stats_influx(self):
points = []
for metric in self.metrics.values():
points.extend(metric.as_influx)
return points
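# Hedged usage sketch: push the collected points to InfluxDB, assuming each
# as_influx entry is already shaped for InfluxDBClient.write_points() and
# that "collector" is an instance of the metrics class above.
from influxdb import InfluxDBClient

client = InfluxDBClient(host='localhost', port=8086, database='metrics')
client.write_points(collector.stats_influx)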
class HttpServer(flask.Flask):
"""Our HTTP/API server."""
EXECUTORS = {
'default': ThreadPoolExecutor(20),
'processpool': ProcessPoolExecutor(5)
}
def __init__(self, name, ip, port, *args, **kwargs):
"""Constructor.
Args:
name: (str) name of Flask service
ip: (str) IP address to bind HTTP server
port: (int) TCP port for the HTTP server to listen on
"""
super(HttpServer, self).__init__(name, *args, **kwargs)
# Fixup the root path for Flask so it can find templates/*
root_path = os.path.abspath(os.path.dirname(__file__))
logging.debug('Setting root_path for Flask: %s', root_path)
self.root_path = root_path
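        # The EXECUTORS mapping above is presumably fed to an APScheduler
        # instance later in the class; a sketch of that wiring (not shown
        # in the excerpt):
        self.scheduler = BackgroundScheduler(executors=HttpServer.EXECUTORS)
        self.scheduler.start()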
Parameters
----------
main : bool
    whether the scheduler being initialised is nephos' main scheduler
"""
self.main = main
job_stores = {
'default': SQLAlchemyJobStore(url='sqlite:///' + PATH_JOB_DB)
}
if self.main:
LOG.debug("Storing scheduler jobs in %s", job_stores["default"])
executors = {
'default': ThreadPoolExecutor(MAX_CONCURRENT_JOBS)
}
if self.main:
LOG.info("Initialising scheduler with timezone %s", TMZ)
try:
self._scheduler = BackgroundScheduler(jobstores=job_stores, executors=executors,
timezone=TMZ)
# catch if the timezone is not recognised by the scheduler
except UnknownTimeZoneError:
LOG.warning("Unknown timezone %s, resetting timezone to 'utc'", TMZ)
self._scheduler = BackgroundScheduler(jobstores=job_stores, executors=executors,
timezone='utc')
if self.main:
LOG.info("Scheduler initialised with database at %s", PATH_JOB_DB)