How to use the celery.schedules.crontab function in celery

To help you get started, we’ve selected a few examples of celery.schedules.crontab, drawn from popular ways it is used in public projects.

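Before the project examples, here is a minimal, self-contained sketch of the pattern they all share: define a task, then point a beat schedule entry at it with a crontab. The app name and broker URL are placeholders.

from celery import Celery
from celery.schedules import crontab

app = Celery('proj', broker='redis://localhost:6379/0')  # placeholder broker URL

@app.task
def cleanup():
    """Placeholder task used only to illustrate scheduling."""

app.conf.beat_schedule = {
    'cleanup-every-morning': {
        'task': cleanup.name,                   # the task's registered name
        'schedule': crontab(minute=0, hour=7),  # 07:00 daily, in the app's timezone
    },
}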

github 4acoin / 4acoin / cloudbank / settings.py
#: Only add pickle to this list if your broker is secured
#: from unwanted access (see userguide/security.html)
CELERY_ACCEPT_CONTENT = ['json']
CELERY_TASK_SERIALIZER = 'json'
CELERY_RESULT_BACKEND = 'django-db'
CELERY_TIMEZONE = 'GMT'

# Activate after installation!
from celery.schedules import crontab
from cloudbank.tasks import givereward

# Other Celery settings
CELERY_BEAT_SCHEDULE = {
    'task-number-one': {
        'task': 'cloudbank.tasks.givereward',
        'schedule': crontab(hour='*/4')
    }
}



# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.11/howto/deployment/checklist/

# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'j54w1jdm@o(7vmnv=9_duz$c8zg-brf3z%i8yf%9@o(@k4fezu'

# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True

ALLOWED_HOSTS = ['localhost', '188.166.77.61', 'bitjoin', 'cloudbankproject.com', 'www.cloudbankproject.com', '167.99.47.107']
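One detail worth flagging in the schedule above: crontab fields that are not given default to '*', so crontab(hour='*/4') fires every minute during hours 0, 4, 8, and so on. If the intent was once every four hours, pin the minute; a corrected sketch:

from celery.schedules import crontab

crontab(minute=0, hour='*/4')  # 00:00, 04:00, 08:00, 12:00, 16:00, 20:00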
github p2pu / learning-circles / learnwithpeople / settings.py
},
    'send_survey_reminders': {
        'task': 'studygroups.tasks.send_all_studygroup_survey_reminders',
        'schedule': crontab(minute='30'),
    },
    'send_facilitator_survey': {
        'task': 'studygroups.tasks.send_all_facilitator_surveys',
        'schedule': crontab(minute='30'),
    },
    'weekly_update': {
        'task': 'studygroups.tasks.weekly_update',
        'schedule': crontab(hour=10, minute=0, day_of_week='monday'),
    },
    'daily_backup': {
        'task': 'backup.tasks.make_backup',
        'schedule': crontab(hour=1, minute=0),
    },
    'sync_typeform_surveys': {
        'task': 'surveys.tasks.sync_surveys',
        'schedule': crontab(minute='10'),
    },
    'send_facilitator_survey_reminder': {
        'task': 'studygroups.tasks.send_all_facilitator_survey_reminders',
        'schedule': crontab(minute='30'),
    },
    'send_final_learning_circle_report': {
        'task': 'studygroups.tasks.send_all_learning_circle_reports',
        'schedule': crontab(minute='30'),
    },
    'send_community_digest': {
        'task': 'studygroups.tasks.send_out_community_digest',
        'schedule': crontab(day_of_week='monday', hour=11, minute=0),
github ziirish / burp-ui / burpui / tasks.py
BEAT_SCHEDULE = {
    'backup-running-4-minutely': {
        'task': '{}.backup_running'.format(ME),
        'schedule': timedelta(seconds=15),  # run every 15 seconds
    },
    'get-all-backups-every-twenty-minutes': {
        'task': '{}.get_all_backups'.format(ME),
        'schedule': crontab(minute='*/20'),  # every 20 minutes
    },
    'get-all-clients-reports-every-twenty-minutes': {
        'task': '{}.get_all_clients_reports'.format(ME),
        'schedule': crontab(minute='*/20'),  # every 20 minutes
    },
    'cleanup-expired-sessions-every-four-hours': {
        'task': '{}.cleanup_expired_sessions'.format(ME),
        'schedule': crontab(minute=1, hour='*/4'),  # every four hours
    },
}

if db:
    from burpui.models import Task

    BEAT_SCHEDULE.update({
        'cleanup-restore-hourly': {
            'task': '{}.cleanup_restore'.format(ME),
            'schedule': crontab(minute='12'),  # run every hour
        },
    })

if 'CELERYBEAT_SCHEDULE' in celery.conf and \
        isinstance(celery.conf['CELERYBEAT_SCHEDULE'], dict):
    celery.conf['CELERYBEAT_SCHEDULE'].update(BEAT_SCHEDULE)
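CELERYBEAT_SCHEDULE is the pre-4.0 setting name; Celery 4 renamed it to the lowercase beat_schedule. A sketch of the same merge against the lowercase name, assuming the same `celery` app instance as above:

merged = dict(celery.conf.beat_schedule or {})
merged.update(BEAT_SCHEDULE)
celery.conf.beat_schedule = merged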
github guardian / portal-plugins-public / portal / plugins / gnmatomresponder / tasks.py
@periodic_task(run_every=crontab(minute=45))
def check_unprocessed_pacxml():
    """
    Scheduled task to check if any unprocessed pac forms have "fallen through the cracks"
    :return:
    """
    from models import PacFormXml
    from pac_xml import PacXmlProcessor
    from django.conf import settings
    from vs_mixin import VSMixin

    role_name = settings.ATOM_RESPONDER_ROLE_NAME
    session_name = "GNMAtomResponderTimed"

    vs = VSMixin()
    proc = PacXmlProcessor(role_name, session_name)
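The @periodic_task decorator used here was deprecated in Celery 4 and removed during the 5.x series. The documented replacement registers the schedule on the app itself; a sketch with a hypothetical app instance:

from celery import Celery
from celery.schedules import crontab

app = Celery('portal')

@app.task
def check_unprocessed_pacxml():
    ...

@app.on_after_configure.connect
def setup_periodic_tasks(sender, **kwargs):
    # minute 45 of every hour, matching the decorator above
    sender.add_periodic_task(crontab(minute=45), check_unprocessed_pacxml.s())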
github skillachie / ark_agent / worker / scheduler.py
config_data = yaml.safe_load(config_file)  # safe_load avoids executing arbitrary YAML tags

BROKER_URL = "mongodb://%s:%d/jobs" % (config_data['mongodb']['host'], config_data['mongodb']['port'])

# The broker also serves as the result backend, so job results land in MongoDB
app = Celery('ark_agent.scheduler',
             broker=BROKER_URL,
             backend=BROKER_URL,
             include=['eod_data_tasks'])  # modules to import when the worker starts


# Schedule config
app.conf.update(
    CELERYBEAT_SCHEDULE={
        'every-day-at-seven': {
            'task': 'eod_data_tasks.generate_eod_tasks',
            'schedule': crontab(minute=0, hour=19,
                                day_of_week='mon,tue,wed,thu,fri'),  # 19:00 on weekdays
        },
    },
    CELERY_TIMEZONE='US/Eastern',
    CELERY_ACCEPT_CONTENT=['pickle', 'json'],
)

if __name__ == '__main__':
    app.start()  # hand control to the Celery command line; the app instance is named `app`
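CELERY_TIMEZONE matters here: crontab fields are evaluated in the app's configured timezone, so hour=19 above means 7 PM US/Eastern rather than UTC. The lowercase Celery 4+ equivalent:

app.conf.timezone = 'US/Eastern'  # crontab(minute=0, hour=19) now fires at 19:00 Eastern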
github fga-eps-mds / 2018.1-Dr-Down / drdown / users / models / model_patient.py
@periodic_task(run_every=crontab(day_of_month=[1, ]))
def careline_notification():

    target_patients = list(Patient.objects.all())
    target_patients = list(
        filter(
            lambda x: x.count_incomplete_procedures_for_current_age() > 0,
            target_patients
        )
    )

    for pat in target_patients:
        mail.send_patient_careline_status(pat)

    return target_patients
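Here too the unspecified fields default to '*': crontab(day_of_month=[1, ]) fires every minute for the whole first day of the month. If a single monthly notification is wanted, pin the hour and minute (the values below are illustrative):

crontab(minute=0, hour=8, day_of_month=[1])  # 08:00 on the 1st of each month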
github pypa / warehouse / warehouse / packaging / __init__.py
config.register_origin_cache_keys(
        User.name,
        purge_keys=[
            key_factory("user/{obj.username}"),
            key_factory("project/{itr.normalized_name}", iterate_on="projects"),
        ],
    )
    config.register_origin_cache_keys(
        Email.primary,
        purge_keys=[
            key_factory("user/{obj.user.username}"),
            key_factory("project/{itr.normalized_name}", iterate_on="user.projects"),
        ],
    )

    config.add_periodic_task(crontab(minute="*/5"), update_description_html)

    # Add a periodic task to compute trending once a day, assuming we have
    # been configured to be able to access BigQuery.
    if config.get_settings().get("warehouse.trending_table"):
        config.add_periodic_task(crontab(minute=0, hour=3), compute_trending)
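warehouse wraps Celery behind a Pyramid config directive, but the crontab objects are standard. On a plain Celery app the equivalent registrations would look like this sketch (assuming both tasks take no arguments):

app.add_periodic_task(crontab(minute='*/5'), update_description_html.s())  # every five minutes
app.add_periodic_task(crontab(minute=0, hour=3), compute_trending.s())     # daily at 03:00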
github YoLoveLife / DevOps / deveops / conf.py
UPTIME_LIMIT = 70  # percent


# SMTP configuration
SMTP_HOST = ''
SMTP_PORT = 25
SMTP_USER = ''
SMTP_PASSWD = ''

# DNS servers
INNER_DNS = ''
OUTER_DNS = ''

# Crontab configuration
from celery.schedules import crontab
DASHBOARD_TIME = crontab(minute=30, hour=1, day_of_week="sunday")  # Sundays at 01:30
EXPIRED_TIME = crontab(minute=30, hour=1, day_of_week="sunday")  # Sundays at 01:30
CHECK_TIME = crontab(minute='*')  # every minute
MANAGER_TIME = crontab(minute=16, hour=10)  # daily at 10:16
DNS_TIME = crontab(minute='*')  # every minute
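Defining crontab objects as module-level constants, as above, keeps the schedule readable in one place. A sketch of how they might be wired into a beat schedule (the task paths are hypothetical):

CELERYBEAT_SCHEDULE = {
    'dashboard-refresh': {
        'task': 'deveops.tasks.dashboard_refresh',  # hypothetical task path
        'schedule': DASHBOARD_TIME,
    },
    'host-check': {
        'task': 'deveops.tasks.host_check',  # hypothetical task path
        'schedule': CHECK_TIME,
    },
}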
github fcurella / django-recommends / recommends / tasks.py
    @periodic_task(name='recommends_precompute',
                   run_every=crontab(**RECOMMENDS_TASK_CRONTAB),
                   expires=RECOMMENDS_TASK_EXPIRES)
    def _recommends_precompute():
        recommends_precompute()
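Expanding a dict with crontab(**RECOMMENDS_TASK_CRONTAB) lets the schedule be overridden from Django settings rather than hard-coded. For example (the values shown are assumptions, not the library's defaults):

RECOMMENDS_TASK_CRONTAB = {'hour': 2, 'minute': 0}  # assumed example values
run_every = crontab(**RECOMMENDS_TASK_CRONTAB)      # equivalent to crontab(hour=2, minute=0)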
github dimagi / commcare-hq / custom / rch / tasks.py
@periodic_task(run_every=crontab(minute="0", hour="18"), queue='background_queue')
def fetch_rch_mother_beneficiaries():
    RCHMotherRecord.update_beneficiaries()
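The queue option here routes each beat-spawned run onto the background_queue worker queue instead of the default. With the decorator gone in current Celery releases, the same option passes through add_periodic_task; a sketch:

app.add_periodic_task(
    crontab(minute=0, hour=18),          # 18:00 daily, matching the decorator above
    fetch_rch_mother_beneficiaries.s(),
    queue='background_queue',            # forwarded to apply_async when beat sends the task
)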