def notify_consumers(doc, _method=None):
    """Send update notifications to event consumers whenever an update log is generated"""
    enqueued_method = 'frappe.event_streaming.doctype.event_consumer.event_consumer.notify_event_consumers'
    jobs = get_jobs()
    if not jobs or enqueued_method not in jobs[frappe.local.site]:
        frappe.enqueue(enqueued_method, doctype=doc.ref_doctype, queue='long', enqueue_after_commit=True)

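The snippet above is the core idiom this whole section revolves around: ask get_jobs() for the jobs currently waiting in RQ (a mapping of site name to queued entries, keyed by dotted method path by default) and enqueue only when the method is not already present. A minimal sketch of that idiom wrapped in a reusable helper follows; enqueue_once is a hypothetical name, not an existing Frappe utility, and it only covers the default method-path key (later snippets key by job_name or job_type instead).

import frappe
from frappe.utils.background_jobs import get_jobs


def enqueue_once(method_path, queue="long", **kwargs):
    """Hypothetical helper: enqueue `method_path` only if it is not already queued.

    `method_path` is the dotted path to the job function; remaining kwargs are
    passed straight through to frappe.enqueue. A sketch of the idiom used in
    notify_consumers() above, not part of the Frappe API.
    """
    jobs = get_jobs()  # {site: [dotted method paths currently queued], ...}
    if not jobs or method_path not in jobs.get(frappe.local.site, []):
        frappe.enqueue(method_path, queue=queue, **kwargs)
        return True
    return False
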
def pull(now=False):
    """Will be called via scheduler, pull emails from all enabled Email accounts."""
    if frappe.cache().get_value("workers:no-internet") == True:
        if test_internet():
            frappe.cache().set_value("workers:no-internet", False)
        else:
            return

    queued_jobs = get_jobs(site=frappe.local.site, key='job_name')[frappe.local.site]
    email_accounts = frappe.db.sql_list("""select name from `tabEmail Account`
        where enable_incoming=1 and awaiting_password=0""")

    # No incoming email account available
    if not email_accounts:
        return

    if now:
        pull_from_email_accounts(email_accounts)
    else:
        # job_name is used to prevent duplicates in queue
        job_name = 'pull_from_email_accounts|{0}'.format(",".join(email_accounts))
        if job_name not in queued_jobs:
            enqueue(pull_from_email_accounts, 'short', event='all', job_name=job_name,
                email_accounts=email_accounts)

def pull(now=False):
    """Will be called via scheduler, pull emails from all enabled Email accounts."""
    if frappe.cache().get_value("workers:no-internet") == True:
        if test_internet():
            frappe.cache().set_value("workers:no-internet", False)
        else:
            return

    queued_jobs = get_jobs(site=frappe.local.site, key='job_name')[frappe.local.site]
    for email_account in frappe.get_list("Email Account",
            filters={"enable_incoming": 1, "awaiting_password": 0}):
        if now:
            pull_from_email_account(email_account.name)
        else:
            # job_name is used to prevent duplicates in queue
            job_name = 'pull_from_email_account|{0}'.format(email_account.name)
            if job_name not in queued_jobs:
                enqueue(pull_from_email_account, 'short', event='all', job_name=job_name,
                    email_account=email_account.name)

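Both pull() variants above consult a cached "workers:no-internet" flag before polling mailboxes, and clear it once test_internet() succeeds. One plausible shape for such a connectivity probe is sketched below; the real helper in Frappe's email module may differ, and the host, port, and timeout values are illustrative defaults.

import socket


def test_internet(host="8.8.8.8", port=53, timeout=3):
    """Plausible sketch of a connectivity probe like the test_internet() used above.

    Opens a TCP connection to a well-known endpoint and reports success; the
    pull() functions above use the result to decide whether to clear the
    "workers:no-internet" cache flag and resume mailbox polling.
    """
    try:
        with socket.create_connection((host, port), timeout=timeout):
            return True
    except OSError:
        return False
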
def notify(consumer):
    """notify individual event consumers about a new update"""
    consumer_status = consumer.get_consumer_status()
    if consumer_status == 'online':
        try:
            client = get_consumer_site(consumer.callback_url)
            client.post_request({
                'cmd': 'frappe.event_streaming.doctype.event_producer.event_producer.new_event_notification',
                'producer_url': get_url()
            })
            consumer.flags.notified = True
        except Exception:
            consumer.flags.notified = False
    else:
        consumer.flags.notified = False

    # enqueue another job if the site was not notified
    if not consumer.flags.notified:
        enqueued_method = 'frappe.event_streaming.doctype.event_consumer.event_consumer.notify'
        jobs = get_jobs()
        if not jobs or enqueued_method not in jobs[frappe.local.site]:
            frappe.enqueue(enqueued_method, queue='long', enqueue_after_commit=True, **{'consumer': consumer})

def import_update_items(xml_files):
    try:
        site_name = get_site_name(frappe.local.request.host)
        if site_name == 'localhost':
            site_name = 'site1.local'
        max_time = 7200
        args = {'xml_files': xml_files, 'site_name': site_name}
        queue = "long"
        queued_jobs = get_jobs(site=frappe.local.site, queue=queue)
        method = "erpnextswiss.erpnextswiss.page.bkp_importer.bkp_importer._import_update_items"
        job_name = 'Import / Update Items from BKP File(s)'
        if method not in queued_jobs[frappe.local.site]:
            # "The import/update has started. Please wait until the background job has run,
            # then check the error log for any errors."
            frappe.msgprint(
                _("Der Import / Das Updaten wurde gestartet. Bitte warten Sie bis der Backgroundjob ausgeführt wurde.<br>Prüfen Sie im Anschluss den Errorlog auf allfällige Fehler."),
                'Import / Update gestartet')
            enqueue(method=method, queue=queue, timeout=max_time, event=None, is_async=True,
                job_name=job_name, now=False, enqueue_after_commit=False, **args)
            return
        else:
            # "The backup job has already been started. Please wait until the system tells you it is done."
            frappe.msgprint(
                _("Der Backup Job wurde bereits gestartet. Bitte warten Sie bis das System Ihnen mitteilt dass der Job erledigt ist."),
                'Bitte Warten')
    except:
        # "Something went wrong."
        return frappe.msgprint("Es ist etwas schief gelaufen.")

def make_auto_repeat_entry(auto_repeat=None):
    enqueued_method = 'frappe.automation.doctype.auto_repeat.auto_repeat.create_repeated_entries'
    jobs = get_jobs()
    if not jobs or enqueued_method not in jobs[frappe.local.site]:
        data = get_auto_repeat_entries(auto_repeat=auto_repeat)
        frappe.enqueue(enqueued_method, data=data)

def make_auto_repeat_entry(date=None):
    enqueued_method = 'frappe.desk.doctype.auto_repeat.auto_repeat.create_repeated_entries'
    jobs = get_jobs()
    if not jobs or enqueued_method not in jobs[frappe.local.site]:
        date = date or today()
        for data in get_auto_repeat_entries(date):
            frappe.enqueue(enqueued_method, data=data)

def enqueue_events(site):
    if schedule_jobs_based_on_activity():
        frappe.flags.enqueued_jobs = []
        queued_jobs = get_jobs(site=site, key='job_type').get(site) or []
        for job_type in frappe.get_all('Scheduled Job Type', ('name', 'method'), dict(stopped=0)):
            if job_type.method not in queued_jobs:
                # only enqueue if this job type is not already waiting in the queue
                frappe.get_doc('Scheduled Job Type', job_type.name).enqueue()

def new_event_notification(producer_url):
    """Pull data from producer when notified"""
    enqueued_method = 'frappe.event_streaming.doctype.event_producer.event_producer.pull_from_node'
    jobs = get_jobs()
    if not jobs or enqueued_method not in jobs[frappe.local.site]:
        frappe.enqueue(enqueued_method, queue='default', **{'event_producer': producer_url})

def make_auto_repeat_entry():
    enqueued_method = "frappe.automation.doctype.auto_repeat.auto_repeat.create_repeated_entries"
    jobs = get_jobs()
    if not jobs or enqueued_method not in jobs[frappe.local.site]:
        date = getdate(today())
        data = get_auto_repeat_entries(date)
        frappe.enqueue(enqueued_method, data=data)

def is_job_queued(job_name, queue="default"):
    """
    :param job_name: used to identify a queued job, usually dotted path to function
    :param queue: should be either long, default or short
    """
    site = frappe.local.site
    queued_jobs = get_jobs(site=site, queue=queue, key="job_name").get(site)
    return queued_jobs and job_name in queued_jobs

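A small usage sketch of the helper above: the check matches the job_name kwarg passed to frappe.enqueue, so the guard and the enqueue call must agree on it. The dotted path and wrapper function are hypothetical, and is_job_queued is assumed to be importable from wherever it is defined in your app.

import frappe

# Hypothetical job path, used only for illustration.
REPORT_JOB = "myapp.tasks.generate_weekly_report"


def enqueue_weekly_report():
    # Skip enqueueing if an identical job_name is already waiting in the default queue.
    if not is_job_queued(REPORT_JOB, queue="default"):
        frappe.enqueue(REPORT_JOB, queue="default", job_name=REPORT_JOB)
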
def schedule_files_backup(user_email):
    from frappe.utils.background_jobs import enqueue, get_jobs

    queued_jobs = get_jobs(site=frappe.local.site, queue="long")
    method = 'frappe.desk.page.backups.backups.backup_files_and_notify_user'
    if method not in queued_jobs[frappe.local.site]:
        enqueue(method, queue='long', user_email=user_email)
        frappe.msgprint(_("Queued for backup. You will receive an email with the download link"))
    else:
        frappe.msgprint(_("Backup job is already queued. You will receive an email with the download link"))

def after_insert(self):
    """Send update notifications to event consumers whenever an update log is generated"""
    enqueued_method = (
        "frappe.event_streaming.doctype.event_consumer.event_consumer.notify_event_consumers"
    )
    jobs = get_jobs()
    if not jobs or enqueued_method not in jobs[frappe.local.site]:
        frappe.enqueue(
            enqueued_method, doctype=self.ref_doctype, queue="long", enqueue_after_commit=True
        )

def sync_accounts():
    frappe.has_permission('GCalendar Settings', throw=True)
    accounts = frappe.get_all("GCalendar Account", filters={'enabled': 1})
    queued_jobs = get_jobs(site=frappe.local.site, key='job_name')[frappe.local.site]
    for account in accounts:
        job_name = 'google_calendar_sp_sync|{0}'.format(account.name)
        if job_name not in queued_jobs:
            frappe.enqueue('shared_place.shared_place.shared_place_connector.run_sync',
                queue='long', timeout=1500, job_name=job_name, account=account)
            time.sleep(5)

def enqueue_events_for_all_sites():
    '''Loop through sites and enqueue events that are not already queued'''
    with frappe.init_site():
        jobs_per_site = get_jobs()
        sites = get_sites()

    for site in sites:
        try:
            enqueue_events_for_site(site=site, queued_jobs=jobs_per_site[site])
        except:
            # it should try to enqueue other sites
            print(frappe.get_traceback())

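How the missing-site case is handled varies across these snippets: some index jobs_per_site[site] directly, while others fall back with .get(site) or []. If the mapping returned by get_jobs() is a plain dict in a given Frappe version, direct indexing can raise KeyError for a site with nothing queued, and the bare except above would quietly swallow it. Below is a defensive sketch of the same loop, written as a standalone function with the callables taken as parameters; it is illustrative, not the upstream implementation.

import frappe


def enqueue_events_defensively(sites, jobs_per_site, enqueue_events_for_site):
    """Illustrative variant of the per-site loop above.

    Uses .get(site, []) so a site with nothing queued yields an empty list
    regardless of how get_jobs() represents missing keys, and catches
    Exception so one failing site does not stop the others.
    """
    for site in sites:
        try:
            enqueue_events_for_site(site=site, queued_jobs=jobs_per_site.get(site, []))
        except Exception:
            print(frappe.get_traceback())
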
def sync(self):
    """Create and execute Data Migration Run for GCalendar Sync plan"""
    frappe.has_permission('GCalendar Settings', throw=True)
    accounts = frappe.get_all("GCalendar Account", filters={'enabled': 1})
    queued_jobs = get_jobs(site=frappe.local.site, key='job_name')[frappe.local.site]
    for account in accounts:
        job_name = 'google_calendar_sync|{0}'.format(account.name)
        if job_name not in queued_jobs:
            frappe.enqueue('frappe.integrations.doctype.gcalendar_settings.gcalendar_settings.run_sync',
                queue='long', timeout=1500, job_name=job_name, account=account)
            time.sleep(5)

def pull(now=False):
    """Will be called via scheduler, pull emails from all enabled Email accounts."""
    queued_jobs = get_jobs(site=frappe.local.site, key='job_name')[frappe.local.site]
    for email_account in frappe.get_list("Email Account", filters={"enable_incoming": 1}):
        if now:
            pull_from_email_account(email_account.name)
        else:
            # job_name is used to prevent duplicates in queue
            job_name = 'pull_from_email_account|{0}'.format(email_account.name)
            if job_name not in queued_jobs:
                enqueue(pull_from_email_account, 'short', event='all', job_name=job_name,
                    email_account=email_account.name)

def test_schedule():
    jobs_per_site = {}
    if os.path.exists(os.path.join('.', '.restarting')):
        # Don't add task to queue if webserver is in restart mode
        return

    with frappe.init_site():
        jobs_per_site = get_jobs()
        sites = get_sites()

    #print(jobs_per_site)
    for site in sites:
        conf = frappe.get_site_config("/home/frappe/frappe-bench/sites",
            "/home/frappe/frappe-bench/sites/" + site)
        if conf.db_name != "db_commeta":
            continue
        print(site)
        print(conf.db_name)

def enqueue_events_for_all_sites():
    '''Loop through sites and enqueue events that are not already queued'''
    if os.path.exists(os.path.join('.', '.restarting')):
        # Don't add task to queue if webserver is in restart mode
        return

    with frappe.init_site():
        jobs_per_site = get_jobs()
        sites = get_sites()

    for site in sites:
        try:
            enqueue_events_for_site(site=site, queued_jobs=jobs_per_site[site])
        except Exception as e:
            print(e.__class__, 'Failed to enqueue events for site: {}'.format(site))

def enqueue_events_for_all_sites():
    '''Loop through sites and enqueue events that are not already queued'''
    if os.path.exists(os.path.join('.', '.restarting')):
        # Don't add task to queue if webserver is in restart mode
        return

    with frappe.init_site():
        jobs_per_site = get_jobs()
        sites = get_sites()

    for site in sites:
        try:
            enqueue_events_for_site(site=site, queued_jobs=jobs_per_site[site])
        except:
            # it should try to enqueue other sites
            print(frappe.get_traceback())

def sync(self):
    """Create and execute Data Migration Run for GCalendar Sync plan"""
    frappe.has_permission('GCalendar Settings', throw=True)
    accounts = frappe.get_all("GCalendar Account", filters={'enabled': 1})
    queued_jobs = get_jobs(site=frappe.local.site, key='job_name')[frappe.local.site]
    for account in accounts:
        job_name = 'google_calendar_sync|{0}'.format(account.name)
        if job_name not in queued_jobs:
            frappe.enqueue(
                'frappe.integrations.doctype.gcalendar_settings.gcalendar_settings.run_sync',
                queue='long', timeout=1500, job_name=job_name, account=account)
            time.sleep(5)

def trigger(site, event, last=None, queued_jobs=(), now=False):
    """Trigger method in hooks.scheduler_events."""
    queue = 'long' if event.endswith('_long') else 'short'
    timeout = queue_timeout[queue]
    if not queued_jobs and not now:
        queued_jobs = get_jobs(site=site, queue=queue)

    if frappe.flags.in_test:
        frappe.flags.ran_schedulers.append(event)

    events_from_hooks = get_scheduler_events(event)
    if not events_from_hooks:
        return

    events = events_from_hooks
    if not now:
        events = []
        if event == "cron":
            for e in events_from_hooks:
                e = cron_map.get(e, e)
                if croniter.is_valid(e):
                    if croniter(e, last).get_next(datetime) <= frappe.utils.now_datetime():
                        events.extend(events_from_hooks[e])
                else:
                    frappe.log_error("Cron string " + e + " is not valid",
                        "Error triggering cron job")
                    frappe.logger(__name__).error(
                        'Exception in Trigger Events for Site {0}, Cron String {1}'.format(site, e))
        else:
            if croniter(cron_map[event], last).get_next(datetime) <= frappe.utils.now_datetime():
                events.extend(events_from_hooks)

    for handler in events:
        if not now:
            if handler not in queued_jobs:
                enqueue(handler, queue, timeout, event)
        else:
            scheduler_task(site=site, event=event, handler=handler, now=True)

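The cron branch above relies on croniter to decide whether a hook is due: given a cron expression and the timestamp of the last run, croniter(...).get_next(datetime) yields the next scheduled time, and the handlers fire if that moment has already passed. A standalone illustration, with a made-up cron string and timestamps:

from datetime import datetime

from croniter import croniter

last_run = datetime(2024, 1, 1, 2, 0)   # when the event last fired (illustrative)
expr = "0 3 * * *"                      # "every day at 03:00" (illustrative)

if croniter.is_valid(expr):
    next_run = croniter(expr, last_run).get_next(datetime)   # -> 2024-01-01 03:00
    if next_run <= datetime(2024, 1, 1, 3, 15):              # pretend "now" is 03:15
        print("due: run the handlers registered for this cron string")
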
def ensure_job_run():
    jobs = get_scheduled_jobs(as_df=True)
    now = frappe.utils.now_datetime()

    failed_jobs = jobs[jobs.status == 'Failure']
    unfinished_jobs = jobs[jobs.expected_finish < now]
    if not unfinished_jobs.empty:
        unfinished_jobs = unfinished_jobs[unfinished_jobs.status == 'Started']

    to_retry = pd.concat([failed_jobs, unfinished_jobs])

    site_name = frappe.local.site
    running_jobs = set([
        job for job in (get_jobs(site=site_name).get(site_name) or [])
        if job in to_retry.index.values
    ])

    for method, row in to_retry.iterrows():
        frappe.set_value('Job Run', row.job_run_id, 'status', 'Retried')
        if method in running_jobs:
            continue
        running_jobs.add(method)
        retry_job(method, row.queue_name)

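ensure_job_run() leans on pandas boolean masks to pick out jobs to retry: rows whose status is 'Failure', plus rows that were 'Started' but are past their expected finish time. A minimal standalone illustration of that filtering; the column names mirror the snippet, while the index values and data are made up.

from datetime import datetime

import pandas as pd

jobs = pd.DataFrame(
    {
        "status": ["Failure", "Started", "Complete"],
        "expected_finish": [
            datetime(2024, 1, 1, 1, 0),
            datetime(2024, 1, 1, 2, 0),
            datetime(2024, 1, 1, 3, 0),
        ],
        "queue_name": ["long", "default", "short"],
    },
    index=["app.tasks.a", "app.tasks.b", "app.tasks.c"],  # dotted method paths
)

now = datetime(2024, 1, 1, 4, 0)
failed = jobs[jobs.status == "Failure"]
overdue = jobs[(jobs.expected_finish < now) & (jobs.status == "Started")]
to_retry = pd.concat([failed, overdue])
print(to_retry.index.tolist())   # ['app.tasks.a', 'app.tasks.b']
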
def pull(now=False):
    """Will be called via scheduler, pull emails from all enabled Email accounts."""
    if frappe.cache().get_value("workers:no-internet") == True:
        if test_internet():
            frappe.cache().set_value("workers:no-internet", False)
        else:
            return

    queued_jobs = get_jobs(site=frappe.local.site, key='job_name')[frappe.local.site]
    for email_account in frappe.get_list("Email Account",
            filters={"enable_incoming": 1, "awaiting_password": 0}):
        if now:
            pull_from_email_account(email_account.name)
        else:
            # job_name is used to prevent duplicates in queue
            job_name = 'pull_from_email_account|{0}'.format(email_account.name)
            if job_name not in queued_jobs:
                enqueue(pull_from_email_account, 'short', event='all', job_name=job_name,
                    email_account=email_account.name)

def trigger(site, event, queued_jobs=(), now=False):
    """trigger method in hooks.scheduler_events"""
    queue = 'long' if event.endswith('_long') else 'short'
    timeout = queue_timeout[queue]
    if not queued_jobs and not now:
        queued_jobs = get_jobs(site=site, queue=queue)

    if frappe.flags.in_test:
        frappe.flags.ran_schedulers.append(event)

    events = get_scheduler_events(event)
    if not events:
        return

    for handler in events:
        if not now:
            if handler not in queued_jobs:
                enqueue(handler, queue, timeout, event)
        else:
            scheduler_task(site=site, event=event, handler=handler, now=True)

def import_update_items(xml_files):
    try:
        site_name = get_site_name(frappe.local.request.host)
        if site_name == 'localhost':
            site_name = 'site1.local'
        max_time = 4800
        args = {
            'xml_files': xml_files,
            'site_name': site_name
        }
        queue = "default"
        queued_jobs = get_jobs(site=frappe.local.site, queue=queue)
        method = "hlk.hlk.page.bkp_importer.bkp_importer._import_update_items"
        job_name = 'Import / Update Items from BKP File(s)'
        if method not in queued_jobs[frappe.local.site]:
            # "The import/update has started. Please wait until the system tells you the task has run."
            frappe.msgprint(
                _("Der Import / Das Updaten wurde gestartet. Bitte warten Sie bis das System Ihnen mitteilt, dass der Auftrag ausgeführt wurde."),
                'Import / Update gestartet')
            enqueue(method=method, queue=queue, timeout=max_time, event=None, is_async=True,
                job_name=job_name, now=False, enqueue_after_commit=False, **args)
            return
        else:
            # "The backup job has already been started. Please wait until the system tells you it is done."
            frappe.msgprint(
                _("Der Backup Job wurde bereits gestartet. Bitte warten Sie bis das System Ihnen mitteilt dass der Job erledigt ist."),
                'Bitte Warten')
    except:
        return 'Error'

def trigger(site, event, last=None, queued_jobs=(), now=False):
    """Trigger method in hooks.scheduler_events."""
    queue = 'long' if event.endswith('_long') else 'short'
    timeout = queue_timeout[queue]
    if not queued_jobs and not now:
        queued_jobs = get_jobs(site=site, queue=queue)

    if frappe.flags.in_test:
        frappe.flags.ran_schedulers.append(event)

    events_from_hooks = get_scheduler_events(event)
    if not events_from_hooks:
        return

    events = events_from_hooks
    if not now:
        events = []
        if event == "cron":
            for e in events_from_hooks:
                e = cron_map.get(e, e)
                if croniter.is_valid(e):
                    if croniter(e, last).get_next(datetime) <= frappe.utils.now_datetime():
                        events.extend(events_from_hooks[e])
                else:
                    frappe.log_error("Cron string " + e + " is not valid", "Error triggering cron job")
                    frappe.logger(__name__).error(
                        'Exception in Trigger Events for Site {0}, Cron String {1}'.format(site, e))
        else:
            if croniter(cron_map[event], last).get_next(datetime) <= frappe.utils.now_datetime():
                events.extend(events_from_hooks)

    for handler in events:
        if not now:
            if handler not in queued_jobs:
                enqueue(handler, queue, timeout, event)
        else:
            scheduler_task(site=site, event=event, handler=handler, now=True)

def is_job_in_queue(self):
    queued_jobs = get_jobs(site=frappe.local.site, key='job_type')[frappe.local.site]
    return self.method in queued_jobs

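is_job_in_queue() groups queued jobs by their job_type keyword, so it only finds jobs that were enqueued with an explicit job_type argument, and how optional keyword arguments are surfaced to get_jobs() differs between Frappe versions. The sketch below pairs the check with a matching enqueue call; the class, method path, and run_my_job entry point are illustrative stand-ins, not the actual Scheduled Job Type controller.

import frappe
from frappe.utils.background_jobs import get_jobs


def run_my_job(job_type):
    """Illustrative worker entry point: resolve the dotted path and run it."""
    frappe.get_attr(job_type)()


class JobDispatcher:
    """Illustrative stand-in for a controller that owns is_job_in_queue()."""

    def __init__(self, method):
        self.method = method  # dotted path, e.g. "myapp.tasks.cleanup" (hypothetical)

    def is_job_in_queue(self):
        queued_jobs = get_jobs(site=frappe.local.site, key='job_type')[frappe.local.site]
        return self.method in queued_jobs

    def enqueue(self):
        # Pass job_type explicitly so a later get_jobs(key='job_type') lookup can see it.
        if not self.is_job_in_queue():
            frappe.enqueue(run_my_job, queue='default', job_type=self.method)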