def save_customization(self):
	"""Persist property setters, custom fields and name translation for the
	customized doctype, then refresh caches and dependent structures."""
	if not self.doc_type:
		return

	# start pessimistic; the setters below flip these flags when needed
	self.flags.update_db = False
	self.flags.rebuild_doctype_for_global_search = False

	self.set_property_setters()
	self.update_custom_fields()
	self.set_name_translation()
	validate_fields_for_doctype(self.doc_type)

	if self.flags.update_db:
		from dataent.model.db_schema import updatedb
		updatedb(self.doc_type)

	# missing attribute or a falsy value both mean "show the message"
	if not getattr(self, 'hide_success', False):
		dataent.msgprint(_("{0} updated").format(_(self.doc_type)))

	dataent.clear_cache(doctype=self.doc_type)
	self.fetch_to_customize()

	if self.flags.rebuild_doctype_for_global_search:
		dataent.enqueue('dataent.utils.global_search.rebuild_for_doctype',
			now=True, doctype=self.doc_type)
def history(room, fields=None, limit=10, start=None, end=None):
	"""Return the message history of a chat room, ordered by creation.

	:param room: name of the Chat Room document.
	:param fields: optional list of Chat Message fields to fetch; when
		omitted a default field set (including the raw '_seen' column) is used.
	:param limit: NOTE(review): currently unused — no limit is applied.
	:param start: NOTE(review): currently unused.
	:param end: NOTE(review): currently unused.
	"""
	room = dataent.get_doc('Chat Room', room)
	mess = dataent.get_all('Chat Message',
		filters=[
			('Chat Message', 'room', '=', room.name),
			('Chat Message', 'room_type', '=', room.type)
		],
		fields=fields if fields else [
			'name', 'room_type', 'room', 'content',
			'type', 'user', 'mentions', 'urls',
			'creation', '_seen'
		],
		order_by='creation')
	if not fields or 'seen' in fields:
		# expose the JSON '_seen' column as a parsed 'seen' list
		# NOTE(review): when `fields` is passed containing 'seen', '_seen' was
		# not fetched above — confirm callers never request 'seen' explicitly
		for m in mess:
			m['seen'] = json.loads(m._seen) if m._seen else []
			del m['_seen']
	if not fields or 'content' in fields:
		for m in mess:
			# File messages store their content as JSON metadata
			m['content'] = json.loads(m.content) if m.type in ["File"] else m.content
	# mark everything we just fetched as seen, in the background
	dataent.enqueue(
		'dataent.chat.doctype.chat_message.chat_message.mark_messages_as_seen',
		message_names=[m.name for m in mess], user=dataent.session.user)
	return mess
def create_site(site_name, install_epaas, mysql_password, admin_password, key):
	"""Queue creation of a new bench site, wait for it to appear on disk,
	then record it as a Site document."""
	verify_whitelisted_call()

	commands = [
		"bench new-site --mariadb-root-password {mysql_password} --admin-password {admin_password} {site_name}".format(
			site_name=site_name, admin_password=admin_password,
			mysql_password=mysql_password)
	]
	if install_epaas == "true":
		# fetch the app first unless the bench already has it
		with open('apps.txt', 'r') as f:
			app_list = f.read()
		if 'epaas' not in app_list:
			commands.append("bench get-app epaas")
		commands.append("bench --site {site_name} install-app epaas".format(
			site_name=site_name))

	dataent.enqueue('bench_manager.bench_manager.utils.run_command',
		commands=commands, doctype="Bench Settings", key=key)

	def _list_sites():
		return safe_decode(check_output("ls")).strip('\n').split('\n')

	# poll until the site directory shows up
	all_sites = _list_sites()
	while site_name not in all_sites:
		time.sleep(2)
		print("waiting for site creation...")
		all_sites = _list_sites()

	doc = dataent.get_doc({
		'doctype': 'Site',
		'site_name': site_name,
		'app_list': 'dataent',
		'developer_flag': 1
	})
	doc.insert()
	dataent.db.commit()
def enqueue_update_cost():
	"""Queue a background job that refreshes the latest price in every BOM,
	and tell the user it has been queued."""
	job = "epaas.manufacturing.doctype.bom_update_tool.bom_update_tool.update_cost"
	dataent.enqueue(job)
	dataent.msgprint(
		_("Queued for updating latest price in all Bill of Materials. It may take a few minutes."))
def validate_clean_description_html(self):
	"""When the 'clean description html' setting transitions from off to on,
	queue a one-off cleanup of all existing item descriptions."""
	enabled_now = int(self.clean_description_html or 0)
	enabled_before = int(self.db_get('clean_description_html') or 0)
	if enabled_now and not enabled_before:
		# changed to text
		dataent.enqueue(
			'epaas.stock.doctype.stock_settings.stock_settings.clean_all_descriptions',
			now=dataent.flags.in_test)
def trigger_feedback_request(doc, method):
	"""Trigger the feedback alert, or delete feedback requests on delete"""
	def _get():
		# map document_type -> trigger name for enabled triggers;
		# skipped entirely during migrate/install
		if dataent.flags.in_migrate or dataent.flags.in_install:
			return {}
		rows = dataent.get_all('Feedback Trigger', dict(enabled=1),
			['name', 'document_type'])
		return {d.document_type: d.name for d in rows}

	feedback_triggers = dataent.cache().get_value('feedback_triggers', _get)

	if doc.doctype not in feedback_triggers:
		return

	if doc.flags.in_delete:
		# clean up any pending requests/feedback for the deleted document
		dataent.enqueue(
			'dataent.core.doctype.feedback_trigger.feedback_trigger.delete_feedback_request_and_feedback',
			reference_doctype=doc.doctype, reference_name=doc.name,
			now=dataent.flags.in_test)
	else:
		dataent.enqueue(
			'dataent.core.doctype.feedback_trigger.feedback_trigger.send_feedback_request',
			trigger=feedback_triggers[doc.doctype],
			reference_doctype=doc.doctype, reference_name=doc.name,
			now=dataent.flags.in_test)
def dump_request_data(data, event="create/order"):
	"""Persist an incoming Shopify webhook payload as a Shopify Log and
	queue the handler mapped to `event`."""
	event_mapper = {
		"orders/create": get_webhook_address(connector_name='shopify_connection',
			method="sync_sales_order", exclude_uri=True),
		"orders/paid": get_webhook_address(connector_name='shopify_connection',
			method="prepare_sales_invoice", exclude_uri=True),
		"orders/fulfilled": get_webhook_address(connector_name='shopify_connection',
			method="prepare_delivery_note", exclude_uri=True),
	}
	handler = event_mapper[event]

	log = dataent.get_doc({
		"doctype": "Shopify Log",
		"request_data": json.dumps(data, indent=1),
		"method": handler,
	}).insert(ignore_permissions=True)
	# commit so the log survives even if the queued job fails
	dataent.db.commit()

	dataent.enqueue(method=handler, queue='short', timeout=300, is_async=True,
		order=data, request_id=log.name)
def create_site(site_name, mysql_password, admin_password, key):
	"""Queue creation of a new bench site, wait for it to appear on disk,
	then record it as a Site document.

	Fixes:
	- the Python 2 ``print`` statement was a SyntaxError under Python 3;
	- ``check_output`` returns bytes on Python 3, so the output is decoded
	  before string operations (mirrors the sibling ``create_site``).
	"""
	commands = [
		"bench new-site --mariadb-root-password {mysql_password} --admin-password {admin_password} {site_name}".format(
			site_name=site_name, admin_password=admin_password,
			mysql_password=mysql_password)
	]
	commands.append("bench --site {site_name} install-app epaas".format(
		site_name=site_name))

	dataent.enqueue('bench_manager.clienttestapi.run_command',
		commands=commands, doctype="Bench Settings", key=key)

	# poll until the site directory shows up
	all_sites = check_output("ls").decode().strip('\n').split('\n')
	while site_name not in all_sites:
		time.sleep(2)
		print("waiting for site creation...")
		all_sites = check_output("ls").decode().strip('\n').split('\n')

	doc = dataent.get_doc({
		'doctype': 'Site',
		'site_name': site_name,
		'app_list': 'dataent',
		'developer_flag': 1
	})
	doc.insert()
	dataent.db.commit()
def assign_salary_structure(self, grade=None, department=None, designation=None,
		employee=None, from_date=None, base=None, variable=None):
	"""Assign this salary structure to all matching employees.

	Batches larger than 20 employees are handed to the background queue to
	avoid request timeouts; smaller ones run inline."""
	employees = self.get_employees(grade=grade, department=department,
		designation=designation, name=employee)

	if not employees:
		dataent.msgprint(_("No Employee Found"))
		return

	if len(employees) > 20:
		dataent.enqueue(assign_salary_structure_for_employees, timeout=600,
			employees=employees, salary_structure=self,
			from_date=from_date, base=base, variable=variable)
	else:
		assign_salary_structure_for_employees(employees, self,
			from_date=from_date, base=base, variable=variable)
def restore_backup(doctype, docname, on_a_new_site, existing_site,
		new_site_name, mysql_password, admin_password, key):
	"""Queue bench commands that restore a Site Backup, either onto an
	existing site or onto a freshly created one."""
	verify_whitelisted_call()
	backup = dataent.get_doc('Site Backup', docname)

	password_suffix = "--admin-password {admin_password} --mariadb-root-password {mysql_password}".format(
		mysql_password=mysql_password, admin_password=admin_password)

	commands = []
	if on_a_new_site == '1':
		site_name = new_site_name
		commands.append("bench new-site {site_name} {password_suffix}".format(
			site_name=site_name, password_suffix=password_suffix))
	else:
		site_name = existing_site

	command = "bench --site {site_name} --force restore {backup_file_path}_database.sql".format(
		site_name=site_name, backup_file_path=backup.file_path)
	# fall back to the gzipped dump when the plain .sql file is absent
	if not os.path.isfile("{backup_file_path}_database.sql".format(
			backup_file_path=backup.file_path)):
		command += ".gz"
	if backup.public_file_backup:
		command += " --with-public-files ../{backup_file_path}_files.tar".format(
			backup_file_path=backup.file_path)
	if backup.private_file_backup:
		command += " --with-private-files ../{backup_file_path}_private_files.tar".format(
			backup_file_path=backup.file_path)
	command += " {password_suffix}".format(password_suffix=password_suffix)
	commands.append(command)

	dataent.enqueue('bench_manager.bench_manager.utils.run_command',
		commands=commands, doctype=doctype, key=key, docname=docname)
def ipn_handler():
	"""Receive a PayPal IPN, persist it as an Integration Request and queue
	subscription-notification handling; invalid IPNs are dropped silently."""
	try:
		data = dataent.local.form_dict
		validate_ipn_request(data)

		data.update({"payment_gateway": "PayPal"})
		doc = dataent.get_doc({
			"data": json.dumps(dataent.local.form_dict),
			"doctype": "Integration Request",
			"integration_type": "Subscription Notification",
			"status": "Queued"
		}).insert(ignore_permissions=True)
		# commit so the request record survives independently of the job
		dataent.db.commit()

		dataent.enqueue(
			method='dataent.integrations.doctype.paypal_settings.paypal_settings.handle_subscription_notification',
			queue='long', timeout=600, is_async=True,
			doctype="Integration Request", docname=doc.name)
	except dataent.InvalidStatusError:
		pass
	except Exception as e:
		dataent.log(dataent.log_error(title=e))
def make_auto_repeat_entry(date=None):
	"""Queue creation of repeated entries due on `date` (defaults to today),
	skipping when the job is already queued for this site."""
	enqueued_method = 'dataent.desk.doctype.auto_repeat.auto_repeat.create_repeated_entries'
	jobs = get_jobs()
	if jobs and enqueued_method in jobs[dataent.local.site]:
		return
	for data in get_auto_repeat_entries(date or today()):
		dataent.enqueue(enqueued_method, data=data)
def enqueue_replace_bom(args):
	"""Queue a background BOM replacement.

	:param args: replacement arguments, either a dict or a JSON string."""
	if isinstance(args, string_types):
		args = json.loads(args)

	job = "epaas.manufacturing.doctype.bom_update_tool.bom_update_tool.replace_bom"
	dataent.enqueue(job, args=args, timeout=4000)
	dataent.msgprint(_("Queued for replacing the BOM. It may take a few minutes."))
def console_command(self, key, caller, app_name=None, branch_name=None):
	"""Run the bench console action selected by `caller` in the background."""
	actions = {
		"bench_update": ["bench update"],
		"switch_branch": [""],
		"get-app": ["bench get-app {app_name}".format(app_name=app_name)],
	}
	dataent.enqueue('bench_manager.bench_manager.utils.run_command',
		commands=actions[caller], doctype=self.doctype,
		key=key, docname=self.name)
def on_update(self):
	"""Post-save hook: validation, sharing, notifications, caches, contact
	creation and (for normal users without an image) gravatar fetch."""
	# clear new password
	self.validate_user_limit()
	self.share_with_self()
	clear_notifications(user=self.name)
	dataent.clear_cache(user=self.name)
	self.send_password_notification(self.__new_password)
	create_contact(self, ignore_mandatory=True)
	# built-in accounts and users with an image keep their picture as-is
	if self.name in ('Administrator', 'Guest') or self.user_image:
		return
	dataent.enqueue('dataent.core.doctype.user.user.update_gravatar',
		name=self.name)
def submit_salary_slips(self):
	"""Submit all draft salary slips of this payroll entry; batches larger
	than 30 are handed to the background queue."""
	self.check_permission('write')

	ss_list = self.get_sal_slip_list(ss_status=0)
	if len(ss_list) <= 30:
		submit_salary_slips_for_employees(self, ss_list, publish_progress=False)
	else:
		dataent.enqueue(submit_salary_slips_for_employees, timeout=600,
			payroll_entry=self, salary_slips=ss_list)
def sync(self):
	"""Create and execute Data Migration Run for GCalendar Sync plan"""
	dataent.has_permission('GCalendar Settings', throw=True)

	accounts = dataent.get_all("GCalendar Account", filters={'enabled': 1})
	queued_jobs = get_jobs(site=dataent.local.site, key='job_name')[dataent.local.site]

	for account in accounts:
		job_name = 'google_calendar_sync|{0}'.format(account.name)
		if job_name in queued_jobs:
			# this account's sync is already waiting in the queue
			continue
		dataent.enqueue(
			'dataent.integrations.doctype.gcalendar_settings.gcalendar_settings.run_sync',
			queue='long', timeout=1500, job_name=job_name, account=account)
		# stagger enqueues between accounts
		time.sleep(5)
def automatic_synchronization():
	"""Scheduled hook: queue a transaction sync for every Plaid-linked bank
	account when both the integration and automatic sync are enabled."""
	settings = dataent.get_doc("Plaid Settings", "Plaid Settings")
	if settings.enabled == 1 and settings.automatic_sync == 1:
		# fixed: the keyword was misspelled as `filter=`, which get_all does
		# not recognise, so unlinked bank accounts were fetched as well
		plaid_accounts = dataent.get_all("Bank Account",
			filters={"integration_id": ["!=", ""]},
			fields=["name", "bank"])
		for plaid_account in plaid_accounts:
			dataent.enqueue(
				"epaas.epaas_integrations.doctype.plaid_settings.plaid_settings.sync_transactions",
				bank=plaid_account.bank, bank_account=plaid_account.name)
def _webhook_request(webhook):
	"""Queue one webhook execution for `doc` (closure variable), at most once
	per document per request."""
	# PEP 8 idiom: `x not in y` instead of `not x in y`
	if webhook.name not in dataent.flags.webhooks_executed.get(doc.name, []):
		dataent.enqueue(
			"dataent.integrations.doctype.webhook.webhook.enqueue_webhook",
			enqueue_after_commit=True, doc=doc, webhook=webhook)

		# keep list of webhooks executed for this doc in this request
		# so that we don't run the same webhook for the same document multiple times
		# in one request
		dataent.flags.webhooks_executed.setdefault(doc.name, []).append(webhook.name)
def resync(method, name, request_data):
	"""Mark a Shopify Log entry as queued again and re-enqueue its handler
	with the originally captured order payload."""
	dataent.db.set_value("Shopify Log", name, "status", "Queued",
		update_modified=False)
	dataent.enqueue(method=method, queue='short', timeout=300, is_async=True,
		order=json.loads(request_data), request_id=name)
def send_reminder():
	"""Send a GSTIN reminder email to all parties with email contacts,
	throttled to at most once every 3 days.

	Raises a validation error when a reminder was already sent within the
	last 3 days.
	"""
	dataent.has_permission('GST Settings', throw=True)

	# throttle: refuse if the last reminder went out less than 3 days ago
	last_sent = dataent.db.get_single_value('GST Settings', 'gstin_email_sent_on')
	if last_sent and date_diff(nowdate(), last_sent) < 3:
		dataent.throw(_("Please wait 3 days before resending the reminder."))

	# record the send date before queueing the job
	dataent.db.set_value('GST Settings', 'GST Settings', 'gstin_email_sent_on', nowdate())

	# enqueue as there may be a large number of customers and suppliers
	dataent.enqueue(
		'epaas.regional.doctype.gst_settings.gst_settings.send_gstin_reminder_to_all_parties'
	)
	dataent.msgprint(
		_('Email Reminders will be sent to all parties with email contacts'))
def console_command(self, key, caller, alias=None, app_name=None,
		admin_password=None, mysql_password=None):
	"""Queue the bench command(s) for the requested site action.

	:param caller: key selecting the action (migrate, create-alias,
		delete-alias, backup, reinstall, install_app, uninstall_app, drop_site).
	:returns: the string "executed" once the job has been queued.
	"""
	site_abspath = None
	if alias:
		# fixed: os.path.join with a single argument is a no-op — abspath alone suffices
		site_abspath = os.path.abspath(self.name)
	commands = {
		"migrate": ["bench --site {site_name} migrate".format(site_name=self.name)],
		"create-alias": [
			"ln -s {site_abspath} sites/{alias}".format(
				site_abspath=site_abspath, alias=alias)
		],
		"delete-alias": ["rm sites/{alias}".format(alias=alias)],
		"backup": [
			"bench --site {site_name} backup --with-files".format(
				site_name=self.name)
		],
		"reinstall": [
			"bench --site {site_name} reinstall --yes --admin-password {admin_password}"
			.format(site_name=self.name, admin_password=admin_password)
		],
		"install_app": [
			"bench --site {site_name} install-app {app_name}".format(
				site_name=self.name, app_name=app_name)
		],
		"uninstall_app": [
			"bench --site {site_name} uninstall-app {app_name} --yes".format(
				site_name=self.name, app_name=app_name)
		],
		"drop_site": [
			"bench drop-site {site_name} --root-password {mysql_password}".format(
				site_name=self.name, mysql_password=mysql_password)
		]
	}
	dataent.enqueue('bench_manager.bench_manager.utils.run_command',
		commands=commands[caller], doctype=self.doctype, key=key,
		docname=self.name)
	return "executed"
def update_variants(self):
	"""Propagate changes on this template Item to all of its variants;
	more than 30 variants are updated via a background job."""
	disabled = self.flags.dont_update_variants or \
		dataent.db.get_single_value('Item Variant Settings', 'do_not_update_variants')
	if disabled:
		return
	if not self.has_variants:
		return

	variants = dataent.db.get_all("Item", fields=["item_code"],
		filters={"variant_of": self.name})
	if not variants:
		return

	if len(variants) <= 30:
		update_variants(variants, self, publish_progress=False)
		dataent.msgprint(_("Item Variants updated"))
	else:
		dataent.enqueue("epaas.stock.doctype.item.item.update_variants",
			variants=variants, template=self,
			now=dataent.flags.in_test, timeout=600)
def console_command(self, key, caller, branch_name=None, remote=None, commit_msg=None):
	"""Queue the git command(s) for this app, selected by `caller`."""
	git_commands = {
		"git_init": ["git init", "git add .", "git commit -m 'Initial Commit'"],
		"switch_branch": ["git checkout {branch_name}".format(branch_name=branch_name)],
		"new_branch": ["git branch {branch_name}".format(branch_name=branch_name)],
		"delete_branch": ["git branch -D {branch_name}".format(branch_name=branch_name)],
		"git_fetch": ["git fetch --all"],
		"track-remote": ["git checkout -b {branch_name} -t {remote}".format(
			branch_name=branch_name, remote=remote)],
		"pull-rebase": ["git pull --rebase {remote} {branch_name}".format(
			branch_name=branch_name, remote=remote)],
		"commit": ["git add .",
			'git commit -m "{commit_msg}"'.format(commit_msg=commit_msg)],
		"stash": ["git add .", "git stash"],
		"apply-stash": ["git stash apply"],
	}
	# commands run from inside the app's checkout
	app_path = os.path.join('..', 'apps', self.name)
	dataent.enqueue('bench_manager.bench_manager.utils.run_command',
		commands=git_commands[caller], cwd=app_path,
		doctype=self.doctype, key=key, docname=self.name)
def enqueue_multiple_variant_creation(item, args):
	"""Create attribute-combination variants for `item`.

	Fewer than 10 combinations are created inline; larger sets are queued
	(returns 'queued'). Creation is refused at 600 or more combinations.

	Fixes:
	- `variants` was only bound when `args` was a JSON string, raising
	  NameError when a dict was passed;
	- the refusal message said 500 while the guard rejects at 600.
	"""
	# There can be innumerable attribute combinations, enqueue
	# accept both a JSON string and an already-parsed dict
	variants = json.loads(args) if isinstance(args, string_types) else args

	total_variants = 1
	for key in variants:
		total_variants *= len(variants[key])

	if total_variants >= 600:
		dataent.msgprint("Please do not create more than 600 items at a time",
			raise_exception=1)
		return

	if total_variants < 10:
		return create_multiple_variants(item, args)

	dataent.enqueue("epaas.controllers.item_variant.create_multiple_variants",
		item=item, args=args, now=dataent.flags.in_test)
	return 'queued'
def sync_global_search(self):
	'''If global search settings are changed, rebuild search properties for this table'''
	def _search_fields(meta):
		# fieldnames flagged for global search, plus 'name' when enabled
		names = [d.fieldname for d in meta.fields if d.in_global_search]
		if meta.show_name_in_global_search:
			names.append('name')
		return names

	before = _search_fields(self.before_update)
	after = _search_fields(self)

	if set(before) == set(after):
		return

	# rebuild inline outside web requests and during tests/installs
	now = (not dataent.request) or dataent.flags.in_test or dataent.flags.in_install
	dataent.enqueue('dataent.utils.global_search.rebuild_for_doctype',
		now=now, doctype=self.name)
def create_salary_slips(self):
	"""
	Creates salary slip for selected employees if already not created
	"""
	self.check_permission('write')
	self.created = 1

	emp_list = [d.employee for d in self.get_emp_list()]
	if not emp_list:
		return

	args = dataent._dict({
		"salary_slip_based_on_timesheet": self.salary_slip_based_on_timesheet,
		"payroll_frequency": self.payroll_frequency,
		"start_date": self.start_date,
		"end_date": self.end_date,
		"company": self.company,
		"posting_date": self.posting_date,
		"deduct_tax_for_unclaimed_employee_benefits": self.deduct_tax_for_unclaimed_employee_benefits,
		"deduct_tax_for_unsubmitted_tax_exemption_proof": self.deduct_tax_for_unsubmitted_tax_exemption_proof,
		"payroll_entry": self.name
	})

	if len(emp_list) > 30:
		# big payrolls run in the background to avoid request timeouts
		dataent.enqueue(create_salary_slips_for_employees, timeout=600,
			employees=emp_list, args=args)
	else:
		create_salary_slips_for_employees(emp_list, args, publish_progress=False)
def bulk_rename(doctype, rows=None, via_console=False):
	"""Bulk rename documents

	:param doctype: DocType to be renamed
	:param rows: list of documents as `((oldname, newname), ..)`"""
	if not rows:
		dataent.throw(_("Please select a valid csv file with data"))

	if not via_console:
		max_rows = 500
		if len(rows) > max_rows:
			dataent.throw(_("Maximum {0} rows allowed").format(max_rows))

	rename_log = []
	for row in rows:
		# skip rows without both an old and a new name
		if not (len(row) > 1 and row[0] and row[1]):
			continue
		try:
			if rename_doc(doctype, row[0], row[1]):
				msg = _("Successful: {0} to {1}").format(row[0], row[1])
				dataent.db.commit()
			else:
				msg = _("Ignored: {0} to {1}").format(row[0], row[1])
		except Exception as e:
			msg = _("** Failed: {0} to {1}: {2}").format(row[0], row[1], repr(e))
			dataent.db.rollback()

		if via_console:
			print(msg)
		else:
			rename_log.append(msg)

	dataent.enqueue('dataent.utils.global_search.rebuild_for_doctype',
		doctype=doctype)

	if not via_console:
		return rename_log
def sync_all(in_background=False):
	"""Queue background syncs of sites, apps and backups, then record the
	sync timestamp on Bench Settings."""
	if not in_background:
		dataent.msgprint('Sync has started and will run in the background...')
	verify_whitelisted_call()

	for job in (
			'bench_manager.bench_manager.doctype.bench_settings.bench_settings.sync_sites',
			'bench_manager.bench_manager.doctype.bench_settings.bench_settings.sync_apps',
			'bench_manager.bench_manager.doctype.bench_settings.bench_settings.sync_backups'):
		dataent.enqueue(job)

	dataent.set_value('Bench Settings', None, 'last_sync_timestamp',
		dataent.utils.time.time())
def delete_doc(doctype=None, name=None, force=0, ignore_doctypes=None, for_reload=False,
	ignore_permissions=False, flags=None, ignore_on_trash=False, ignore_missing=True):
	"""
		Deletes a doc(dt, dn) and validates if it is not submitted and not linked in a live record

	:param doctype: DocType of the document(s); falls back to form_dict 'dt'.
	:param name: a single name or a list of names; falls back to form_dict 'dn'.
	:param force: skip the linked-document checks when truthy.
	:param ignore_doctypes: child doctypes to skip when clearing tables.
	:param for_reload: deletion is part of a DocType reload (keeps customizations' side data).
	:param ignore_permissions: skip permission checks via update_flags.
	:param flags: extra flags merged onto each loaded doc.
	:param ignore_on_trash: skip the on_trash hook.
	:param ignore_missing: return False instead of raising when the doc does not exist.
	"""
	if not ignore_doctypes:
		ignore_doctypes = []

	# get from form
	if not doctype:
		doctype = dataent.form_dict.get('dt')
		name = dataent.form_dict.get('dn')

	# normalize to a list of names
	names = name
	if isinstance(name, string_types) or isinstance(name, integer_types):
		names = [name]

	for name in names or []:

		# already deleted..?
		if not dataent.db.exists(doctype, name):
			if not ignore_missing:
				raise dataent.DoesNotExistError
			else:
				return False

		# delete passwords
		delete_all_passwords_for(doctype, name)

		doc = None
		if doctype == "DocType":
			if for_reload:
				try:
					doc = dataent.get_doc(doctype, name)
				except dataent.DoesNotExistError:
					pass
				else:
					doc.run_method("before_reload")
			else:
				doc = dataent.get_doc(doctype, name)

				update_flags(doc, flags, ignore_permissions)
				check_permission_and_not_submitted(doc)

				# purge everything customized against this DocType
				dataent.db.sql("delete from `tabCustom Field` where dt = %s", name)
				dataent.db.sql("delete from `tabCustom Script` where dt = %s", name)
				dataent.db.sql("delete from `tabProperty Setter` where doc_type = %s", name)
				dataent.db.sql("delete from `tabReport` where ref_doctype=%s", name)
				dataent.db.sql("delete from `tabCustom DocPerm` where parent=%s", name)

				delete_from_table(doctype, name, ignore_doctypes, None)
		else:
			doc = dataent.get_doc(doctype, name)

			if not for_reload:
				update_flags(doc, flags, ignore_permissions)
				check_permission_and_not_submitted(doc)

				if not ignore_on_trash:
					doc.run_method("on_trash")
					doc.flags.in_delete = True
					doc.run_method('on_change')

				# dynamic links are cleared in the background (inline in tests)
				dataent.enqueue('dataent.model.delete_doc.delete_dynamic_links',
					doctype=doc.doctype, name=doc.name,
					is_async=False if dataent.flags.in_test else True)

				# check if links exist
				if not force:
					check_if_doc_is_linked(doc)
					check_if_doc_is_dynamically_linked(doc)

			update_naming_series(doc)
			delete_from_table(doctype, name, ignore_doctypes, doc)
			doc.run_method("after_delete")

			# delete attachments
			remove_all(doctype, name, from_delete=True)

		# delete global search entry
		delete_for_document(doc)

		if doc and not for_reload:
			add_to_deleted_document(doc)
			if not dataent.flags.in_patch:
				try:
					doc.notify_update()
					insert_feed(doc)
				except ImportError:
					pass

		# delete user_permissions
		dataent.defaults.clear_default(parenttype="User Permission", key=doctype, value=name)