def execute():
    """Patch: migrate the legacy Contact/Address link columns (customer,
    supplier, lead, sales_partner) into the generic Dynamic Link child table."""
    frappe.reload_doc('core', 'doctype', 'dynamic_link')
    frappe.reload_doc('email', 'doctype', 'contact')
    frappe.reload_doc('contact', 'doctype', 'address')

    # (target doctype, legacy fieldname) pairs to fold into `links`
    map_fields = (('Customer', 'customer'), ('Supplier', 'supplier'),
        ('Lead', 'lead'), ('Sales Partner', 'sales_partner'))

    for doctype in ('Contact', 'Address'):
        # only run while the legacy column still exists (patch not yet applied)
        if frappe.db.has_column(doctype, 'customer'):
            items = frappe.get_all(doctype)
            for i, doc in enumerate(items):
                doc = frappe.get_doc(doctype, doc.name)
                dirty = False
                for field in map_fields:
                    if doc.get(field[1]):
                        doc.append('links',
                            dict(link_doctype=field[0], link_name=doc.get(field[1])))
                        dirty = True
                if dirty:
                    deduplicate_dynamic_links(doc)
                    doc.update_children()
                update_progress_bar('Updating {0}'.format(doctype), i, len(items))
            # call print() — a bare `print` is a no-op expression under Python 3,
            # so the progress bar never got its terminating newline
            print()
def rebuild_global_search(context, static_pages=False):
    '''Setup help table in the current site (called after migrate)'''
    from frappe.utils.global_search import (get_doctypes_with_global_search,
        rebuild_for_doctype, get_routes_to_index, add_route_to_global_search,
        sync_global_search)

    for site in context.sites:
        try:
            frappe.init(site)
            frappe.connect()
            if static_pages:
                # index the static www/ routes
                routes = get_routes_to_index()
                total = len(routes)
                for idx, route in enumerate(routes):
                    add_route_to_global_search(route)
                    frappe.local.request = None
                    update_progress_bar('Rebuilding Global Search', idx, total)
                sync_global_search()
            else:
                # rebuild the index for every doctype with global search enabled
                searchable = get_doctypes_with_global_search()
                total = len(searchable)
                for idx, doctype in enumerate(searchable):
                    rebuild_for_doctype(doctype)
                    update_progress_bar('Rebuilding Global Search', idx, total)
        finally:
            frappe.destroy()
def translate_untranslated_from_google(lang):
    """Fill in missing translations for ``lang`` using Google Translate,
    committing each new Translated Message as it is saved."""
    if lang == "en":
        return

    # Google Translate uses different codes for the Chinese variants
    if lang == 'zh-cn':
        lang = 'zh'
    if lang == 'zh-tw':
        lang = 'zh-TW'

    if not get_lang_name(lang):
        print('{0} not supported by Google Translate'.format(lang))
        return

    count = 0
    untranslated = get_untranslated(lang)
    total = len(untranslated)

    for idx, (source, message) in enumerate(untranslated):
        if not frappe.db.get_value('Translated Message',
                {"source": source, "language": lang}):
            record = frappe.new_doc('Translated Message')
            record.language = lang
            record.source = source
            record.translated = get_translation_from_google(lang, message)
            try:
                record.save()
            except frappe.exceptions.ValidationError:
                continue
            count += 1
            frappe.db.commit()
        update_progress_bar("Translating {0}".format(lang), idx, total)

    print(lang, count, 'imported')
def import_source_messages():
    """Import messages from apps listed in **Translator App** as **Source Message**"""
    message_map = get_formatted_messages()
    total = len(message_map)

    # mark everything disabled; re-enable whatever is still present below
    frappe.db.sql("UPDATE `tabSource Message` SET `disabled`=1")

    for idx, ((message, context), positions) in enumerate(message_map.items()):
        # used SQL so as to make message comparision case sensitive
        rows = frappe.db.sql("""
			SELECT `name`
			FROM `tabSource Message`
			WHERE `message` = BINARY %s
				AND coalesce(`tabSource Message`.context, '') = %s
			LIMIT 1
		""", (message, context), as_dict=1)
        existing = rows[0] if rows else None

        if existing:
            record = frappe.get_doc("Source Message", existing['name'])
            record.disabled = 0
            positions = get_postions_to_save(record.positions, positions)
        else:
            record = frappe.new_doc('Source Message')
            record.message = message
            record.context = context

        record.set('positions', positions)
        record.save(ignore_version=True, ignore_permissions=True)
        update_progress_bar("Importing messages", idx, total)
def sync_for(app_name, force=0, sync_everything=False, verbose=False):
    """Import every doctype JSON file of ``app_name`` into the database.

    For the "frappe" app, core and custom doctypes are queued first because
    everything else depends on them at install time.
    """
    files = []

    if app_name == "frappe":
        # these need to go first at time of install
        for d in (("core", "docfield"), ("core", "docperm"), ("core", "doctype"),
            ("core", "user"), ("core", "role"), ("custom", "custom_field"),
            ("custom", "property_setter")):
            files.append(os.path.join(frappe.get_app_path("frappe"), d[0],
                "doctype", d[1], d[1] + ".json"))

    for module_name in frappe.local.app_modules.get(app_name) or []:
        folder = os.path.dirname(frappe.get_module(app_name + "." + module_name).__file__)
        get_doc_files(files, folder, force, sync_everything, verbose=verbose)

    l = len(files)
    if l:
        for i, doc_path in enumerate(files):
            import_file_by_path(doc_path, force=force)
            frappe.db.commit()

            # show progress bar
            update_progress_bar("Updating {0}".format(app_name), i, l)

        # print("") is valid in both Python 2 and 3; the statement form
        # `print ""` is a SyntaxError under Python 3
        print("")
def sync_generators(generators):
    """Update the sitemap entry for every generator (doctype, name) pair and
    rebuild the Website Route tree afterwards."""
    global all_routes
    l = len(generators)
    if l:
        frappe.flags.in_sync_website = True
        for i, g in enumerate(generators):
            doc = frappe.get_doc(g[0], g[1])
            doc.update_sitemap()
            # routes refreshed here are no longer stale
            route = doc.get_route()
            if route in all_routes:
                all_routes.remove(route)
            update_progress_bar("Updating Generators", i, l)
            sys.stdout.flush()
        frappe.flags.in_sync_website = False
        rebuild_tree("Website Route", "parent_website_route")

        # HACK! update public_read, public_write
        for name in frappe.db.sql_list("""select name from `tabWebsite Route`
			where ifnull(parent_website_route, '')!='' order by lft"""):
            route = frappe.get_doc("Website Route", name)
            route.make_private_if_parent_is_private()
            route.db_update()

        # print("") is valid in both Python 2 and 3; the statement form
        # `print ""` is a SyntaxError under Python 3
        print("")
def sync_for(app_name, force=0, sync_everything=False, verbose=False):
    """Import every doctype JSON file of ``app_name`` into the database.

    For the "frappe" app, core and custom doctypes are queued first because
    everything else depends on them at install time.
    """
    files = []

    if app_name == "frappe":
        # these need to go first at time of install
        for d in (("core", "docfield"), ("core", "docperm"), ("core", "doctype"),
            ("core", "user"), ("core", "role"), ("custom", "custom_field"),
            ("custom", "property_setter")):
            files.append(
                os.path.join(frappe.get_app_path("frappe"), d[0], "doctype",
                    d[1], d[1] + ".json"))

    for module_name in frappe.local.app_modules.get(app_name) or []:
        folder = os.path.dirname(
            frappe.get_module(app_name + "." + module_name).__file__)
        get_doc_files(files, folder, force, sync_everything, verbose=verbose)

    l = len(files)
    if l:
        for i, doc_path in enumerate(files):
            import_file_by_path(doc_path, force=force)
            frappe.db.commit()

            # show progress bar
            update_progress_bar("Updating {0}".format(app_name), i, l)

        # print("") is valid in both Python 2 and 3; the statement form
        # `print ""` is a SyntaxError under Python 3
        print("")
def execute():
    """Patch: drop the cached global-search doctype list and rebuild the
    global search index for every (non-child) searchable doctype."""
    frappe.cache().delete_value('doctypes_with_global_search')
    searchable = get_doctypes_with_global_search(with_child_tables=False)
    total = len(searchable)
    for idx, doctype in enumerate(searchable):
        update_progress_bar("Updating Global Search", idx, total)
        rebuild_for_doctype(doctype)
def execute():
    """Patch: migrate the legacy Contact/Address link columns (customer,
    supplier, lead, sales_partner) into the generic Dynamic Link child table."""
    frappe.reload_doc('core', 'doctype', 'dynamic_link')
    frappe.reload_doc('contacts', 'doctype', 'contact')
    frappe.reload_doc('contacts', 'doctype', 'address')

    # (target doctype, legacy fieldname) pairs to fold into `links`
    map_fields = (
        ('Customer', 'customer'),
        ('Supplier', 'supplier'),
        ('Lead', 'lead'),
        ('Sales Partner', 'sales_partner')
    )

    for doctype in ('Contact', 'Address'):
        # only run while the legacy column still exists (patch not yet applied)
        if frappe.db.has_column(doctype, 'customer'):
            items = frappe.get_all(doctype)
            for i, doc in enumerate(items):
                doc = frappe.get_doc(doctype, doc.name)
                dirty = False
                for field in map_fields:
                    if doc.get(field[1]):
                        doc.append('links',
                            dict(link_doctype=field[0], link_name=doc.get(field[1])))
                        dirty = True
                if dirty:
                    deduplicate_dynamic_links(doc)
                    doc.update_children()
                update_progress_bar('Updating {0}'.format(doctype), i, len(items))
            # call print() — a bare `print` is a no-op expression under Python 3,
            # so the progress bar never got its terminating newline
            print()
def download_latest_client_backup():
    """Download the most recent backup folder for this client from Google
    Drive into the site path, then print the bench restore command to run."""
    credentials = get_credentials()
    http = credentials.authorize(httplib2.Http())
    drive_service = discovery.build('drive', 'v3', http=http)
    client_id = get_client_folder_id(drive_service)

    # newest-first listing of backup folders under the client folder
    results = drive_service.files().list(
        q="'{}' in parents".format(client_id),
        pageSize=10,
        fields="files(id,name)",
        orderBy="modifiedTime desc").execute()

    recent_folder = results.get("files", []) and results.get("files")[0].get("name")
    if not recent_folder:
        print("No recent backups found")
        return

    print("Recent Backup Found ON : {}".format(recent_folder))
    # switch from the folder's display name to its Drive id for the next query
    recent_folder = results.get("files")[0].get("id")

    # Download all files in the recent_folder and proceed
    backup_files = drive_service.files().list(
        q="'{}' in parents".format(recent_folder),
        fields="files(id,name)").execute()
    if not backup_files:
        print("No files found in the folder")
        return

    sitepath = os.path.join(frappe.get_site_path(), recent_folder)
    os.mkdir(sitepath)

    for file in backup_files["files"]:
        request = drive_service.files().get_media(fileId=file.get("id"))
        # io.FileIO is a raw (binary) stream regardless of the mode letter
        fh = io.FileIO(
            os.path.join(frappe.get_site_path(), recent_folder, file.get("name")),
            mode='w')
        downloader = MediaIoBaseDownload(fh, request)
        done = False
        while done is False:
            status, done = downloader.next_chunk()
            update_progress_bar(
                "Downloading {} {}%".format(file.get("name", ''),
                    round(status.progress() * 100)),
                status.progress() * 100, 100)
        print('')

    # execute restore methods
    # NOTE(review): assumes exactly one database file, one public-files file
    # and one private-files file in the backup folder — [0] raises IndexError
    # otherwise; confirm against the backup layout
    print(
        "bench --force --site {} restore {} --with-public-files {} --with-private-files {}"
        .format(os.path.basename(os.path.normpath(frappe.get_site_path())), [
            os.path.join(sitepath, x.get("name")) for x in backup_files["files"]
            if "database" in x.get("name")
        ][0], [
            os.path.join(sitepath, x.get("name")) for x in backup_files["files"]
            if not "database" in x.get("name") and not "private" in x.get("name")
        ][0], [
            os.path.join(sitepath, x.get("name")) for x in backup_files["files"]
            if "private" in x.get("name")
        ][0]))
    print()
    # NOTE(review): this literal was garbled across a line break in the source;
    # reconstructed as a single string — confirm the intended cleanup path
    print("rm -rf sites{}".format(sitepath[1:]))
    print()
def disable_roles_exepct_PF_roles(pf_roles):
    """Disable every role that is not listed in ``pf_roles``."""
    roles = get_all_roles()
    total = len(roles)
    for idx, role in enumerate(roles):
        update_progress_bar('Deactivate Roles', idx, total)
        # role rows are tuples; the role name is the first column
        if role[0] not in pf_roles:
            disable_role(role[0])
def sync_pages(routes):
    """Insert the given route documents, showing a progress bar."""
    l = len(routes)
    if l:
        for i, r in enumerate(routes):
            r.insert(ignore_permissions=True)
            update_progress_bar("Updating Pages", i, l)
        # print("") is valid in both Python 2 and 3; the statement form
        # `print ""` is a SyntaxError under Python 3
        print("")
def set_permissions(docs_for_permission, role):
    """Grant ``role`` a permission entry on every doctype listed in
    ``docs_for_permission``, printing a banner and a progress bar."""
    print("+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++")
    print("Define permissions for:")
    for idx, doc in enumerate(docs_for_permission):
        frappe.permissions.add_permission(doc, role)
        update_progress_bar('"{0}"'.format(role), idx, len(docs_for_permission))
    print("+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++")
def sync_for(app_name, force=0, reset_permissions=False):
    """Import every doctype JSON file of ``app_name`` into the database.

    For the "frappe" app, a fixed set of bootstrap doctypes is queued first
    because everything else depends on them at install time.
    """
    files = []

    if app_name == "frappe":
        # these need to go first at time of install
        # (dict preserves insertion order, so the sync order is unchanged)
        FRAPPE_PATH = frappe.get_app_path("frappe")
        bootstrap = {
            "core": ["docfield", "docperm", "doctype_action", "doctype_link",
                "role", "has_role", "doctype"],
            "custom": ["custom_field", "property_setter"],
            "website": ["web_form", "web_template", "web_form_field",
                "portal_menu_item"],
            "data_migration": ["data_migration_mapping_detail",
                "data_migration_mapping", "data_migration_plan_mapping",
                "data_migration_plan"],
            "desk": ["number_card", "dashboard_chart", "dashboard",
                "onboarding_permission", "onboarding_step", "onboarding_step_map",
                "module_onboarding", "workspace_link", "workspace_chart",
                "workspace_shortcut", "workspace"],
        }
        for module, doctypes in bootstrap.items():
            for doctype in doctypes:
                files.append(os.path.join(FRAPPE_PATH, module, "doctype",
                    doctype, f"{doctype}.json"))

    for module_name in frappe.local.app_modules.get(app_name) or []:
        folder = os.path.dirname(frappe.get_module(app_name + "." + module_name).__file__)
        files = get_doc_files(files=files, start_path=folder)

    l = len(files)
    if l:
        for i, doc_path in enumerate(files):
            import_file_by_path(doc_path, force=force, ignore_version=True,
                reset_permissions=reset_permissions)
            frappe.db.commit()
            # show progress bar
            update_progress_bar("Updating DocTypes for {0}".format(app_name), i, l)
        # print each progress bar on new line
        print()
def execute(): """This patch needs to be executed manually since it needs to call the Shipstation API multiple times, which can keep sites down for a long time.""" # setup_custom_fields() shipstation_settings = frappe.get_all("Shipstation Settings", filters={"enabled": True}) for settings in shipstation_settings: settings_doc = frappe.get_doc("Shipstation Settings", settings.name) client = settings_doc.client() for store in settings_doc.shipstation_stores: if not store.enable_orders: continue store_orders = frappe.get_all( "Sales Order", filters={ "docstatus": 1, "shipstation_store_name": store.store_name, "marketplace": store.marketplace_name, "shipstation_order_id": ["is", "set"], }, fields=["name", "shipstation_order_id"], ) for i, order in enumerate(store_orders): update_progress_bar( f"Updating Shipstation order item IDs for {store.marketplace_name} ({store.store_name})", i, len(store_orders), ) shipstation_order = client.get_order( order.shipstation_order_id) sales_order = frappe.get_doc("Sales Order", order.name) for item in shipstation_order.items: if not item.order_item_id: continue for order_item in sales_order.items: if (order_item.item_code == item.sku.strip() and flt(order_item.qty) == flt(item.quantity) and flt(order_item.rate) == flt( item.unit_price)): frappe.db.set_value( "Sales Order Item", order_item.name, "shipstation_order_item_id", item.order_item_id, update_modified=False, ) frappe.db.commit()
def sync_for(app_name, force=0, sync_everything=False, verbose=False, reset_permissions=False):
    """Import every doctype JSON file of ``app_name`` into the database.

    For the "frappe" app, a fixed, order-sensitive list of bootstrap doctypes
    is queued first because everything else depends on them at install time.
    """
    files = []

    if app_name == "frappe":
        # these need to go first at time of install
        for d in (("core", "docfield"), ("core", "docperm"), ("core", "doctype_action"),
            ("core", "doctype_link"), ("core", "role"), ("core", "has_role"),
            ("core", "doctype"), ("core", "user"), ("custom", "custom_field"),
            ("custom", "property_setter"), ("website", "web_form"),
            ("website", "web_template"), ("website", "web_form_field"),
            ("website", "portal_menu_item"),
            ("data_migration", "data_migration_mapping_detail"),
            ("data_migration", "data_migration_mapping"),
            ("data_migration", "data_migration_plan_mapping"),
            ("data_migration", "data_migration_plan"),
            ("desk", "number_card"), ("desk", "dashboard_chart"), ("desk", "dashboard"),
            ("desk", "onboarding_permission"), ("desk", "onboarding_step"),
            ("desk", "onboarding_step_map"), ("desk", "module_onboarding"),
            ("desk", "workspace_link"), ("desk", "workspace_chart"),
            ("desk", "workspace_shortcut"), ("desk", "workspace")):
            files.append(
                os.path.join(frappe.get_app_path("frappe"), d[0], "doctype",
                    d[1], d[1] + ".json"))

    # collect the remaining doctype files from each module of the app
    for module_name in frappe.local.app_modules.get(app_name) or []:
        folder = os.path.dirname(
            frappe.get_module(app_name + "." + module_name).__file__)
        get_doc_files(files, folder)

    l = len(files)
    if l:
        for i, doc_path in enumerate(files):
            import_file_by_path(doc_path, force=force, ignore_version=True,
                reset_permissions=reset_permissions, for_sync=True)
            frappe.db.commit()
            # show progress bar
            update_progress_bar("Updating DocTypes for {0}".format(app_name), i, l)
        # print each progress bar on new line
        print()
def build_index(self):
    """Build index for all parsed documents"""
    index = self.create_index()
    writer = index.writer()
    total = len(self.documents)
    for position, document in enumerate(self.documents):
        # skip entries that failed to parse (falsy placeholders)
        if document:
            writer.add_document(**document)
        update_progress_bar("Building Index", position, total)
    writer.commit(optimize=True)
def hide_modules(visible_modules):
    """Restrict every module not listed in ``visible_modules`` to the
    'Non Profit' domain (which hides it elsewhere)."""
    modules = get_all_modules()
    for idx, module in enumerate(modules):
        update_progress_bar('Hide Module unused modules', idx, len(modules))
        if module[0] not in visible_modules:
            # parameterized query instead of str.format() — the old code
            # interpolated the module name straight into SQL (injection risk,
            # and it broke on names containing a quote)
            frappe.db.sql(
                """UPDATE `tabModule Def`
                SET `restrict_to_domain` = 'Non Profit'
                WHERE `module_name` = %s""",
                (module[0],))
def rebuild_global_search(context):
    '''Setup help table in the current site (called after migrate)'''
    from frappe.utils.global_search import (get_doctypes_with_global_search,
        rebuild_for_doctype)

    for site in context.sites:
        try:
            frappe.init(site)
            frappe.connect()
            searchable = get_doctypes_with_global_search()
            total = len(searchable)
            for idx, doctype in enumerate(searchable):
                rebuild_for_doctype(doctype)
                update_progress_bar('Rebuilding Global Search', idx, total)
        finally:
            frappe.destroy()
def import_country_and_currency():
    """Create/refresh Country and Currency records from frappe's geo data,
    then enable the commonly used currencies."""
    from frappe.geo.country_info import get_all
    from frappe.utils import update_progress_bar

    data = get_all()
    total = len(data)
    for idx, name in enumerate(data):
        update_progress_bar("Updating country info", idx, total)
        add_country_and_currency(name, frappe._dict(data[name]))

    # enable frequently used currencies
    for currency in ("INR", "USD", "GBP", "EUR", "AED", "AUD", "JPY", "CNY", "CHF"):
        frappe.db.set_value("Currency", currency, "enabled", 1)
def import_country_and_currency():
    """Create/refresh Country and Currency records from frappe's geo data,
    then enable the commonly used currencies."""
    from frappe.geo.country_info import get_all
    from frappe.utils import update_progress_bar

    data = get_all()
    for i, name in enumerate(data):
        update_progress_bar("Updating country info", i, len(data))
        country = frappe._dict(data[name])
        add_country_and_currency(name, country)
    # call print() — a bare `print` is a no-op expression under Python 3,
    # so the progress bar never got its terminating newline
    print()

    # enable frequently used currencies
    for currency in ("INR", "USD", "GBP", "EUR", "AED", "AUD", "JPY", "CNY", "CHF"):
        frappe.db.set_value("Currency", currency, "enabled", 1)
def copy_translations(from_lang, to_lang):
    """Copy every Translated Message of ``from_lang`` to ``to_lang``,
    skipping sources that already have a translation in the target."""
    translations = frappe.db.sql("""select source, translated from `tabTranslated Message`
		where language=%s""", (from_lang, ))
    total = len(translations)

    for idx, (source, translated) in enumerate(translations):
        if not frappe.db.get_value('Translated Message',
                {"source": source, "language": to_lang}):
            record = frappe.new_doc('Translated Message')
            record.language = to_lang
            record.source = source
            record.translated = translated
            try:
                record.save()
            except frappe.ValidationError:
                # best-effort copy: skip rows that fail validation
                pass
        update_progress_bar("Copying {0} to {1}".format(from_lang, to_lang), idx, total)
def sync_for(app_name, force=0, sync_everything=False, verbose=False):
    """Import every doctype JSON file of ``app_name`` into the database."""
    files = []
    for module_name in frappe.local.app_modules.get(app_name) or []:
        folder = os.path.dirname(frappe.get_module(app_name + "." + module_name).__file__)
        files += get_doc_files(folder, force, sync_everything, verbose=verbose)

    l = len(files)
    if l:
        for i, doc_path in enumerate(files):
            import_file_by_path(doc_path, force=force)
            frappe.db.commit()

            # show progress bar
            update_progress_bar("Updating {0}".format(app_name), i, l)

        # print("") is valid in both Python 2 and 3; the statement form
        # `print ""` is a SyntaxError under Python 3
        print("")
def sync_for(app_name, force=0, sync_everything=False, verbose=False):
    """Import every doctype JSON file of ``app_name`` into the database."""
    files = []
    for module_name in frappe.local.app_modules.get(app_name) or []:
        folder = os.path.dirname(
            frappe.get_module(app_name + "." + module_name).__file__)
        files += get_doc_files(folder, force, sync_everything, verbose=verbose)

    l = len(files)
    if l:
        for i, doc_path in enumerate(files):
            import_file_by_path(doc_path, force=force)
            frappe.db.commit()

            # show progress bar
            update_progress_bar("Updating {0}".format(app_name), i, l)

        # print("") is valid in both Python 2 and 3; the statement form
        # `print ""` is a SyntaxError under Python 3
        print("")
def import_translations_from_csv(lang, app):
    """Import translations for ``lang`` from ``app``'s translations/<lang>.csv
    into Translated Message records, skipping already-translated messages."""
    path = os.path.join(frappe.get_app_path(app, "translations", lang + ".csv"))
    translations = []
    try:
        translations = read_translation_csv_file(path)
    except Exception:
        # best-effort: the app may simply not ship a CSV for this language.
        # (narrowed from a bare `except:`, which also swallowed SystemExit
        # and KeyboardInterrupt)
        return

    normalized_translations = get_normalized_translations(translations)
    source_messages = get_source_messages()
    translations = get_translations(lang)
    count = 0
    l = len(normalized_translations)
    print('importing', len(normalized_translations), 'translations')

    for i, (source_message, translated, context) in enumerate(normalized_translations):
        if not (source_message, context or None) in source_messages:
            # no matching enabled source message — skip
            pass
        elif translated in translations:
            # already translated — skip
            pass
        else:
            try:
                source = frappe.db.get_all("Source Message", {
                    "message": source_message,
                    "context": context or '',
                    "disabled": 0
                }, limit=1)
                source_name = source[0].name
                dest = frappe.new_doc("Translated Message")
                dest.language = lang
                dest.translated = translated
                dest.source = source_name
                dest.translation_source = 'CSV'
                dest.save(ignore_version=True, ignore_permissions=True)
                frappe.db.commit()
                count += 1
            except Exception as e:
                # also covers IndexError when no source row was found
                print(e)
        update_progress_bar(f"Importing messages for lang {lang} of {app}", i, l)

    print(f'{count} updated for {lang}')
def sync_pages(routes):
    """Insert or update a Website Route record for each given page."""
    global all_routes
    l = len(routes)
    if l:
        for i, r in enumerate(routes):
            r.autoname()
            if frappe.db.exists("Website Route", r.name):
                # refresh the existing route's page fields
                route = frappe.get_doc("Website Route", r.name)
                for key in ("page_title", "controller", "template"):
                    route.set(key, r.get(key))
                route.save(ignore_permissions=True)
            else:
                r.insert(ignore_permissions=True)
            # this route is no longer stale
            if r.name in all_routes:
                all_routes.remove(r.name)
            update_progress_bar("Updating Pages", i, l)
        # print("") is valid in both Python 2 and 3; the statement form
        # `print ""` is a SyntaxError under Python 3
        print("")
def sync_for(app_name, force=0, sync_everything = False, verbose=False, reset_permissions=False):
    """Import every doctype JSON file of ``app_name`` into the database.

    For the "frappe" app, a fixed, order-sensitive list of bootstrap doctypes
    is queued first because everything else depends on them at install time.
    """
    files = []

    if app_name == "frappe":
        # these need to go first at time of install
        for d in (("core", "docfield"), ("core", "docperm"), ("core", "has_role"),
            ("core", "doctype"), ("core", "user"), ("core", "role"),
            ("custom", "custom_field"), ("custom", "property_setter"),
            ("website", "web_form"), ("website", "web_form_field"),
            ("website", "portal_menu_item"),
            ("data_migration", "data_migration_mapping_detail"),
            ("data_migration", "data_migration_mapping"),
            ("data_migration", "data_migration_plan_mapping"),
            ("data_migration", "data_migration_plan")):
            files.append(os.path.join(frappe.get_app_path("frappe"), d[0],
                "doctype", d[1], d[1] + ".json"))

    # collect the remaining doctype files from each module of the app
    for module_name in frappe.local.app_modules.get(app_name) or []:
        folder = os.path.dirname(frappe.get_module(app_name + "." + module_name).__file__)
        get_doc_files(files, folder, force, sync_everything, verbose=verbose)

    l = len(files)
    if l:
        for i, doc_path in enumerate(files):
            import_file_by_path(doc_path, force=force, ignore_version=True,
                reset_permissions=reset_permissions, for_sync=True)
            #print module_name + ' | ' + doctype + ' | ' + name
            frappe.db.commit()

            # show progress bar
            update_progress_bar("Updating DocTypes for {0}".format(app_name), i, l)

        # print each progress bar on new line
        print()
def get_items_to_index(self):
    """Get all routes to be indexed, this includes the static pages
    in www/ and routes from published documents

    Returns:
            self (object): FullTextSearch Instance
    """
    if getattr(self, "_items_to_index", False):
        return self._items_to_index

    routes = get_static_pages_from_all_apps() + slugs_with_web_view()

    self._items_to_index = []
    for i, route in enumerate(routes):
        update_progress_bar("Retrieving Routes", i, len(routes))
        self._items_to_index += [self.get_document_to_index(route)]
    print()

    # Return the freshly built list directly. The old tail call
    # `return self.get_items_to_index()` recursed infinitely when `routes`
    # was empty, because an empty list is falsy and fails the cache check.
    return self._items_to_index
def insert_and_update(self):
    """Insert new static web pages and update changed ones; verbose mode
    prints each page name, otherwise a progress indicator is shown."""
    if self.to_insert:
        l = len(self.to_insert)
        for i, page in enumerate(self.to_insert):
            if self.verbose:
                # print(...) works in both Python 2 and 3; the old statement
                # form was a SyntaxError under Python 3
                print("Inserting " + page.route)
            else:
                update_progress_bar("Updating Static Pages", i, l)
            self.insert_web_page(page)
        if not self.verbose:
            print("")

    if self.to_update:
        for i, route_details in enumerate(self.to_update):
            if self.verbose:
                print("Updating " + route_details.name)
            else:
                sys.stdout.write("\rUpdating statics {0}/{1}".format(i+1, len(self.to_update)))
                sys.stdout.flush()
            self.update_web_page(route_details)
        if not self.verbose:
            print("")
def insert_and_update(self):
    """Insert new static pages and update changed ones; verbose mode prints
    each page name, otherwise a progress indicator is shown."""
    if self.to_insert:
        l = len(self.to_insert)
        for i, page in enumerate(self.to_insert):
            if self.verbose:
                # print(...) works in both Python 2 and 3; the old statement
                # form was a SyntaxError under Python 3
                print("Inserting " + page.page_name)
            else:
                update_progress_bar("Updating Static Pages", i, l)
            page.insert()
        if not self.verbose:
            print("")

    if self.to_update:
        for i, page in enumerate(self.to_update):
            # fixed inverted condition: the update path tested
            # `if not self.verbose` while the insert path above used
            # `if self.verbose` — verbose mode should print the names and
            # quiet mode should show the progress counter
            if self.verbose:
                print("Updating " + page.page_name)
            else:
                sys.stdout.write("\rUpdating statics {0}/{1}".format(i+1, len(self.to_update)))
                sys.stdout.flush()
            page.save()
        if not self.verbose:
            print("")
def insert_and_update(self):
    """Insert new static pages and update changed ones; verbose mode prints
    each page name, otherwise a progress indicator is shown."""
    if self.to_insert:
        l = len(self.to_insert)
        for i, page in enumerate(self.to_insert):
            if self.verbose:
                # print(...) works in both Python 2 and 3; the old statement
                # form was a SyntaxError under Python 3
                print("Inserting " + page.page_name)
            else:
                update_progress_bar("Updating Static Pages", i, l)
            page.insert()
        if not self.verbose:
            print("")

    if self.to_update:
        for i, page in enumerate(self.to_update):
            # fixed inverted condition: the update path tested
            # `if not self.verbose` while the insert path above used
            # `if self.verbose` — verbose mode should print the names and
            # quiet mode should show the progress counter
            if self.verbose:
                print("Updating " + page.page_name)
            else:
                sys.stdout.write("\rUpdating statics {0}/{1}".format(
                    i + 1, len(self.to_update)))
                sys.stdout.flush()
            page.save()
        if not self.verbose:
            print("")
def import_data(self):
    """Run the actual import: validate warnings, build payloads from the
    rows, then insert documents in batches while reporting progress and
    recording per-row success/failure in the import log.

    Returns the final import log (list of dicts) in console mode; otherwise
    persists status and log on the Data Import document.
    """
    # set user lang for translations
    frappe.cache().hdel("lang", frappe.session.user)
    frappe.set_user_lang(frappe.session.user)

    if not self.console:
        self.data_import.db_set("template_warnings", "")

    # set flags
    frappe.flags.in_import = True
    frappe.flags.mute_emails = self.data_import.mute_emails

    # prepare a map for missing link field values
    self.prepare_missing_link_field_values()

    # parse docs from rows
    payloads = self.get_payloads_for_import()

    # dont import if there are non-ignorable warnings
    warnings = [w for w in self.warnings if w.get("type") != "info"]
    if warnings:
        if self.console:
            self.print_grouped_warnings(warnings)
        else:
            self.data_import.db_set("template_warnings", json.dumps(warnings))
            frappe.publish_realtime(
                "data_import_refresh", {"data_import": self.data_import.name}
            )
        return

    # setup import log
    if self.data_import.import_log:
        import_log = frappe.parse_json(self.data_import.import_log)
    else:
        import_log = []

    # remove previous failures from import log
    import_log = [l for l in import_log if l.get("success") == True]

    # get successfully imported rows
    imported_rows = []
    for log in import_log:
        log = frappe._dict(log)
        if log.success:
            imported_rows += log.row_indexes

    # start import
    total_payload_count = len(payloads)
    batch_size = frappe.conf.data_import_batch_size or 1000

    for batch_index, batched_payloads in enumerate(
        frappe.utils.create_batch(payloads, batch_size)
    ):
        for i, payload in enumerate(batched_payloads):
            doc = payload.doc
            row_indexes = [row[0] for row in payload.rows]
            # 1-based position across all batches, used for progress/ETA
            current_index = (i + 1) + (batch_index * batch_size)

            # skip rows already imported in a previous (partial) run
            if set(row_indexes).intersection(set(imported_rows)):
                print("Skipping imported rows", row_indexes)
                if total_payload_count > 5:
                    frappe.publish_realtime(
                        "data_import_progress",
                        {
                            "current": current_index,
                            "total": total_payload_count,
                            "skipping": True,
                            "data_import": self.data_import.name,
                        },
                    )
                continue

            try:
                start = timeit.default_timer()
                doc = self.process_doc(doc)
                processing_time = timeit.default_timer() - start
                eta = self.get_eta(current_index, total_payload_count, processing_time)

                if total_payload_count > 5:
                    frappe.publish_realtime(
                        "data_import_progress",
                        {
                            "current": current_index,
                            "total": total_payload_count,
                            "docname": doc.name,
                            "data_import": self.data_import.name,
                            "success": True,
                            "row_indexes": row_indexes,
                            "eta": eta,
                        },
                    )
                if self.console:
                    update_progress_bar(
                        "Importing {0} records".format(total_payload_count),
                        current_index,
                        total_payload_count,
                    )
                import_log.append(
                    frappe._dict(success=True, docname=doc.name, row_indexes=row_indexes)
                )
                # commit after every successful import
                frappe.db.commit()
            except Exception:
                # record the failure with traceback + queued messages so the
                # rows can be retried on a later run
                import_log.append(
                    frappe._dict(
                        success=False,
                        exception=frappe.get_traceback(),
                        messages=frappe.local.message_log,
                        row_indexes=row_indexes,
                    )
                )
                frappe.clear_messages()
                # rollback if exception
                frappe.db.rollback()

    # set status
    failures = [l for l in import_log if l.get("success") == False]
    if len(failures) == total_payload_count:
        status = "Pending"
    elif len(failures) > 0:
        status = "Partial Success"
    else:
        status = "Success"

    if self.console:
        self.print_import_log(import_log)
    else:
        self.data_import.db_set("status", status)
        self.data_import.db_set("import_log", json.dumps(import_log))

    frappe.flags.in_import = False
    frappe.flags.mute_emails = False
    frappe.publish_realtime("data_import_refresh", {"data_import": self.data_import.name})

    return import_log
def transform_database(context, table, engine, row_format, failfast):
    "Transform site database through given parameters"
    site = get_site(context)
    check_table = []   # [table_name, error_args] pairs for failed ALTERs
    add_line = False   # whether a progress bar was printed (needs newline)
    skipped = 0        # failed tables, excluded from the progress count
    frappe.init(site=site)

    # MariaDB-only operation
    if frappe.conf.db_type and frappe.conf.db_type != "mariadb":
        click.secho(
            "This command only has support for MariaDB databases at this point",
            fg="yellow")
        sys.exit(1)

    # at least one of the two ALTER targets must be supplied
    if not (engine or row_format):
        click.secho("Values for `--engine` or `--row_format` must be set")
        sys.exit(1)

    frappe.connect()

    if table == "all":
        # every table of this site's schema not already in the target row format
        information_schema = frappe.qb.Schema("information_schema")
        queried_tables = frappe.qb.from_(
            information_schema.tables).select("table_name").where(
                (information_schema.tables.row_format != row_format)
                & (information_schema.tables.table_schema == frappe.conf.db_name)).run()
        tables = [x[0] for x in queried_tables]
    else:
        tables = [x.strip() for x in table.split(",")]

    total = len(tables)

    for current, table in enumerate(tables):
        values_to_set = ""
        if engine:
            values_to_set += f" ENGINE={engine}"
        if row_format:
            values_to_set += f" ROW_FORMAT={row_format}"

        try:
            frappe.db.sql(f"ALTER TABLE `{table}`{values_to_set}")
            update_progress_bar("Updating table schema", current - skipped, total)
            add_line = True
        except Exception as e:
            # collect the failure and keep going unless --failfast was given
            check_table.append([table, e.args])
            skipped += 1
            if failfast:
                break

    if add_line:
        print()

    # report any tables whose ALTER failed
    for errored_table in check_table:
        table, err = errored_table
        err_msg = f"{table}: ERROR {err[0]}: {err[1]}"
        click.secho(err_msg, fg="yellow")

    frappe.destroy()