def get_data(filters, columns):
	"""Build the Work Order summary rows for the report.

	Returns a tuple ``(data, chart_data)`` where ``data`` has one row per
	status label with a column per period, and ``chart_data`` comes from
	``get_chart_data``.
	"""
	data = []
	entry = dataent.get_all("Work Order",
		fields=["creation", "modified", "actual_start_date", "actual_end_date",
			"planned_start_date", "planned_end_date", "status"],
		filters={"docstatus": 1, "company": filters["company"]})

	periodic_data = get_periodic_data(filters, entry)

	labels = ["All Work Orders", "Not Started", "Overdue", "Pending", "Completed"]
	chart_data = get_chart_data(periodic_data, columns)
	ranges = get_period_date_ranges(filters)

	for label in labels:
		work = {"Status": label}
		for dummy, end_date in ranges:
			period = get_period(end_date, filters)
			# Guard against labels missing from periodic_data: the original
			# called .get(period) on None and raised AttributeError.
			# Falsy period values (None/0) collapse to 0.0, as before.
			work[scrub(period)] = periodic_data.get(label, {}).get(period) or 0.0
		data.append(work)

	return data, chart_data
def get_column(assessment_criteria):
	"""Build report columns: student id/name, then a grade column and an
	integer-capped score column for every assessment criteria."""
	columns = [
		{"fieldname": "student", "label": _("Student ID"), "fieldtype": "Link",
			"options": "Student", "width": 90},
		{"fieldname": "student_name", "label": _("Student Name"), "fieldtype": "Data",
			"width": 160},
	]
	for criteria in assessment_criteria:
		fieldname = dataent.scrub(criteria)
		columns += [
			{"fieldname": fieldname, "label": criteria,
				"fieldtype": "Data", "width": 110},
			{"fieldname": fieldname + "_score",
				"label": "Score(" + str(int(assessment_criteria[criteria])) + ")",
				"fieldtype": "Float", "width": 100},
		]
	return columns
def update_number_field(doctype_name, name, field_name, number_value, company):
	'''
	doctype_name = Name of the DocType
	name = Docname being referred
	field_name = Name of the field thats holding the 'number' attribute
	number_value = Numeric value entered in field_name

	Stores the number entered in the dialog to the DocType's field.

	Renames the document by adding the number as a prefix to the current name
	and updates all transaction where it was present.
	'''
	doc_title = dataent.db.get_value(doctype_name, name, dataent.scrub(doctype_name) + "_name")

	validate_field_number(doctype_name, name, number_value, company, field_name)

	dataent.db.set_value(doctype_name, name, field_name, number_value)

	# Strip an existing numeric prefix from the stored title, e.g.
	# "100 - Cash" -> "Cash". Guard against an empty/None title (the original
	# raised on doc_title[0]) and against a title that is all digits with no
	# separator remainder (the original raised IndexError on [1]).
	if doc_title and doc_title[0].isdigit():
		separator = " - " if " - " in doc_title else " "
		parts = doc_title.split(separator, 1)
		if len(parts) > 1:
			doc_title = parts[1]
			dataent.db.set_value(doctype_name, name, dataent.scrub(doctype_name) + "_name", doc_title)

	new_name = get_autoname_with_number(number_value, doc_title, name, company)

	# rename only when the generated name actually differs; returns None otherwise
	if name != new_name:
		dataent.rename_doc(doctype_name, name, new_name)
		return new_name
def get_columns_dict(columns):
	"""Return a dict of column docfield values, keyed BOTH by positional idx
	and by fieldname, so either index or fieldname can be used to look up a
	column's docfield properties."""
	columns_dict = dataent._dict()
	for idx, col in enumerate(columns):
		col_dict = dataent._dict()

		if isinstance(col, string_types):
			# "Label:Fieldtype/Options" shorthand string
			parts = col.split(":")
			if len(parts) > 1:
				if "/" in parts[1]:
					fieldtype, options = parts[1].split("/")
					col_dict["fieldtype"] = fieldtype
					col_dict["options"] = options
				else:
					col_dict["fieldtype"] = parts[1]
			col_dict["label"] = parts[0]
			col_dict["fieldname"] = dataent.scrub(parts[0])
		else:
			# already a dict of docfield properties
			col_dict.update(col)
			if "fieldname" not in col_dict:
				col_dict["fieldname"] = dataent.scrub(col_dict["label"])

		columns_dict[idx] = col_dict
		columns_dict[col_dict["fieldname"]] = col_dict

	return columns_dict
def get_column(course_dict):
	"""Build report columns: student id/name, then a grade column and a score
	column (labelled with the maximum score) for every course."""
	columns = [
		{"fieldname": "student", "label": _("Student ID"), "fieldtype": "Link",
			"options": "Student", "width": 90},
		{"fieldname": "student_name", "label": _("Student Name"), "fieldtype": "Data",
			"width": 160},
	]
	for course, max_score in course_dict.items():
		scrubbed = dataent.scrub(course)
		columns.append({"fieldname": "grade_" + scrubbed, "label": course,
			"fieldtype": "Data", "width": 110})
		columns.append({"fieldname": "score_" + scrubbed,
			"label": "Score(" + str(max_score) + ")",
			"fieldtype": "Float", "width": 100})
	return columns
def execute():
	# Migration patch: rename 'allow_transfer_for_manufacture' to
	# 'include_item_in_manufacturing' on BOM/Work Order child tables and Item,
	# then backfill the 'transfer_material_against' field on BOM / Work Order.
	for doctype in ['BOM Explosion Item', 'BOM Item', 'Work Order Item', 'Item']:
		if dataent.db.has_column(doctype, 'allow_transfer_for_manufacture'):
			# Item lives in the stock module; the rest in manufacturing
			if doctype != 'Item':
				dataent.reload_doc('manufacturing', 'doctype', dataent.scrub(doctype))
			else:
				dataent.reload_doc('stock', 'doctype', dataent.scrub(doctype))
			rename_field(doctype, "allow_transfer_for_manufacture", "include_item_in_manufacturing")

	# normalize blank values of the flag to 0
	if dataent.db.has_column('BOM', 'allow_same_item_multiple_times'):
		dataent.db.sql(""" UPDATE tabBOM SET allow_same_item_multiple_times = 0 WHERE trim(coalesce(allow_same_item_multiple_times, '')) = '' """)

	for doctype in ['BOM', 'Work Order']:
		dataent.reload_doc('manufacturing', 'doctype', dataent.scrub(doctype))
		if dataent.db.has_column(doctype, 'transfer_material_against_job_card'):
			# derive the new select field from the old boolean column
			dataent.db.sql(""" UPDATE `tab%s` SET transfer_material_against = CASE WHEN transfer_material_against_job_card = 1 then 'Job Card' Else 'Work Order' END WHERE docstatus < 2""" % (doctype))
		else:
			# old column absent: default everything to 'Work Order'
			dataent.db.sql(""" UPDATE `tab%s` SET transfer_material_against = 'Work Order' WHERE docstatus < 2""" % (doctype))
def get_expense_cost_center(doctype, args):
	"""Fetch the default (cost center, expense account) pair for the document
	referenced in args by the scrubbed doctype fieldname."""
	docname = args.get(dataent.scrub(doctype))
	if doctype == 'Item Group':
		# Item Group defaults live in the 'Item Default' child table per company
		return dataent.db.get_value('Item Default',
			{'parent': docname, 'company': args.get('company')},
			['buying_cost_center', 'expense_account'])
	return dataent.db.get_value(doctype, docname,
		['cost_center', 'default_expense_account'])
def get_defaults(self, key=None, parent="__default"):
	"""Get all defaults"""
	defaults = dataent.defaults.get_defaults(parent)
	if not key:
		# no key requested: hand back the whole defaults dict
		return defaults
	value = defaults.get(key, None)
	# fall back to the scrubbed key when the raw lookup came back falsy
	if not value and key != dataent.scrub(key):
		value = defaults.get(dataent.scrub(key), None)
	return value
def get_web_template(self, suffix=''):
	'''Returns the relative path of the row template for this doctype,
	or None when no such template file exists on disk.'''
	module_name = dataent.scrub(self.module)
	doctype = dataent.scrub(self.name)
	template_path = dataent.get_module_path(
		module_name, 'doctype', doctype, 'templates', doctype + suffix + '.html')
	if not os.path.exists(template_path):
		return None
	return '{module_name}/doctype/{doctype_name}/templates/{doctype_name}{suffix}.html'.format(
		module_name=module_name, doctype_name=doctype, suffix=suffix)
def get_mapping_module(self, mapping_name):
	"""Import and return the data-migration-mapping module for the given
	mapping name, or None when it cannot be imported."""
	try:
		module_def = dataent.get_doc("Module Def", self.module)
		dotted_path = '{app}.{module}.data_migration_mapping.{mapping_name}'.format(
			app=module_def.app_name,
			module=dataent.scrub(self.module),
			mapping_name=dataent.scrub(mapping_name))
		return dataent.get_module(dotted_path)
	except ImportError:
		return None
def rename_inside_controller(self, new, old, new_path):
	"""Rewrite occurrences of the old scrubbed doctype name inside the
	controller files (js/py/list/calendar/tests) at the new path."""
	file_templates = ('{}.js', '{}.py', '{}_list.js', '{}_calendar.js',
		'test_{}.py', 'test_{}.js')
	old_token = dataent.scrub(old).replace(' ', '')
	new_token = dataent.scrub(new).replace(' ', '')
	for template in file_templates:
		controller_file = os.path.join(new_path, template.format(dataent.scrub(new)))
		if not os.path.exists(controller_file):
			continue
		with open(controller_file, 'r') as f:
			code = f.read()
		with open(controller_file, 'w') as f:
			f.write(code.replace(old_token, new_token))
def cleanup_fields_value(self):
	"""For each logic select-field, clear every sibling option field other
	than the currently chosen one."""
	for logic_field in ["apply_on", "applicable_for", "rate_or_discount"]:
		active = dataent.scrub(self.get(logic_field) or "")

		# reset all values except for the logic field
		for option in (self.meta.get_options(logic_field) or "").split("\n"):
			if not option:
				continue
			scrubbed = dataent.scrub(option)
			if scrubbed != active:
				self.set(scrubbed, None)
def execute(filters=None):
	"""Assessment report entry point: returns (columns, data, message, chart)
	with one row per student and per-criteria grade/score columns."""
	data, chart, grades = [], [], []
	args = dataent._dict()
	# criteria -> {grade: count} tally for the chart
	grade_wise_analysis = defaultdict(dict)

	args["academic_year"] = filters.get("academic_year")
	args["course"] = filters.get("course")
	args["assessment_group"] = filters.get("assessment_group")
	args["academic_term"] = filters.get("academic_term")
	args["student_group"] = filters.get("student_group")

	# a concrete assessment group is required to pick results from result_dict
	if args["assessment_group"] == "All Assessment Groups":
		dataent.throw(_("Please select the assessment group other than 'All Assessment Groups'"))

	returned_values = get_formatted_result(args, get_assessment_criteria=True)
	student_dict = returned_values["student_details"]
	result_dict = returned_values["assessment_result"]
	assessment_criteria_dict = returned_values["assessment_criteria"]

	for student in result_dict:
		student_row = {}
		student_row["student"] = student
		student_row["student_name"] = student_dict[student]
		for criteria in assessment_criteria_dict:
			scrub_criteria = dataent.scrub(criteria)
			if criteria in result_dict[student][args.course][args.assessment_group]:
				student_row[scrub_criteria] = result_dict[student][args.course][args.assessment_group][criteria]["grade"]
				student_row[scrub_criteria + "_score"] = result_dict[student][args.course][args.assessment_group][criteria]["score"]

				# create the list of possible grades
				if student_row[scrub_criteria] not in grades:
					grades.append(student_row[scrub_criteria])

				# tally this grade for grade-wise analysis
				if student_row[scrub_criteria] not in grade_wise_analysis[criteria]:
					grade_wise_analysis[criteria][student_row[scrub_criteria]] = 1
				else:
					grade_wise_analysis[criteria][student_row[scrub_criteria]] += 1
			else:
				# no result for this criteria: leave both cells blank
				student_row[dataent.scrub(criteria)] = ""
				student_row[dataent.scrub(criteria)+ "_score"] = ""
		data.append(student_row)

	assessment_criteria_list = [d for d in assessment_criteria_dict]
	columns = get_column(assessment_criteria_dict)
	chart = get_chart_data(grades, assessment_criteria_list, grade_wise_analysis)
	return columns, data, None, chart
def create_new_connection(module, connection_name):
	"""Scaffold a new connection class file under <module>/connectors and
	return its dotted python module path (requires developer mode)."""
	if not dataent.conf.get('developer_mode'):
		dataent.msgprint(_('Please enable developer mode to create new connection'))
		return

	# ensure the connectors package exists (folder + __init__.py)
	module_path = dataent.get_module_path(module)
	connectors_folder = os.path.join(module_path, 'connectors')
	dataent.create_folder(connectors_folder)
	create_init_py(module_path, 'connectors', '')

	connection_class = connection_name.replace(' ', '')
	file_name = dataent.scrub(connection_name) + '.py'
	file_path = os.path.join(module_path, 'connectors', file_name)

	# write the boilerplate connection class into the new file
	with open(file_path, 'w') as f:
		f.write(connection_boilerplate.format(connection_class=connection_class))

	# derive the importable module string from the file path
	# (relative to the app, path separators -> dots, '.py' stripped)
	app_name = dataent.db.get_value('Module Def', module, 'app_name')
	relative = os.path.relpath(file_path, '../apps/{0}'.format(app_name))
	python_module = relative.replace(os.path.sep, '.')[:-3]
	return python_module
def execute():
	# Migration patch: replace the old 'Subscription' based repetition with the
	# new 'Auto Repeat' doctype and rename the linking field on transactions.
	dataent.reload_doc('desk', 'doctype', 'auto_repeat')

	doctypes_to_rename = {
		'accounts': ['Journal Entry', 'Payment Entry', 'Purchase Invoice', 'Sales Invoice'],
		'buying': ['Purchase Order', 'Supplier Quotation'],
		'selling': ['Quotation', 'Sales Order'],
		'stock': ['Delivery Note', 'Purchase Receipt']
	}

	for module, doctypes in doctypes_to_rename.items():
		for doctype in doctypes:
			dataent.reload_doc(module, 'doctype', dataent.scrub(doctype))
			# rename only if the legacy column is still present
			if dataent.db.has_column(doctype, 'subscription'):
				rename_field(doctype, 'subscription', 'auto_repeat')

	# copy every Subscription row into Auto Repeat, preserving field values
	subscriptions = dataent.db.sql('select * from `tabSubscription`', as_dict=1)
	for doc in subscriptions:
		doc['doctype'] = 'Auto Repeat'
		auto_repeat = dataent.get_doc(doc)
		auto_repeat.db_insert()

	# remove the migrated rows and the legacy columns
	dataent.db.sql('delete from `tabSubscription`')
	dataent.db.commit()
	drop_columns_from_subscription()
def update_nsm(doc):
	"""Update the nested-set (lft/rgt) placement of a tree document after its
	parent may have changed, then persist the old-parent marker."""
	# resolve parent / old-parent fieldnames (overridable via doc attributes)
	old_parent_field = 'old_parent'
	parent_field = "parent_" + dataent.scrub(doc.doctype)
	if hasattr(doc, 'nsm_parent_field'):
		parent_field = doc.nsm_parent_field
	if hasattr(doc, 'nsm_oldparent_field'):
		old_parent_field = doc.nsm_oldparent_field

	parent = doc.get(parent_field) or None
	old_parent = doc.get(old_parent_field) or None

	if not doc.lft and not doc.rgt:
		# no tree bounds yet: insert as a new node
		update_add_node(doc, parent or '', parent_field)
	elif old_parent != parent:
		# parent changed: move the node within the tree
		update_move_node(doc, parent_field)

	# remember the current parent for the next update
	doc.set(old_parent_field, parent)
	dataent.db.set_value(doc.doctype, doc.name, old_parent_field, parent or '',
		update_modified=False)

	doc.reload()
def test_Custom_Script_fixture_rex_no_flags(self):
	# export Custom Scripts filtered by a bare regex (no flags), then clean up
	fixture = ["Custom Script", {"name": r"^[i|A]"}]
	csv_path = dataent.scrub(fixture[0]) + "_rex_no_flags.csv"
	export_csv(fixture, csv_path)
	self.assertTrue(True)
	os.remove(csv_path)
def test_Custom_Script_fixture_simple(self):
	# export with the original plain-string fixture style, then clean up
	fixture = "Custom Script"
	csv_path = dataent.scrub(fixture) + "_original_style.csv"
	export_csv(fixture, csv_path)
	self.assertTrue(True)
	os.remove(csv_path)
def validate_mandatory(self):
	"""Ensure the field selected via apply_on / applicable_for actually
	carries a value; throw a MandatoryError otherwise."""
	for field in ["apply_on", "applicable_for"]:
		fieldname = dataent.scrub(self.get(field) or "")
		if fieldname and not self.get(fieldname):
			throw(_("{0} is required").format(self.meta.get_label(fieldname)),
				dataent.MandatoryError)
def test_Custom_Field_fixture_empty_object(self):
	# an empty filter object should export all Custom Fields
	fixture = ["Custom Field", {}]
	csv_path = dataent.scrub(fixture[0]) + "_empty_object_should_be_all.csv"
	export_csv(fixture, csv_path)
	self.assertTrue(True)
	os.remove(csv_path)
def validate(self):
	"""Run all Budget validations; the budget_against target is mandatory."""
	budget_against_value = self.get(dataent.scrub(self.budget_against))
	if not budget_against_value:
		dataent.throw(_("{0} is mandatory").format(self.budget_against))

	self.validate_duplicate()
	self.validate_accounts()
	self.set_null_value()
	self.validate_applicable_for()
def test_Custom_Field_fixture_rex_with_flags(self):
	# export Custom Fields filtered by a regex carrying L,M flags, then clean up
	fixture = ["Custom Field", {"name": r"^[i|A]", "flags": "L,M"}]
	csv_path = dataent.scrub(fixture[0]) + "_rex_with_flags.csv"
	export_csv(fixture, csv_path)
	self.assertTrue(True)
	os.remove(csv_path)
def test_Custom_Script_fixture_just_list(self):
	# a bare one-element list should export all Custom Scripts
	fixture = ["Custom Script"]
	csv_path = dataent.scrub(fixture[0]) + "_just_list_should_be_all.csv"
	export_csv(fixture, csv_path)
	self.assertTrue(True)
	os.remove(csv_path)
def test_Custom_Script_fixture_simple_name_at_least_equal(self):
	# a plain string name filter matches by prefix/partial equality
	fixture = ["Custom Script", {"name": "Item-Cli"}]
	csv_path = dataent.scrub(fixture[0]) + "_simple_name_at_least_equal.csv"
	export_csv(fixture, csv_path)
	self.assertTrue(True)
	os.remove(csv_path)
def test_Doctype_multi_name_equal(self):
	# export ToDos whose names exactly equal one of the listed ids
	fixture = ["ToDo", {"name": ["TDI00000002", "TDI00000008"], "op": "="}]
	csv_path = "Doctype_" + dataent.scrub(fixture[0]) + "_multi_name_equal.csv"
	export_csv(fixture, csv_path)
	self.assertTrue(True)
	os.remove(csv_path)
def test_Custom_Script_fixture_simple_name_equal_default(self):
	# a single-element name list uses the default (equality) operator
	fixture = ["Custom Script", {"name": ["Item-Client"]}]
	csv_path = dataent.scrub(fixture[0]) + "_simple_name_equal_default.csv"
	export_csv(fixture, csv_path)
	self.assertTrue(True)
	os.remove(csv_path)
def get_lead_data(filters, based_on):
	"""Aggregate lead funnel metrics (quotation/opportunity/order counts and
	conversion percentages) grouped by the scrubbed based_on field."""
	based_on_field = dataent.scrub(based_on)
	conditions = get_filter_conditions(filters)

	lead_details = dataent.db.sql(""" select {based_on_field}, name from `tabLead` where {based_on_field} is not null and {based_on_field} != '' {conditions} """.format(based_on_field=based_on_field, conditions=conditions), filters, as_dict=1)

	# group lead names by the based_on value
	lead_map = dataent._dict()
	for detail in lead_details:
		lead_map.setdefault(detail.get(based_on_field), []).append(detail.name)

	data = []
	for based_on_value, leads in lead_map.items():
		lead_count = len(leads)
		quot_count = get_lead_quotation_count(leads)
		opp_count = get_lead_opp_count(leads)
		order_count = get_quotation_ordered_count(leads)
		data.append({
			based_on_field: based_on_value,
			"lead_count": lead_count,
			"quot_count": quot_count,
			"opp_count": opp_count,
			"order_count": order_count,
			"order_value": get_order_amount(leads) or 0,
			# conversion ratios as percentages; denominators guarded against zero
			"opp_lead": flt(opp_count) / flt(lead_count or 1.0) * 100.0,
			"quot_lead": flt(quot_count) / flt(lead_count or 1.0) * 100.0,
			"order_quot": flt(order_count) / flt(quot_count or 1.0) * 100.0,
		})

	return data
def _get_tree_conditions(parenttype, allow_blank=True):
	"""Build a SQL condition matching the tree ancestors of the value held in
	args for the scrubbed parenttype field; memoized in dataent.flags.

	NOTE(review): 'args' is read from an enclosing scope — this appears to be a
	nested helper; confirm against the full file.
	"""
	field = dataent.scrub(parenttype)
	condition = ""
	if args.get(field):
		if not dataent.flags.tree_conditions:
			dataent.flags.tree_conditions = {}
		# memoize per (parenttype, value) for the lifetime of the flags dict
		key = (
			parenttype,
			args[field],
		)
		if key in dataent.flags.tree_conditions:
			return dataent.flags.tree_conditions[key]
		try:
			lft, rgt = dataent.db.get_value(parenttype, args[field], ["lft", "rgt"])
		except TypeError:
			# get_value returned None (record missing) and failed to unpack
			dataent.throw(_("Invalid {0}").format(args[field]))
		# all ancestors of the node, nested-set style (lft/rgt containment)
		parent_groups = dataent.db.sql_list(
			"""select name from `tab%s` where lft<=%s and rgt>=%s""" % (parenttype, '%s', '%s'), (lft, rgt))
		if parent_groups:
			if allow_blank:
				parent_groups.append('')
			condition = " ifnull("+field+", '') in ('" + \
				"', '".join([dataent.db.escape(d) for d in parent_groups])+"')"

		dataent.flags.tree_conditions[key] = condition
	return condition
def get_gl_entries(self, party_type, date=None, for_future=False):
	"""Fetch grouped GL Entry debit/credit sums per voucher and party,
	optionally restricted to on-or-before (default) or after a date.

	Stores the result on self.gl_entries and also returns it.
	"""
	conditions, values = self.prepare_conditions(party_type)

	# use account-currency amounts only when filtering on a specific party
	if self.filters.get(scrub(party_type)):
		select_fields = "sum(debit_in_account_currency) as debit, sum(credit_in_account_currency) as credit"
	else:
		select_fields = "sum(debit) as debit, sum(credit) as credit"

	# NOTE(review): date is interpolated into SQL via %s string formatting —
	# presumably always an internal date value, but parameterizing would be
	# safer; confirm callers.
	if date and not for_future:
		conditions += " and posting_date <= '%s'" % date

	if date and for_future:
		conditions += " and posting_date > '%s'" % date

	self.gl_entries = dataent.db.sql(""" select name, posting_date, account, party_type, party, voucher_type, voucher_no, against_voucher_type, against_voucher, account_currency, remarks, {0} from `tabGL Entry` where docstatus < 2 and party_type=%s and (party is not null and party != '') {1} group by voucher_type, voucher_no, against_voucher_type, against_voucher, party order by posting_date, party""".format(select_fields, conditions), values, as_dict=True)

	return self.gl_entries
def test_Custom_Field_fixture_simple_name_not_equal(self):
	# export Custom Fields whose name is NOT equal to the listed value
	fixture = ["Custom Field", {"name": ["Item-vat"], "op": "!="}]
	csv_path = dataent.scrub(fixture[0]) + "_simple_name_not_equal.csv"
	export_csv(fixture, csv_path)
	self.assertTrue(True)
	os.remove(csv_path)