def execute():
    """Patch: strip embedded <table> markup from Item descriptions and
    propagate the cleaned text to every transactional child table."""
    # Build a map of item -> {old_description, new_description} for all Items.
    item_details = dataent._dict()
    for d in dataent.db.sql("select name, description from `tabItem`", as_dict=1):
        description = cstr(d.description).strip()
        new_desc = extract_description(description)
        item_details.setdefault(d.name, dataent._dict({
            "old_description": description,
            "new_description": new_desc
        }))

    # Child doctypes that copy the Item description at transaction time.
    dt_list = ["Purchase Order Item", "Supplier Quotation Item", "BOM", "BOM Explosion Item",
        "BOM Item", "Opportunity Item", "Quotation Item", "Sales Order Item", "Delivery Note Item",
        "Material Request Item", "Purchase Receipt Item", "Stock Entry Detail"]

    for dt in dt_list:
        dataent.reload_doctype(dt)
        # Only rows whose description still embeds a <table ...> tag need fixing.
        # BOM stores the item under `item`, every other doctype under `item_code`.
        records = dataent.db.sql("""select name, `{0}` as item_code, description from `tab{1}`
            where description is not null and description like '%%<table%%'"""
            .format("item" if dt == "BOM" else "item_code", dt), as_dict=1)

        count = 1
        for d in records:
            # Reuse the precomputed cleanup when the row still carries the
            # Item's current description; otherwise clean this row's own text.
            if d.item_code and item_details.get(d.item_code) \
                    and cstr(d.description) == item_details.get(d.item_code).old_description:
                desc = item_details.get(d.item_code).new_description
            else:
                desc = extract_description(cstr(d.description))

            dataent.db.sql("""update `tab{0}` set description = %s
                where name = %s """.format(dt), (desc, d.name))

            count += 1
            # Commit periodically so a long patch run doesn't hold one huge txn.
            if count % 500 == 0:
                dataent.db.commit()
def set_auto_repeat_period(args, mcount, new_document):
    """Advance from_date/to_date on an auto-repeated document by mcount months,
    based on the most recent previously generated reference document."""
    meta = new_document.meta
    if not (mcount and meta.get_field('from_date') and meta.get_field('to_date')):
        return

    last_ref_doc = dataent.db.sql(""" select name, from_date, to_date
        from `tab{0}`
        where auto_repeat=%s
        and docstatus < 2
        order by creation desc
        limit 1
    """.format(args.reference_doctype), args.name, as_dict=1)

    if not last_ref_doc:
        return

    prev = last_ref_doc[0]
    from_date = get_next_date(prev.from_date, mcount)

    # If the previous period covered whole calendar months, keep the new
    # period snapped to month boundaries as well.
    covers_full_months = (
        cstr(get_first_day(prev.from_date)) == cstr(prev.from_date)
        and cstr(get_last_day(prev.to_date)) == cstr(prev.to_date))

    if covers_full_months:
        to_date = get_last_day(get_next_date(prev.to_date, mcount))
    else:
        to_date = get_next_date(prev.to_date, mcount)

    new_document.set('from_date', from_date)
    new_document.set('to_date', to_date)
def check_stock_uom_with_bin(item, stock_uom):
    """Prevent changing an Item's default UOM once stock transactions exist
    in another UOM; if only empty Bins exist, sync them to the new UOM."""
    if stock_uom == dataent.db.get_value("Item", item, "stock_uom"):
        return

    matched = True
    ref_uom = dataent.db.get_value("Stock Ledger Entry",
        {"item_code": item}, "stock_uom")

    if ref_uom:
        # A ledger entry already fixes the UOM for this item.
        if cstr(ref_uom) != cstr(stock_uom):
            matched = False
    else:
        # No ledger entries: only non-empty Bins block the change.
        bin_list = dataent.db.sql("select * from tabBin where item_code=%s",
            item, as_dict=1)
        for bin_row in bin_list:
            has_qty = (bin_row.reserved_qty > 0 or bin_row.ordered_qty > 0
                or bin_row.indented_qty > 0 or bin_row.planned_qty > 0)
            if has_qty and cstr(bin_row.stock_uom) != cstr(stock_uom):
                matched = False
                break

        if matched and bin_list:
            dataent.db.sql("""update tabBin set stock_uom=%s where item_code=%s""",
                (stock_uom, item))

    if not matched:
        dataent.throw(
            _("Default Unit of Measure for Item {0} cannot be changed directly because you have already made some transaction(s) with another UOM. You will need to create a new Item to use a different Default UOM."
              ).format(item))
def validate_bom(self):
    """Ensure the replacement BOM is different from the current one while
    still producing the same item."""
    if cstr(self.current_bom) == cstr(self.new_bom):
        dataent.throw(_("Current BOM and New BOM can not be same"))

    current_item = dataent.db.get_value("BOM", self.current_bom, "item")
    new_item = dataent.db.get_value("BOM", self.new_bom, "item")
    if current_item != new_item:
        dataent.throw(_("The selected BOMs are not for the same item"))
def validate_filters(filters, account_details):
    """Validate General Ledger report filters; also normalises the
    comma-separated project/cost_center filters into lists."""
    if not filters.get('company'):
        dataent.throw(_('{0} is mandatory').format(_('Company')))

    account = filters.get("account")
    if account and not account_details.get(account):
        dataent.throw(_("Account {0} does not exists").format(account))

    if (account and filters.get("group_by") == _('Group by Account')
            and account_details[account].is_group == 0):
        dataent.throw(_("Can not filter based on Account, if grouped by Account"))

    if (filters.get("voucher_no")
            and filters.get("group_by") in [_('Group by Voucher')]):
        dataent.throw(_("Can not filter based on Voucher No, if grouped by Voucher"))

    if filters.from_date > filters.to_date:
        dataent.throw(_("From Date must be before To Date"))

    # Turn "a, b, c" style filters into clean lists.
    for key in ('project', 'cost_center'):
        if filters.get(key):
            raw = cstr(filters.get(key)).strip()
            filters[key] = [part.strip() for part in raw.split(',') if part]
def get_bom_material_detail(self, args=None):
    """ Get raw material details like uom, desc and rate"""
    if not args:
        args = dataent.form_dict.get('args')

    if isinstance(args, string_types):
        # args may arrive from the client as a JSON string
        import json
        args = json.loads(args)

    item = self.get_item_det(args['item_code'])
    self.validate_rm_item(item)

    # fall back to the item's default BOM when no BOM was supplied
    args['bom_no'] = args['bom_no'] or item and cstr(item[0]['default_bom']) or ''
    # explicit flag from the caller wins; otherwise inherit the item master's
    # include_item_in_manufacturing setting
    args['transfer_for_manufacture'] = (cstr(args.get('include_item_in_manufacturing', '')) or
        item and item[0].include_item_in_manufacturing or 0)
    args.update(item[0])

    rate = self.get_rm_rate(args)
    ret_item = {
        'item_name': item and args['item_name'] or '',
        'description': item and args['description'] or '',
        'image': item and args['image'] or '',
        'stock_uom': item and args['stock_uom'] or '',
        'uom': item and args['stock_uom'] or '',
        'conversion_factor': 1,
        'bom_no': args['bom_no'],
        'rate': rate,
        'qty': args.get("qty") or args.get("stock_qty") or 1,
        'stock_qty': args.get("qty") or args.get("stock_qty") or 1,
        'base_rate': rate,
        'include_item_in_manufacturing': cint(args['transfer_for_manufacture']) or 0
    }
    return ret_item
def test_make_vehicle_log(self):
    """Insert a Vehicle (reusing an existing one on duplicate) and submit a
    Vehicle Log entry against it."""
    license_plate = random_string(10).upper()
    employee_id = dataent.db.sql(
        """select name from `tabEmployee` order by modified desc limit 1""")[0][0]

    vehicle_fields = {
        "doctype": "Vehicle",
        "license_plate": cstr(license_plate),
        "make": "Maruti",
        "model": "PCM",
        "last_odometer": 5000,
        "acquisition_date": dataent.utils.nowdate(),
        "location": "Mumbai",
        "chassis_no": "1234ABCD",
        "uom": "Litre",
        "vehicle_value": dataent.utils.flt(500000)
    }
    try:
        dataent.get_doc(vehicle_fields).insert()
    except dataent.DuplicateEntryError:
        # a vehicle from a previous run is fine to reuse
        pass

    log_fields = {
        "doctype": "Vehicle Log",
        "license_plate": cstr(license_plate),
        "employee": employee_id,
        "date": dataent.utils.nowdate(),
        "odometer": 5010,
        "fuel_qty": dataent.utils.flt(50),
        "price": dataent.utils.flt(500)
    }
    vehicle_log = dataent.get_doc(log_fields)
    vehicle_log.insert()
    vehicle_log.submit()
def get_fy_details(fy_start_date, fy_end_date):
    """Return a fiscal-year label for the given date range.

    A range within one calendar year yields e.g. "2019"; a spanning range
    yields "2019-2020".

    Fix: the label previously hard-coded ``start_year + 1`` as the second
    year; it now uses the actual end year, so ranges spanning more than two
    calendar years are labelled correctly. Behavior is unchanged for the
    normal one-year fiscal period.
    """
    start_year = getdate(fy_start_date).year
    end_year = getdate(fy_end_date).year
    if start_year == end_year:
        fy = cstr(start_year)
    else:
        fy = cstr(start_year) + '-' + cstr(end_year)
    return fy
def allocate_leave(self):
    """Create a submitted Leave Allocation for every selected employee.

    Allocation stays best-effort: an employee whose allocation fails (e.g.
    an overlapping allocation already exists) is skipped and the rest are
    processed.

    Fixes: the bare ``except: pass`` swallowed every exception (including
    KeyboardInterrupt) without a trace — now only ``Exception`` is caught
    and the failure is logged; the employee list is also fetched once
    instead of twice.
    """
    self.validate_values()
    leave_allocated_for = []
    employees = self.get_employees()
    if not employees:
        dataent.throw(_("No employee found"))

    for d in employees:
        try:
            la = dataent.new_doc('Leave Allocation')
            la.set("__islocal", 1)
            la.employee = cstr(d[0])
            la.employee_name = dataent.db.get_value(
                'Employee', cstr(d[0]), 'employee_name')
            la.leave_type = self.leave_type
            la.from_date = self.from_date
            la.to_date = self.to_date
            la.carry_forward = cint(self.carry_forward)
            la.new_leaves_allocated = flt(self.no_of_days)
            la.docstatus = 1
            la.save()
            leave_allocated_for.append(d[0])
        except Exception:
            # keep going for the remaining employees, but leave a trace
            dataent.log_error(dataent.get_traceback(),
                "Leave allocation failed for employee {0}".format(d[0]))

    if leave_allocated_for:
        msgprint(
            _("Leaves Allocated Successfully for {0}").format(
                comma_and(leave_allocated_for)))
def add_calendar_event(self, opts=None, force=False):
    """Create a follow-up calendar event for this Lead via the parent class."""
    subject = 'Contact ' + cstr(self.lead_name)
    description = subject
    if self.contact_by:
        description += '. By : ' + cstr(self.contact_by)

    super(Lead, self).add_calendar_event({
        "owner": self.lead_owner,
        "starts_on": self.contact_date,
        "ends_on": self.ends_on or "",
        "subject": subject,
        "description": description
    }, force)
def is_temporary_system_problem(self, e):
    """Return True if the mail-server error looks like a transient problem
    (temporary system error or connection timeout) worth retrying later.

    Fix: ``e.message`` does not exist on Python 3 exceptions and raised an
    AttributeError here; fall back to ``str(e)`` when the legacy ``message``
    attribute is absent.
    """
    messages = (
        "-ERR [SYS/TEMP] Temporary system problem. Please try again later.",
        "Connection timed out",
    )
    # legacy Python 2 exceptions expose .message; on Python 3 use str(e)
    error_text = strip(cstr(getattr(e, 'message', None) or e))
    strerror_text = strip(cstr(getattr(e, 'strerror', '')))
    for message in messages:
        if message in error_text or message in strerror_text:
            return True

    return False
def get_autoname_with_number(number_value, doc_title, name, company):
    ''' append title with prefix as number and suffix as company's abbreviation separated by '-' '''
    if name:
        # reuse the suffix (the last hyphen-separated part, usually the
        # company abbreviation) from the existing name
        parts = [doc_title.strip(), name.split("-")[-1].strip()]
    else:
        abbr = dataent.get_cached_value('Company', company, ["abbr"], as_dict=True)
        parts = [doc_title.strip(), abbr.abbr]
    if cstr(number_value).strip():
        parts.insert(0, cstr(number_value).strip())
    return ' - '.join(parts)
def get_account_autoname(account_number, account_name, company):
    """Build the Account document name "<number> - <name> - <company abbr>".

    Fix: the "does not exist" message previously formatted the ``company``
    variable *after* it had been overwritten with the ``None`` lookup
    result, so the error read "Company None does not exist"; the original
    caller-supplied name is now preserved for the message.
    """
    # first validate if company exists
    company_name = company
    company = dataent.get_cached_value('Company', company, ["abbr", "name"], as_dict=True)
    if not company:
        dataent.throw(_('Company {0} does not exist').format(company_name))

    parts = [account_name.strip(), company.abbr]
    if cstr(account_number).strip():
        parts.insert(0, cstr(account_number).strip())
    return ' - '.join(parts)
def update_stock_ledger(self, allow_negative_stock=False, via_landed_cost_voucher=False):
    """Build and post Stock Ledger Entries for all stock items on this
    Purchase Receipt, including rejected quantities and return handling."""
    self.update_ordered_and_reserved_qty()

    sl_entries = []
    stock_items = self.get_stock_items()

    for d in self.get('items'):
        if d.item_code in stock_items and d.warehouse:
            # received qty converted to the stock UOM
            pr_qty = flt(d.qty) * flt(d.conversion_factor)

            if pr_qty:
                sle = self.get_sl_entries(d, {
                    "actual_qty": flt(pr_qty),
                    "serial_no": cstr(d.serial_no).strip()
                })
                if self.is_return:
                    # returns go out at the rate the stock originally came in
                    original_incoming_rate = dataent.db.get_value("Stock Ledger Entry",
                        {"voucher_type": "Purchase Receipt", "voucher_no": self.return_against,
                        "item_code": d.item_code}, "incoming_rate")

                    sle.update({"outgoing_rate": original_incoming_rate})
                else:
                    # match the DB column precision when rounding the rate
                    val_rate_db_precision = 6 if cint(self.precision("valuation_rate", d)) <= 6 else 9
                    incoming_rate = flt(d.valuation_rate, val_rate_db_precision)
                    sle.update({"incoming_rate": incoming_rate})
                sl_entries.append(sle)

            if flt(d.rejected_qty) != 0:
                # rejected stock enters the rejected warehouse at zero value
                sl_entries.append(self.get_sl_entries(d, {
                    "warehouse": d.rejected_warehouse,
                    "actual_qty": flt(d.rejected_qty) * flt(d.conversion_factor),
                    "serial_no": cstr(d.rejected_serial_no).strip(),
                    "incoming_rate": 0.0
                }))

    self.make_sl_entries_for_supplier_warehouse(sl_entries)
    self.make_sl_entries(sl_entries, allow_negative_stock=allow_negative_stock,
        via_landed_cost_voucher=via_landed_cost_voucher)
def update_reference_in_journal_entry(d, jv_obj):
    """
    Updates against document, if partial amount splits into rows
    """
    jv_detail = jv_obj.get("accounts", {"name": d["voucher_detail_no"]})[0]
    # set the allocated portion on the matched row, in both account and
    # company currency
    jv_detail.set(d["dr_or_cr"], d["allocated_amount"])
    jv_detail.set('debit' if d['dr_or_cr'] == 'debit_in_account_currency' else 'credit',
        d["allocated_amount"] * flt(jv_detail.exchange_rate))

    # remember the row's original reference before repointing it
    original_reference_type = jv_detail.reference_type
    original_reference_name = jv_detail.reference_name

    jv_detail.set("reference_type", d["against_voucher_type"])
    jv_detail.set("reference_name", d["against_voucher"])

    if d['allocated_amount'] < d['unadjusted_amount']:
        # partial allocation: split the remainder into a new accounts row
        jvd = dataent.db.sql("""
            select cost_center, balance, against_account, is_advance,
                account_type, exchange_rate, account_currency
            from `tabJournal Entry Account` where name = %s
        """, d['voucher_detail_no'], as_dict=True)

        amount_in_account_currency = flt(d['unadjusted_amount']) - flt(d['allocated_amount'])
        amount_in_company_currency = amount_in_account_currency * flt(jvd[0]['exchange_rate'])

        # new entry with balance amount
        ch = jv_obj.append("accounts")
        ch.account = d['account']
        ch.account_type = jvd[0]['account_type']
        ch.account_currency = jvd[0]['account_currency']
        ch.exchange_rate = jvd[0]['exchange_rate']
        ch.party_type = d["party_type"]
        ch.party = d["party"]
        ch.cost_center = cstr(jvd[0]["cost_center"])
        ch.balance = flt(jvd[0]["balance"])
        # remainder on the same side as the allocation; zero the other side
        ch.set(d['dr_or_cr'], amount_in_account_currency)
        ch.set('debit' if d['dr_or_cr'] == 'debit_in_account_currency' else 'credit',
            amount_in_company_currency)
        ch.set('credit_in_account_currency' if d['dr_or_cr'] ==
            'debit_in_account_currency' else 'debit_in_account_currency', 0)
        ch.set('credit' if d['dr_or_cr'] ==
            'debit_in_account_currency' else 'debit', 0)
        ch.against_account = cstr(jvd[0]["against_account"])
        # the new row keeps pointing at whatever the original row referenced
        ch.reference_type = original_reference_type
        ch.reference_name = original_reference_name
        ch.is_advance = cstr(jvd[0]["is_advance"])
        ch.docstatus = 1

    # will work as update after submit
    jv_obj.flags.ignore_validate_update_after_submit = True
    jv_obj.save(ignore_permissions=True)
def autoname(self):
    """Name the Address "<title>-<type>"; on collision fall back to a
    numbered series via make_autoname."""
    if not self.address_title and self.links:
        self.address_title = self.links[0].link_name

    if not self.address_title:
        throw(_("Address Title is mandatory."))

    title = cstr(self.address_title).strip()
    self.name = title + "-" + cstr(_(self.address_type)).strip()
    if dataent.db.exists("Address", self.name):
        # NOTE: the series fallback uses the untranslated address_type,
        # matching the original behavior
        self.name = make_autoname(
            title + "-" + cstr(self.address_type).strip() + "-.#")
def validate_conversion_factor(self):
    """Reject duplicate UOM rows and a conversion factor other than 1 for
    the item's stock UOM."""
    seen_uoms = []
    for row in self.get('uoms'):
        uom = cstr(row.uom)
        if uom in seen_uoms:
            dataent.throw(
                _("Unit of Measure {0} has been entered more than once in Conversion Factor Table"
                  ).format(row.uom))
        seen_uoms.append(uom)

        if row.uom and uom == cstr(self.stock_uom) and flt(row.conversion_factor) != 1:
            dataent.throw(
                _("Conversion factor for default Unit of Measure must be 1 in row {0}"
                  ).format(row.idx))
def render_include(content):
    '''render {% raw %}{% include "app/path/filename" %}{% endraw %} in js file'''
    content = cstr(content)

    # try 5 levels of includes
    for i in range(5):
        if "{% include" not in content:
            break

        paths = re.findall(r'''{% include\s['"](.*)['"]\s%}''', content)
        if not paths:
            dataent.throw('Invalid include path', InvalidIncludePath)

        for path in paths:
            app, app_path = path.split('/', 1)
            with io.open(dataent.get_app_path(app, app_path), 'r', encoding='utf-8') as f:
                include = f.read()
                if path.endswith('.html'):
                    include = html_to_js_template(path, include)

            # Fixes:
            # - re.escape(path): '.' and other regex metacharacters in the
            #   filename previously leaked into the substitution pattern
            # - a callable replacement: file content containing backslashes
            #   (e.g. '\n' in JS strings) was previously interpreted as
            #   regex escape sequences by re.sub
            content = re.sub(
                r'''{{% include\s['"]{0}['"]\s%}}'''.format(re.escape(path)),
                lambda match, include=include: include, content)

    return content
def _push_insert(self):
    '''Inserts new local docs on remote'''
    mapping = self.get_mapping(self.current_mapping)
    connection = self.get_connection()
    data = self.get_new_local_data()

    for local_doc in data:
        # pre process before insert
        mapped = mapping.get_mapped_record(self.pre_process_doc(local_doc))
        try:
            remote_doc = connection.insert(mapping.remote_objectname, mapped)
            # remember the remote identity on the local record
            dataent.db.set_value(mapping.local_doctype, local_doc.name,
                mapping.migration_id_field, remote_doc[connection.name_field],
                update_modified=False)
            dataent.db.commit()
            self.update_log('push_insert', 1)
            # post process after insert
            self.post_process_doc(local_doc=local_doc, remote_doc=remote_doc)
        except Exception as e:
            self.update_log('push_failed', {local_doc.name: cstr(e)})

    # advance the page cursor
    self.db_set('current_mapping_start',
        self.current_mapping_start + mapping.page_length)

    if len(data) < mapping.page_length:
        # done, no more new data to insert; switch to the Update phase
        self.db_set({
            'current_mapping_action': 'Update',
            'current_mapping_start': 0
        })

    # not done with this mapping
    return False
def build_account_tree(tree, parent, all_accounts):
    """Recursively build the nested chart-of-accounts dict for ``parent``'s
    children (``parent=None`` selects root accounts).

    Fix: ``parent.is_group`` was dereferenced without checking ``parent``
    for None, so the root call crashed with AttributeError when
    ``all_accounts`` contained no root-level accounts.
    """
    # find children
    parent_account = parent.name if parent else ""
    children = [
        acc for acc in all_accounts
        if cstr(acc.parent_account) == parent_account
    ]

    # if no children, but a group account
    if not children and parent and parent.is_group:
        tree["is_group"] = 1
        tree["account_number"] = parent.account_number

    # build a subtree for each child
    for child in children:
        # start new subtree
        tree[child.account_name] = {}

        # assign account_type and root_type
        if child.account_number:
            tree[child.account_name]["account_number"] = child.account_number
        if child.account_type:
            tree[child.account_name]["account_type"] = child.account_type
        if child.tax_rate:
            tree[child.account_name]["tax_rate"] = child.tax_rate
        if not parent:
            tree[child.account_name]["root_type"] = child.root_type

        # call recursively to build a subtree for current account
        build_account_tree(tree[child.account_name], child, all_accounts)
def get_mode_of_payments(filters):
    """Return {owner + posting_date: [mode_of_payment, ...]} for the invoices
    matching the report filters, gathering payments made directly on the
    invoice, via Payment Entry, and via Journal Entry."""
    mode_of_payments = {}
    invoice_list = get_invoices(filters)
    # NOTE(review): names are interpolated straight into the SQL below; they
    # come from our own DB via get_invoices, not from user input
    invoice_list_names = ",".join(
        ['"' + invoice['name'] + '"' for invoice in invoice_list])
    if invoice_list:
        inv_mop = dataent.db.sql(
            """select a.owner,a.posting_date, ifnull(b.mode_of_payment, '') as mode_of_payment
            from `tabSales Invoice` a, `tabSales Invoice Payment` b
            where a.name = b.parent
            and a.docstatus = 1
            and a.name in ({invoice_list_names})
            union
            select a.owner,a.posting_date, ifnull(b.mode_of_payment, '') as mode_of_payment
            from `tabSales Invoice` a, `tabPayment Entry` b,`tabPayment Entry Reference` c
            where a.name = c.reference_name
            and b.name = c.parent
            and b.docstatus = 1
            and a.name in ({invoice_list_names})
            union
            select a.owner, a.posting_date, ifnull(a.voucher_type,'') as mode_of_payment
            from `tabJournal Entry` a, `tabJournal Entry Account` b
            where a.name = b.parent
            and a.docstatus = 1
            and b.reference_type = "Sales Invoice"
            and b.reference_name in ({invoice_list_names})
            """.format(invoice_list_names=invoice_list_names), as_dict=1)
        for d in inv_mop:
            # key combines the invoice owner and posting date
            mode_of_payments.setdefault(d["owner"] + cstr(d["posting_date"]),
                []).append(d.mode_of_payment)
    return mode_of_payments
def validate_item(self, item_code, row_num):
    """Validate one reconciliation row's item; failures are appended to
    self.validation_messages so all rows can be reported together."""
    from epaas.stock.doctype.item.item import validate_end_of_life, \
        validate_is_stock_item, validate_cancelled_item

    # every check runs inside one try so the row's first failure is
    # captured as a message instead of aborting the whole document
    try:
        item = dataent.get_doc("Item", item_code)

        # end of life and stock item
        validate_end_of_life(item_code, item.end_of_life, item.disabled, verbose=0)
        validate_is_stock_item(item_code, item.is_stock_item, verbose=0)

        # item should not be serialized
        if item.has_serial_no == 1:
            raise dataent.ValidationError(
                _("Serialized Item {0} cannot be updated using Stock Reconciliation, please use Stock Entry"
                  ).format(item_code))

        # item managed batch-wise not allowed
        if item.has_batch_no == 1:
            raise dataent.ValidationError(
                _("Batched Item {0} cannot be updated using Stock Reconciliation, instead use Stock Entry"
                  ).format(item_code))

        # docstatus should be < 2
        validate_cancelled_item(item_code, item.docstatus, verbose=0)

    except Exception as e:
        row_prefix = _("Row # ") + ("%d: " % (row_num))
        self.validation_messages.append(row_prefix + cstr(e))
def _validate_selects(self):
    """Ensure every Select field's value is one of its declared options.

    Fix: ``not filter(None, options)`` is always False on Python 3 because
    ``filter`` returns a lazy iterator (always truthy), so fields whose
    options were all empty strings were never skipped; ``any(options)``
    restores the intended check.
    """
    if dataent.flags.in_import:
        return

    for df in self.meta.get_select_fields():
        if df.fieldname == "naming_series" or not (self.get(df.fieldname) and df.options):
            continue

        options = (df.options or "").split("\n")

        # skip if only empty options
        if not any(options):
            continue

        # strip and set
        self.set(df.fieldname, cstr(self.get(df.fieldname)).strip())
        value = self.get(df.fieldname)

        if value not in options and not (dataent.flags.in_test and value.startswith("_T-")):
            # show an elaborate message
            prefix = _("Row #{0}:").format(self.idx) if self.get("parentfield") else ""
            label = _(self.meta.get_label(df.fieldname))
            comma_options = '", "'.join(_(each) for each in options)

            dataent.throw(
                _('{0} {1} cannot be "{2}". It should be one of "{3}"').format(
                    prefix, label, value, comma_options))
def _validate_length(self):
    """Throw CharacterLengthExceededError when any field value is longer
    than its database column allows."""
    if dataent.flags.in_install:
        return

    if self.meta.issingle:
        # single doctype value type is mediumtext
        return

    # only these column types have a meaningful max length to enforce
    column_types_to_check_length = ('varchar', 'int', 'bigint')

    for fieldname, value in iteritems(self.get_valid_dict()):
        df = self.meta.get_field(fieldname)
        if not df or df.fieldtype == 'Check':
            # skip standard fields and Check fields
            continue

        column_type = type_map[df.fieldtype][0] or None
        default_column_max_length = type_map[df.fieldtype][1] or None

        if df and df.fieldtype in type_map and column_type in column_types_to_check_length:
            # field-level length overrides the type's default
            max_length = cint(df.get("length")) or cint(default_column_max_length)

            if len(cstr(value)) > max_length:
                # identify the offending row/document in the error message
                if self.parentfield and self.idx:
                    reference = _("{0}, Row {1}").format(_(self.doctype), self.idx)
                else:
                    reference = "{0} {1}".format(_(self.doctype), self.name)

                dataent.throw(_("{0}: '{1}' ({3}) will get truncated, as max characters allowed is {2}")\
                    .format(reference, _(df.label), max_length, value),
                    dataent.CharacterLengthExceededError, title=_('Value too big'))
def update_packing_list_item(doc, packing_item_code, qty, main_item_row, description):
    """Add or refresh a packed-items row for a product-bundle child item and
    update its warehouse/batch defaults and bin quantities.

    Fixes: removed the redundant ``pi.description = item.description``
    assignment (it was unconditionally overwritten by the ``description``
    argument a few lines later) and stopped shadowing the ``bin`` builtin.
    """
    item = get_packing_item_details(packing_item_code, doc.company)

    # check if a matching packed row already exists
    exists = 0
    for d in doc.get("packed_items"):
        if d.parent_item == main_item_row.item_code and d.item_code == packing_item_code and \
                d.parent_detail_docname == main_item_row.name and d.description == description:
            pi, exists = d, 1
            break

    if not exists:
        pi = doc.append('packed_items', {})

    pi.parent_item = main_item_row.item_code
    pi.item_code = packing_item_code
    pi.item_name = item.item_name
    pi.parent_detail_docname = main_item_row.name
    pi.uom = item.stock_uom
    pi.qty = flt(qty)
    pi.description = description
    if not pi.warehouse:
        # POS (or an item without a default warehouse) inherits the parent
        # row's warehouse; otherwise the item default wins
        pi.warehouse = (main_item_row.warehouse
            if ((doc.get('is_pos') or not item.default_warehouse) and main_item_row.warehouse)
            else item.default_warehouse)
    if not pi.batch_no:
        pi.batch_no = cstr(main_item_row.get("batch_no"))
    if not pi.target_warehouse:
        pi.target_warehouse = main_item_row.get("target_warehouse")

    bin_qty = get_bin_qty(packing_item_code, pi.warehouse)
    pi.actual_qty = flt(bin_qty.get("actual_qty"))
    pi.projected_qty = flt(bin_qty.get("projected_qty"))
def _push_delete(self):
    '''Deletes docs deleted from local on remote'''
    mapping = self.get_mapping(self.current_mapping)
    connection = self.get_connection()
    data = self.get_deleted_local_data()

    for deleted_doc in data:
        # Deleted Document also has a custom field for migration_id
        migration_id_value = deleted_doc.get(mapping.migration_id_field)

        # pre process before delete
        self.pre_process_doc(deleted_doc)

        try:
            remote_doc = connection.delete(mapping.remote_objectname, migration_id_value)
            self.update_log('push_delete', 1)
            # post process only when action is success
            self.post_process_doc(local_doc=deleted_doc, remote_doc=remote_doc)
        except Exception as e:
            self.update_log('push_failed', {deleted_doc.name: cstr(e)})

    # advance the page cursor
    self.db_set('current_mapping_start',
        self.current_mapping_start + mapping.page_length)

    if len(data) < mapping.page_length:
        # a short page means there is nothing left to delete —
        # done with this mapping
        return True
def _push_update(self):
    '''Updates local modified docs on remote'''
    mapping = self.get_mapping(self.current_mapping)
    connection = self.get_connection()
    data = self.get_updated_local_data()

    for local_doc in data:
        migration_id_value = local_doc.get(mapping.migration_id_field)
        # pre process before update
        mapped = mapping.get_mapped_record(self.pre_process_doc(local_doc))
        try:
            remote_doc = connection.update(mapping.remote_objectname,
                mapped, migration_id_value)
            self.update_log('push_update', 1)
            # post process after update
            self.post_process_doc(local_doc=local_doc, remote_doc=remote_doc)
        except Exception as e:
            self.update_log('push_failed', {local_doc.name: cstr(e)})

    # advance the page cursor
    self.db_set('current_mapping_start',
        self.current_mapping_start + mapping.page_length)

    if len(data) < mapping.page_length:
        # done, no more data to update; switch to the Delete phase
        self.db_set({
            'current_mapping_action': 'Delete',
            'current_mapping_start': 0
        })

    # not done with this mapping
    return False
def db_update(self):
    """Persist this document's valid fields with an UPDATE statement
    (falls back to db_insert for still-local documents)."""
    if self.get("__islocal") or not self.name:
        self.db_insert()
        return

    d = self.get_valid_dict(convert_dates_to_str=True)

    # don't update name, as case might've been changed
    name = d.pop('name')
    columns = list(d)

    try:
        dataent.db.sql(
            """update `tab{doctype}` set {values} where name=%s""".format(
                doctype=self.doctype,
                values=", ".join("`" + c + "`=%s" for c in columns)),
            list(d.values()) + [name])
    except Exception as e:
        # MySQL error 1062 = duplicate entry on a unique key
        if e.args[0] == 1062 and "Duplicate" in cstr(e.args[1]):
            self.show_unique_validation_message(e)
        else:
            raise
def validate_for_items(doc):
    """Validate item rows on a buying document: quantities present,
    warehouses set for stock items, no duplicate items (unless allowed),
    and refresh projected/ordered/received quantities."""
    seen_items = set()
    has_duplicates = False

    for d in doc.get("items"):
        if not d.qty:
            if doc.doctype == "Purchase Receipt" and d.rejected_qty:
                continue
            dataent.throw(_("Please enter quantity for Item {0}").format(d.item_code))

        # update with latest quantities
        bin_rows = dataent.db.sql("""select projected_qty from `tabBin`
            where item_code = %s and warehouse = %s""",
            (d.item_code, d.warehouse), as_dict=1)

        qty_fields = {
            'projected_qty': bin_rows and flt(bin_rows[0]['projected_qty']) or 0,
            'ordered_qty': 0,
            'received_qty': 0
        }
        if d.doctype in ('Purchase Receipt Item', 'Purchase Invoice Item'):
            qty_fields.pop('received_qty')
        for fieldname, qty in qty_fields.items():
            if d.meta.get_field(fieldname):
                d.set(fieldname, qty)

        item = dataent.db.sql("""select is_stock_item, is_sub_contracted_item,
            end_of_life, disabled from `tabItem` where name=%s""",
            d.item_code, as_dict=1)[0]

        validate_end_of_life(d.item_code, item.end_of_life, item.disabled)

        # validate stock item
        if item.is_stock_item == 1 and d.qty and not d.warehouse and not d.get("delivered_by_supplier"):
            dataent.throw(_("Warehouse is mandatory for stock Item {0} in row {1}").format(
                d.item_code, d.idx))

        item_code = cstr(d.item_code)
        if item_code in seen_items:
            has_duplicates = True
        seen_items.add(item_code)

    if seen_items and has_duplicates and \
            not cint(dataent.db.get_single_value("Buying Settings", "allow_multiple_items") or 0):
        dataent.throw(_("Same item cannot be entered multiple times."))
def _get_missing_mandatory_fields(self):
    """Get mandatory fields that do not have any values"""

    def get_msg(df):
        # compose a context-aware error string for the missing field
        if df.fieldtype == "Table":
            return "{}: {}: {}".format(_("Error"), _("Data missing in table"), _(df.label))
        if self.parentfield:
            return "{}: {} {} #{}: {}: {}".format(
                _("Error"), dataent.bold(_(self.doctype)), _("Row"), self.idx,
                _("Value missing for"), _(df.label))
        return _("Error: Value missing for {0}: {1}").format(_(df.parent), _(df.label))

    missing = []
    for df in self.meta.get("fields", {"reqd": ('=', 1)}):
        value = self.get(df.fieldname)
        # empty when unset, an empty table, or only whitespace/markup
        if value in (None, []) or not strip_html(cstr(value)).strip():
            missing.append((df.fieldname, get_msg(df)))

    # check for missing parent and parenttype
    if self.meta.istable:
        for fieldname in ("parent", "parenttype"):
            if not self.get(fieldname):
                missing.append(
                    (fieldname, get_msg(dataent._dict(label=fieldname))))

    return missing