def export_data(pool, cr, uid, model, fn, field_list, arg):
    """Export selected fields of ``model`` to the CSV file ``fn``.

    :param pool: OpenERP object pool (registry)
    :param cr: database cursor
    :param uid: id of the executing user
    :param model: model name to export, e.g. ``'res.partner'``
    :param fn: path of the CSV file to write
    :param field_list: names of the fields to include in the export
    :param arg: search domain as a *string* (evaluated) or a falsy value
                for an empty domain
    :return: list of exported rows (without the header row)
    """
    # NOTE(review): ``eval`` on a caller-supplied string is dangerous if the
    # value can come from untrusted input — confirm callers only pass
    # hard-coded domains.
    if arg:
        arg=eval(arg)
    else:
        arg=[]
    obj=pool.get(model)
    # Restrict the model's field definitions to the requested field_list.
    fields = obj.fields_get(cr, uid)
    f_map={}
    for f,v in fields.items():
        if f in field_list:
            f_map[f]=v
    fields = f_map
    #id_ref_ids = pool.get('ir.model.data').search(cr, uid, [('model','=',model)])
    #ref_ids = [x.res_id for x in pool.get('ir.model.data').browse(cr, uid, id_ref_ids)]
    ids = pool.get(model).search(cr, uid, arg)
    # Build the export header: ``header`` holds plain field names (used for
    # type lookups below), ``header_export`` the import/export column spec
    # understood by export_data() (``field/id``, ``field/code``, ...).
    header=[]
    header_export=['id']
    for f, v in fields.items():
        if 'function' not in v:
            if v['type'] in ['many2one', 'many2many']:
                # Accounts and journals are referenced by their code rather
                # than by XML id so re-import matches on the business key.
                if v['relation'] in ['account.account', 'account.journal']:
                    header_export.append( "%s/code" % f )
                #elif v['relation'] in ['account.tax']:
                #    header_export.append( "%s/description" % f )
                else:
                    header_export.append( "%s/id" % f )
                header.append(f)
            elif v['type']=='one2many':
                # one2many columns are skipped entirely
                pass
            else:
                header.append(f)
                header_export.append(f)
    header_types = [fields[x]['type'] for x in header]
    data = pool.get(model).export_data(cr, uid, ids, header_export)
    # Post-process rows: blank out False for non-boolean columns, encode the
    # rest as UTF-8 (Python 2 semantics: values are unicode strings here).
    out=[]
    for row in data['datas']:
        out_row=[row[0]]
        for i,h in enumerate(header):
            v=row[i+1]
            t=header_types[i]
            if (v is False) and (t != 'boolean'):
                out_row.append('')
            else:
                out_row.append(v.encode('utf8'))
        out.append(out_row)
    import csv
    # NOTE(review): 'wb' + csv.writer is the Python 2 idiom; under Python 3
    # this must become open(fn, 'w', newline='').
    fp = open(fn, 'wb')
    csv_writer=csv.writer(fp)
    csv_writer.writerows( [header_export] )
    csv_writer.writerows( out )
    fp.close()
    return out
def generate_shop_attributes(self, cr, uid, ids, context=None): attr_loc_obj = self.pool.get('attribute.shop.location') attr_obj = self.pool.get('product.attribute') model_id = self.pool.get('ir.model').search(cr, uid, [('model', '=', 'product.product')], context=context)[0] for shop in self.browse(cr, uid, ids, context=context): fields = self._prepare_attribute_shop_fields(cr, uid, context=context) for field, field_type in fields.items(): attribute_loc_ids = attr_loc_obj.search(cr, uid, [('shop_id', '=', shop.id),('external_name', '=', field)], context=context) if not attribute_loc_ids: field_name = 'x_shop%s_attr_%s' %(shop.id, field) prod_attribute_ids = attr_obj.search(cr, uid, [('name', '=', field_name)], context=context) if not prod_attribute_ids: vals = { 'name': field_name, 'field_description': field, 'attribute_type': field_type, 'based_on': 'product_product', 'translate': field_type in ('char', 'text'), } prod_attribute_id = attr_obj.create(cr, uid, vals, context=context) else: prod_attribute_id = prod_attribute_ids[0] attribute_id = attr_loc_obj.create(cr, uid, { 'external_name': field, 'attribute_id': prod_attribute_id, 'shop_id': shop.id }, context=context) attribute_loc_ids.append(attribute_id) return attribute_loc_ids
def compare_registries(cr, module, registry, local_registry):
    """ OpenUpgrade: Compare the local registry with the global registry,
    log any differences and merge the local registry with the global one.
    """
    # Nothing to record if the analysis table has not been installed.
    if not table_exists(cr, 'openupgrade_record'):
        return
    for model, fields in local_registry.items():
        registry.setdefault(model, {})
        for field, attributes in fields.items():
            # old_field aliases the dict stored in the global registry, so
            # assignments below merge in place.
            old_field = registry[model].setdefault(field, {})
            # A field already known globally is a modification, otherwise
            # it is a creation.
            mode = old_field and 'modify' or 'create'
            record_id = False
            for key, value in attributes.items():
                # Only record attributes that are new or changed.
                if key not in old_field or old_field[key] != value:
                    if not record_id:
                        # Lazily create the openupgrade_record row only when
                        # at least one attribute actually differs.
                        record_id = get_record_id(cr, module, model, field, mode)
                    # Avoid duplicate (name, value, record) rows.
                    cr.execute(
                        "SELECT id FROM openupgrade_attribute "
                        "WHERE name = %s AND value = %s AND "
                        "record_id = %s",
                        (key, value, record_id))
                    if not cr.fetchone():
                        cr.execute(
                            "INSERT INTO openupgrade_attribute "
                            "(name, value, record_id) VALUES (%s, %s, %s)",
                            (key, value, record_id))
                    # Merge the new value into the global registry.
                    old_field[key] = value
def compare_registries(cr, module, registry, local_registry):
    """OpenUpgrade: compare the local registry against the global one,
    persist any attribute differences in the openupgrade tables and merge
    the local registry into the global registry."""
    if not table_exists(cr, 'openupgrade_record'):
        # Analysis tables are absent: nothing to record.
        return
    for model_name, model_fields in local_registry.items():
        merged_model = registry.setdefault(model_name, {})
        for field_name, new_attrs in model_fields.items():
            # merged_field aliases the global registry entry, so writes
            # below update the global registry in place.
            merged_field = merged_model.setdefault(field_name, {})
            mode = 'modify' if merged_field else 'create'
            record_id = False
            for attr, attr_value in new_attrs.items():
                if attr in merged_field and merged_field[attr] == attr_value:
                    continue  # unchanged attribute, nothing to log
                if not record_id:
                    # Create the record row lazily, on the first difference.
                    record_id = get_record_id(
                        cr, module, model_name, field_name, mode)
                cr.execute(
                    "SELECT id FROM openupgrade_attribute "
                    "WHERE name = %s AND value = %s AND "
                    "record_id = %s",
                    (attr, attr_value, record_id)
                )
                if not cr.fetchone():
                    cr.execute(
                        "INSERT INTO openupgrade_attribute "
                        "(name, value, record_id) VALUES (%s, %s, %s)",
                        (attr, attr_value, record_id)
                    )
                merged_field[attr] = attr_value
def _data_write(self, cr, uid, model_obj, data, schema=None, context=None):
    """Create or update one record of ``model_obj`` from the plain dict
    ``data`` and return its reference-only representation.

    :param model_obj: target model object
    :param data: field-name -> raw value dict; may carry META_ID (uuid)
    :param schema: optional schema name forwarded to data_def/_parse_field
    :raises osv.except_osv: if no field of ``data`` matches the data
        definition (nothing to write)
    :return: result of ``_data_read(..., refonly=True)`` on the written record
    """
    # Field definitions drive which keys of ``data`` are accepted.
    fields = self.data_def(cr, uid, model_obj._name, schema=schema,
                           context=context)["fields"]
    # Resolve an existing record id from the uuid, if any.
    mapping_obj = self.pool.get("res.mapping")
    uuid = data.get(META_ID)
    oid = None
    if uuid:
        oid = mapping_obj.get_id(cr, uid, model_obj._name, uuid)
    # Parse every defined field that is present in the payload.
    values = {}
    for name, field_def in fields.items():
        if name not in data:  # fix: was data.has_key(name) (removed in Py3)
            continue
        field_data = data[name]
        values[name] = self._parse_field(cr, uid, name, field_data, field_def,
                                         schema=schema, context=context)
    # Refuse a payload that matched no defined field at all.
    if not values:
        raise osv.except_osv(_("Error!"),
                             _("Cannot write %s into model %s") % (data, model_obj._name))
    if oid:
        # write existing record
        model_obj.write(cr, uid, [oid], values, context=context)
    else:
        # create new record
        oid = model_obj.create(cr, uid, values, context=context)
    # Return a reference-only read of the resulting record.
    obj = model_obj.browse(cr, uid, oid, context=context)
    return self._data_read(cr, uid, model_obj, obj, schema=schema,
                           refonly=True, context=context)
def _data_read(self, cr, uid, model_obj, obj, schema=None, refonly=False, context=None):
    """Serialize browse record ``obj`` into a plain dict driven by the data
    definition of ``model_obj``.

    :param obj: browse record to serialize
    :param schema: optional schema name forwarded to data_def()
    :param refonly: if True, return only the meta keys (model/schema/uuid)
    :return: dict with META_* keys plus one entry per visible field
    """
    res = {}
    definition = self.data_def(cr, uid, model_obj._name, schema=schema, context=context)
    model = definition["model"]
    res[META_MODEL]=model
    # Only publish the schema when it differs from the bare model name.
    schema=definition.get("schema")
    if schema and schema != model:
        res[META_SCHEMA]=schema
    # Stable external identifier for this record.
    mapping_obj = self.pool.get("res.mapping")
    res[META_ID]=mapping_obj.get_uuid(cr, uid, model_obj._name, obj.id)
    if refonly:
        # Reference-only read: meta keys are enough.
        return res
    fields = definition["fields"]
    for name, attrib in fields.items():
        # skip fields flagged hidden by the data definition
        if attrib.get("hidden"):
            continue
        # dtype: "c"=composite, "r"=reference, "l"=list, else primitive
        dtype = attrib["dtype"]
        # reset value
        value = None
        # composite, reference and list types resolve through the mapping
        if dtype in ("c","r","l"):
            # related model and optional schema for the nested read
            dtype_model = attrib["model"]
            dtype_model_schema = attrib.get("schema")
            # Helper: nested serialization of one related record via its
            # uuid; returns None when the record has no uuid mapping.
            def get_value(dtype_obj,refonly):
                if not dtype_obj:
                    return None
                uuid = mapping_obj.get_uuid(cr, uid, dtype_model, dtype_obj.id)
                return uuid and self.data_read(cr, uid, dtype_model, uuid, schema=dtype_model_schema, refonly=refonly, context=context) or None
            # single-valued types: composite reads fully, reference reads
            # meta keys only
            if dtype in ("c","r"):
                dtype_obj = getattr(obj, attrib["name"])
                value = get_value(dtype_obj,refonly=(dtype=="r"))
            else:
                # list type: serialize each member; ltype decides whether
                # members are references or full composites
                dtype_objs = getattr(obj, attrib["name"])
                value = []
                for dtype_obj in dtype_objs:
                    list_value = get_value(dtype_obj,refonly=(attrib.get("ltype")=="r"))
                    if list_value:
                        value.append(list_value)
        # primitive values are copied through as-is
        else:
            value = getattr(obj, attrib["name"])
        res[name]=value
    return res
def update_relatives(nid, ref_id, ref_model):
    """Resolve the resource behind workflow node ``nid`` and recursively
    propagate resolution to its neighbouring nodes.

    Closure over ``transitions``, ``nodes``, ``cr``, ``uid``, ``context``,
    ``current_user`` and ``Env`` from the enclosing scope — presumably a
    workflow/process rendering helper; confirm against the outer function.
    """
    # Neighbours = nodes connected to ``nid`` by any transition, either way.
    relatives = []
    for dummy, tr in transitions.items():
        if tr['source'] == nid:
            relatives.append(tr['target'])
        if tr['target'] == nid:
            relatives.append(tr['source'])
    if not ref_id:
        # No concrete record: mark the node resolved-to-nothing and stop.
        nodes[nid]['res'] = False
        return
    # ``resource`` aliases nodes[nid]['res']; enriched in place below.
    nodes[nid]['res'] = resource = {'id': ref_id, 'model': ref_model}
    refobj = self.pool[ref_model].browse(cr, uid, ref_id, context=context)
    fields = self.pool[ref_model].fields_get(cr, uid, context=context)
    # check for directory_id from inherited from document module
    if nodes[nid].get('directory_id', False):
        resource['directory'] = self.pool[
            'document.directory'].get_resource_path(
            cr, uid, nodes[nid]['directory_id'], ref_model, ref_id)
    resource['name'] = self.pool[ref_model].name_get(
        cr, uid, [ref_id], context=context)[0][1]
    resource['perm'] = self.pool[ref_model].perm_read(
        cr, uid, [ref_id], context=context)[0]
    # Evaluate the node's state expression against the record; errors are
    # deliberately swallowed (expression is user-configured data).
    ref_expr_context = Env(refobj, current_user)
    try:
        if not nodes[nid]['gray']:
            nodes[nid]['active'] = eval(nodes[nid]['model_states'], ref_expr_context)
    except:
        pass
    # Recurse into unresolved neighbours: same-model neighbours reuse this
    # record; otherwise follow a relational field pointing at their model.
    for r in relatives:
        node = nodes[r]
        if 'res' not in node:
            for n, f in fields.items():
                if node['model'] == ref_model:
                    update_relatives(r, ref_id, ref_model)
                elif f.get('relation') == node['model']:
                    rel = refobj[n]
                    # x2many values come back as a list: take the first one.
                    if rel and isinstance(rel, list):
                        rel = rel[0]
                    try:
                        # XXX: rel has been reported as string (check it)
                        _id = (rel or False) and rel.id
                        _model = node['model']
                        update_relatives(r, _id, _model)
                    except:
                        pass
def update_relatives(nid, ref_id, ref_model):
    """Resolve the record behind workflow node ``nid`` and recursively
    resolve its neighbouring nodes (closure over ``nodes``, ``transitions``,
    ``pool``, ``cr``, ``uid``, ``context``, ``Env`` and ``current_user``)."""
    # Nodes directly connected to ``nid`` through any transition.
    neighbors = []
    for _key, transition in transitions.items():
        if transition['source'] == nid:
            neighbors.append(transition['target'])
        if transition['target'] == nid:
            neighbors.append(transition['source'])
    if not ref_id:
        nodes[nid]['res'] = False
        return
    # ``resource`` is stored on the node and enriched in place below.
    resource = {'id': ref_id, 'model': ref_model}
    nodes[nid]['res'] = resource
    model_obj = pool.get(ref_model)
    record = model_obj.browse(cr, uid, ref_id, context=context)
    record_fields = model_obj.fields_get(cr, uid, context=context)
    # directory_id is inherited from the document module
    if nodes[nid].get('directory_id', False):
        resource['directory'] = self.pool.get('document.directory').get_resource_path(cr, uid, nodes[nid]['directory_id'], ref_model, ref_id)
    resource['name'] = model_obj.name_get(cr, uid, [ref_id], context=context)[0][1]
    resource['perm'] = model_obj.perm_read(cr, uid, [ref_id], context=context)[0]
    # Evaluate the configured state expression; failures are swallowed on
    # purpose (the expression is user data).
    ref_expr_context = Env(record, current_user)
    try:
        if not nodes[nid]['gray']:
            nodes[nid]['active'] = eval(nodes[nid]['model_states'], ref_expr_context)
    except:
        pass
    # Recurse into every neighbour that is still unresolved.
    for neighbor in neighbors:
        node = nodes[neighbor]
        if 'res' in node:
            continue
        for fname, fdef in record_fields.items():
            if node['model'] == ref_model:
                # Same model: the neighbour resolves to the same record.
                update_relatives(neighbor, ref_id, ref_model)
            elif fdef.get('relation') == node['model']:
                rel = record[fname]
                if rel and isinstance(rel, list):
                    rel = rel[0]
                try:
                    # XXX: rel has been reported as string (check it)
                    _id = (rel or False) and rel.id
                    _model = node['model']
                    update_relatives(neighbor, _id, _model)
                except:
                    pass
def generate_shop_attributes(self, cr, uid, ids, context=None):
    """Ensure each external attribute field of every shop in ``ids`` has a
    matching ``product.attribute`` and an ``attribute.shop.location`` record.

    :param ids: ids of the shops to process
    :return: the local ``attribute_loc_ids`` of the *last* loop iteration.
        NOTE(review): this looks unintended — ids are not accumulated across
        shops/fields, and if the loops never run this raises
        UnboundLocalError. Confirm what callers expect before relying on
        the return value.
    """
    attr_loc_obj = self.pool.get('attribute.shop.location')
    attr_obj = self.pool.get('product.attribute')
    # NOTE(review): model_id is never used afterwards; the lookup only
    # fails loudly (IndexError) when product.product is missing.
    model_id = self.pool.get('ir.model').search(
        cr, uid, [('model', '=', 'product.product')], context=context)[0]
    for shop in self.browse(cr, uid, ids, context=context):
        fields = self._prepare_attribute_shop_fields(cr, uid, context=context)
        for field, field_type in fields.items():
            # Existing mapping for this shop/field?
            attribute_loc_ids = attr_loc_obj.search(
                cr, uid,
                [('shop_id', '=', shop.id), ('external_name', '=', field)],
                context=context)
            if not attribute_loc_ids:
                # Find or create the backing product.attribute.
                field_name = 'x_shop%s_attr_%s' % (shop.id, field)
                prod_attribute_ids = attr_obj.search(
                    cr, uid, [('name', '=', field_name)], context=context)
                if not prod_attribute_ids:
                    vals = {
                        'name': field_name,
                        'field_description': field,
                        'attribute_type': field_type,
                        'based_on': 'product_product',
                        # only textual attributes are translatable
                        'translate': field_type in ('char', 'text'),
                    }
                    prod_attribute_id = attr_obj.create(cr, uid, vals,
                                                        context=context)
                else:
                    prod_attribute_id = prod_attribute_ids[0]
                # Create the shop-location mapping for this field.
                attribute_id = attr_loc_obj.create(
                    cr, uid, {
                        'external_name': field,
                        'attribute_id': prod_attribute_id,
                        'shop_id': shop.id
                    }, context=context)
                attribute_loc_ids.append(attribute_id)
    return attribute_loc_ids
def salesforce_import_mapping(self, cr, uid, response, fields, con, account=False, context=None):
    """Map a Salesforce API response onto OpenERP field values.

    :param cr: database cursor
    :param uid: id of the executing user
    :param response: API response dict from Salesforce
    :param fields: mapping dict prepared by get_salesforce_field_mapping()
    :param con: connection object to the Salesforce API
    :param account: optional Salesforce account object
    :return: NOTE(review): builds ``res`` (a dict ready for create/write)
        but never returns it — a trailing ``return res`` appears to be
        missing; confirm against callers.
    """
    res = {}
    # iterate through the mapped fields
    for key,value in fields.items():
        if value['evaluation_type'] == 'direct' :
            # direct mapping: copy the value straight from the response
            res[value['name']]= response.get(key,False)
        elif value['evaluation_type'] == 'function' :
            # function mapping: execute the configured code snippet
            space = {
                'self': self,
                'cr': cr,
                'uid': uid,
                'key': key,
                'response': response,
                'con' : con,
                'account' : account,
                'context':context,
            }
            # these names are available inside the configured function
            try:
                # NOTE(review): exec of stored code — the mapping
                # configuration must be trusted.
                exec value['in_function'] in space
                # execute the function (Python 2 exec statement)
            except Exception, e:
                # surface any exception from the configured code as a
                # user-visible error
                raise osv.except_osv(_("Error "),_(e))
            # the function reports its output via the ``result`` variable:
            # a list of (field_name, value) 2-tuples merged into res
            result = space.get('result', False)
            if result:
                if isinstance(result, list):
                    for each_tuple in result:
                        if isinstance(each_tuple, tuple) and len(each_tuple) == 2:
                            res[each_tuple[0]] = each_tuple[1]
        else :
            # unknown evaluation type: default the field to False
            res[value['name']]= False
def salesforce_export_mapping(self, cr, uid, record, fields, con,account=False, context=None):
    """Map an OpenERP record onto Salesforce field values.

    :param cr: database cursor
    :param uid: id of the executing user
    :param record: browse record to export
    :param fields: mapping dict (OpenERP name -> salesforce_name/eval info)
    :param con: Salesforce connection object
    :param account: optional Salesforce account object
    :return: NOTE(review): builds ``res`` but never returns it — a trailing
        ``return res`` appears to be missing; confirm against callers.
    """
    res= {}
    for key,value in fields.items():
        if value['evaluation_type'] == 'direct' and hasattr(record,key):
            # direct mapping: copy the attribute, normalising falsy to None
            res[value['salesforce_name']]= getattr(record,key) or None
        elif value['evaluation_type'] == 'function' :
            # function mapping: execute the configured code snippet with
            # these names in scope
            space = {
                'self': self,
                'cr': cr,
                'uid': uid,
                'key': key,
                'record': record,
                'con' : con,
                'account':account,
                'context':context,
            }
            try:
                # NOTE(review): exec of stored code — the mapping
                # configuration must be trusted. (Python 2 exec statement)
                exec value['out_function'] in space
            except Exception, e:
                raise osv.except_osv(_("Error "),_(e))
            # the function reports its output via ``result``: a list of
            # (salesforce_name, value) 2-tuples merged into res
            result = space.get('result', False)
            if result:
                if isinstance(result, list):
                    for each_tuple in result:
                        if isinstance(each_tuple, tuple) and len(each_tuple) == 2:
                            res[each_tuple[0]] = each_tuple[1]
        else:
            # field absent on the record / unknown type: export as None
            res[value['salesforce_name']]= None
def export_data(pool, cr, uid, model, fn, db_only=True, ext_ref=None, module=None):
    """Export records of ``model`` to the CSV file ``fn``.

    If ``fn`` already exists, its first row is reused as the export header;
    otherwise a header is derived from the model's fields.

    :param pool: OpenERP object pool (registry)
    :param cr: database cursor
    :param uid: id of the executing user
    :param model: model name to export
    :param fn: path of the CSV file to write
    :param db_only: restrict to plain stored columns of simple types
    :param ext_ref: None (all records), 'ref_only' (only records with an
        ir.model.data entry for ``module``) or 'noref' (only records without)
    :param module: module name used for the ir.model.data lookup
    :return: list of exported rows (without the header row)
    """
    obj = pool.get(model)

    def db_field(obj, fname):
        # True for plain database-backed columns of a simple type;
        # function fields (those carrying _fnct) are excluded.
        # (inner param renamed from ``fn`` — it shadowed the filename)
        if fname in obj._columns:
            f = obj._columns[fname]
            if f._type in ['boolean', 'char', 'text', 'many2one', 'integer', 'float']:
                return '_fnct' not in f.__dict__

    if db_only:
        fields = dict([x for x in obj.fields_get(cr, uid).items() if db_field(obj, x[0])])
    else:
        fields = obj.fields_get(cr, uid)
    # ids of records that have an external reference in this module
    id_ref_ids = pool.get('ir.model.data').search(cr, uid, [('model', '=', model), ('module', '=', module)])
    ref_ids = [x.res_id for x in pool.get('ir.model.data').browse(cr, uid, id_ref_ids)]
    ids = pool.get(model).search(cr, uid, [])
    if ext_ref is None:
        pass
    elif ext_ref == 'ref_only':  # fix: was ``is 'ref_only'`` (identity test on a str literal)
        ids = ref_ids
    elif ext_ref == 'noref':  # fix: was ``is 'noref'``
        ids = list(set(ids) - set(ref_ids))
    if os.path.isfile(fn):
        # Existing file: reuse its header, re-export, and blank out
        # False-ish cell values.
        header_export = read_csv(fn)[0]
        data = pool.get(model).export_data(cr, uid, ids, header_export)
        out = []
        for row in data['datas']:
            row_t = []
            for c in row:
                # note: ``0 in ['FALSE', 'False', 0]`` also matches False
                if c in ['FALSE', 'False', 0]:
                    row_t.append('')
                else:
                    row_t.append(c)
            out.append(row_t)
        # Python 2 csv idiom: binary mode.
        fp = open(fn, 'wb')
        csv_writer = csv.writer(fp)
        csv_writer.writerows([header_export])
        csv_writer.writerows(out)
        fp.close()
        return out
    else:
        # Fresh export: derive the header from the field definitions.
        # ``header`` holds plain field names (for type lookups),
        # ``header_export`` the import/export column spec.
        header = []
        header_export = ['id']
        for f, v in fields.items():
            if 'function' not in v:
                if v['type'] in ['many2one', 'many2many']:
                    # accounts/journals are keyed by code, taxes by
                    # description, everything else by XML id
                    if v['relation'] in ['account.account', 'account.journal']:
                        header_export.append("%s/code" % f)
                    elif v['relation'] in ['account.tax']:
                        header_export.append("%s/description" % f)
                    else:
                        header_export.append("%s/id" % f)
                    header.append(f)
                elif v['type'] == 'one2many':
                    # one2many columns are skipped entirely
                    pass
                else:
                    header.append(f)
                    header_export.append(f)
        header_types = [fields[x]['type'] for x in header]
        data = pool.get(model).export_data(cr, uid, ids, header_export)
        # Blank out False for non-boolean columns; encode the rest as UTF-8.
        out = []
        for row in data['datas']:
            out_row = [row[0]]
            for i, h in enumerate(header):
                v = row[i + 1]
                t = header_types[i]
                if (v is False) and (t != 'boolean'):
                    out_row.append('')
                else:
                    out_row.append(v.encode('utf8'))
            out.append(out_row)
        fp = open(fn, 'wb')
        csv_writer = csv.writer(fp)
        csv_writer.writerows([header_export])
        csv_writer.writerows(out)
        fp.close()
        return out
def _data_def(self, cr, uid, model_obj, view=None, add_description=False, recursion_set=None, context=None):
    """Build the data definition (schema) for ``model_obj``.

    Returns a dict with "model", "fields" (field-name -> field_def with
    dtype/ltype/hidden/label/model/schema keys) and optionally
    "schema"/"ltype" from the applied view. ``recursion_set`` carries the
    model names currently being expanded to detect cyclic one2many chains.

    :raises DataDefRecursion: when ``model_obj`` is already in
        ``recursion_set``
    """
    if not context:
        context = {}
    # check recursion: abort if this model is already being expanded
    if recursion_set and model_obj._name in recursion_set:
        raise DataDefRecursion
    # create result skeleton
    field_defs = {}
    res = {
        "fields" : field_defs,
        "model" : model_obj._name
    }
    # per-field overrides collected from the view rules
    field_alias={}
    field_views={}
    field_include=set()
    field_exclude=set()
    field_ltype={}
    field_hidden={}
    if not view:
        view = self._schema_get(cr, uid, model_obj._name, context)
    if view:
        res["schema"]=view.name
        if view.ltype:
            res["ltype"]=view.ltype
        for rule in view.rule_ids:
            rule_field = rule.field_id
            if not rule_field:
                continue
            # options: i=include, e=exclude, h=hide, v=visible
            if rule.option=="i":
                field_include.add(rule_field.name)
            elif rule.option=="e":
                field_exclude.add(rule_field.name)
            elif rule.option=="h":
                field_hidden[rule_field.name]=True
            elif rule.option=="v":
                field_hidden[rule_field.name]=False
            # per-field sub-view override
            rule_view = rule.field_view_id
            if rule_view:
                field_views[rule_field.name]=rule_view.name
                # list-type flag from the sub-view
                if rule_view.ltype:
                    field_ltype[rule_field.name]=rule_view.ltype
            # alias: rule name renames the field in the output
            field_alias[rule_field.name]=rule.name
            # list-type flag from the rule itself (takes precedence)
            if rule.ltype:
                field_ltype[rule_field.name]=rule.ltype
    fields = model_obj.fields_get(cr, uid)
    for field, attrib in fields.items():
        # excluded field
        if field in field_exclude:
            continue
        # exclude non-included field if an include list is defined
        if field_include and not field in field_include:
            continue
        field_type = attrib.get("type")
        field_relation = attrib.get("relation") or None
        field_name = field_alias.get(field) or field
        field_view = field_views.get(field_name)
        field_def = {}
        # function fields are hidden by default
        field_function = attrib.get("function") or None
        if field_function:
            field_def["hidden"]=True
        # resolve the schema of the related model (list/composite types)
        sub_view_name = field_view or field_relation
        sub_view = None
        if sub_view_name:
            sub_view = self._schema_get(cr, uid, sub_view_name, context)
        if sub_view and sub_view.ltype:
            sub_ltype_hint = sub_view.ltype
        # NOTE(review): reconstructed from a whitespace-mangled source; the
        # assignment above is immediately overwritten below, which looks
        # like a leftover — confirm the intended precedence (sub-view
        # ltype vs. rule ltype) against version control.
        sub_ltype_hint = field_ltype.get(field)
        sub_ltype = sub_ltype_hint or "r"
        # evaluate field type -> dtype mapping
        if field_type == "many2one":
            field_def["dtype"]=sub_ltype
            if field_relation and not field_function:
                # hide the back-pointer of a parent/child one2many pair
                rel_fields = fields
                if not field_relation == model_obj._name:
                    rel_fields = self.pool.get(field_relation).fields_get(cr, uid)
                for rel_attrib in rel_fields.values():
                    if rel_attrib.get("relation") == model_obj._name and rel_attrib.get("relation_field") == field:
                        field_def["hidden"]=True
        elif field_type == "one2many":
            if not sub_ltype_hint and field_relation != model_obj._name:
                # check recursion: track this model while expanding the
                # related model's definition
                if recursion_set is None:
                    recursion_set = set()
                recursion_set.add(model_obj._name)
                try:
                    sub_ltype="c"
                    # probe the related model's definition for recursion
                    self._data_def(cr, uid, self.pool.get(field_relation), view=sub_view, add_description=False, recursion_set=recursion_set, context=context )
                    # self-referencing one2many on the related model forces
                    # reference semantics
                    rel_fields = self.pool.get(field_relation).fields_get(cr, uid)
                    for rel_attrib in rel_fields.values():
                        if rel_attrib.get("type") == "one2many" and not rel_attrib.get("function") and rel_attrib.get("relation") == field_relation:
                            sub_ltype="r"
                            break
                except DataDefRecursion:
                    # cycle detected: fall back to reference semantics
                    sub_ltype="r"
                finally:
                    recursion_set.remove(model_obj._name)
            field_def["dtype"]="l"
            field_def["ltype"]=sub_ltype
            field_def["hidden"]=(sub_ltype=="r")
        elif field_type == "many2many":
            field_def["dtype"]="l"
            field_def["ltype"]=field_ltype.get(field,"r")
        elif field_type == "char":
            field_def["dtype"]="s"
        elif field_type == "integer":
            field_def["dtype"]="i"
        elif field_type == "float":
            field_def["dtype"]="f"
        elif field_type == "text":
            field_def["dtype"]="t"
        else:
            # unsupported field type: skip entirely
            continue
        # explicit hide/show rule overrides the defaults
        # NOTE(review): dict.has_key() is Python 2 only; use ``in`` when
        # porting to Python 3.
        if field_hidden.has_key(field):
            field_def["hidden"]=field_hidden[field]
        field_label = attrib.get("string")
        field_def["name"]=field
        field_def["label"]=field_label
        if add_description:
            field_def["description"]=attrib.get("help")
        if field_relation:
            field_def["model"]=field_relation
        if field_view:
            field_def["schema"]=field_view
        field_defs[field_name]=field_def
    return res