def parameter(dico, resource, special=None):
    """ Convert values to <parameter> elements for a SOAP query.

    @type dico: dict
    @param dico: values emitted as WIZARD_* parameters
    @type resource: dict
    @param resource: values emitted as OERP_* parameters
    @type special: dict or None
    @param special: extra parameters emitted with their key unchanged
    @rtype: xmlstring
    @return: XML String representation
    """
    res = ''
    for key in resource:
        _logger.debug(' PARAMETER -> RESOURCE: %s' % key)
        # 'xml_data' is handled separately at the end (CDATA block).
        # BUGFIX: was `key in 'xml_data'` - a substring test that also
        # skipped keys such as 'data', 'xml' or ''.
        if key == 'xml_data':
            continue
        e = Element('parameter')
        e.set('name', 'OERP_%s' % key.upper())
        e.text = ustr(resource[key])
        res += tostring(e) + '\n'
    for key in dico:
        _logger.debug(' PARAMETER -> DICO: %s' % key)
        # BUGFIX: was `key in 'params'` (substring test); only the literal
        # 'params' key must be skipped.
        if key == 'params':
            continue
        val = dico[key]
        e = Element('parameter')
        e.set('name', 'WIZARD_%s' % key.upper())
        if isinstance(val, list):
            if not val:
                # BUGFIX: an empty list used to raise IndexError on val[0]
                e.text = ''
            elif isinstance(val[0], tuple):
                # many2many-style triplet (op, _, [ids]) -> comma-separated ids
                e.text = ','.join(map(str, val[0][2]))
            else:
                e.text = ','.join(map(str, val))
        else:
            e.text = val and ustr(val) or ''
        res += tostring(e) + '\n'
    if special is None:
        special = {}
    for key in special:
        _logger.debug(' PARAMETER -> SPECIAL: %s' % key)
        e = Element('parameter')
        e.set('name', key)
        e.text = ustr(special[key])
        res += tostring(e) + '\n'
    res = entities(res)
    if resource.get('xml_data'):
        # raw XML payload is passed through unescaped inside a CDATA section
        res += '<parameter class="java.lang.String" name="XML_DATA">'
        res += '<![CDATA["%s"]]></parameter>' % resource['xml_data']
    return res
def put(self, uri, data, content_type=None):
    """ put the object into the filesystem """
    # WebDAV PUT handler: resolve `uri` to a document node; if the target
    # does not exist yet, create it as a child of its parent folder.
    self.parent.log_message(
        'Putting %s (%d), %s' % (misc.ustr(uri),
                                 data and len(data) or 0, content_type))
    cr, uid, pool, dbname, uri2 = self.get_cr(uri)
    if not dbname:
        # uri does not map to any database: writing is not allowed
        if cr:
            cr.close()
        raise DAV_Forbidden
    try:
        node = self.uri2object(cr, uid, pool, uri2[:])
    except Exception:
        # resolution failure is treated as "target does not exist yet"
        node = False
    objname = misc.ustr(uri2[-1])
    ret = None
    if not node:
        # Target missing: create the child under the parent folder.
        dir_node = self.uri2object(cr, uid, pool, uri2[:-1])
        if not dir_node:
            cr.close()
            raise DAV_NotFound('Parent folder not found')
        newchild = self._try_function(dir_node.create_child, (cr, objname, data),
                                      "create %s" % objname, cr=cr)
        if not newchild:
            cr.commit()
            cr.close()
            raise DAV_Error(400, "Failed to create resource")
        uparts = urlparse.urlparse(uri)
        fileloc = '/'.join(newchild.full_path())
        if isinstance(fileloc, unicode):
            fileloc = fileloc.encode('utf-8')
        # the uri we get is a mangled one, where the davpath has been removed
        davpath = self.parent.get_davpath()
        surl = '%s://%s' % (uparts[0], uparts[1])
        uloc = urllib.quote(fileloc)
        hurl = False
        if uri != ('/' + uloc) and uri != (surl + '/' + uloc):
            # client used a different location: report the canonical one back
            hurl = '%s%s/%s/%s' % (surl, davpath, dbname, uloc)
        etag = False
        try:
            etag = str(newchild.get_etag(cr))
        except Exception, e:
            self.parent.log_error("Cannot get etag for node: %s" % e)
        # NOTE(review): str(hurl) yields the literal 'False' when no URL was
        # built, and no return/commit is visible after this point - the block
        # appears truncated in this view; confirm against the full file.
        ret = (str(hurl), etag)
def parameter(dico, resource):
    """ Convert values to <parameter> elements for a SOAP query.

    @type dico: dict
    @param dico: values emitted as WIZARD_* parameters
    @type resource: dict
    @param resource: values emitted as OERP_* parameters
    @rtype: xmlstring
    @return: XML String representation
    """
    res = ''
    for key in resource:
        _logger.debug(' PARAMETER -> RESOURCE: %s' % key)
        # 'xml_data' is handled separately at the end (CDATA block).
        # BUGFIX: was `key in 'xml_data'` - a substring test that also
        # skipped keys such as 'data', 'xml' or ''.
        if key == 'xml_data':
            continue
        e = Element('parameter')
        e.set('name', 'OERP_%s' % key.upper())
        e.text = ustr(resource[key])
        res += tostring(e) + '\n'
    for key in dico:
        _logger.debug(' PARAMETER -> DICO: %s' % key)
        # BUGFIX: was `key in 'params'` (substring test); only the literal
        # 'params' key must be skipped.
        if key == 'params':
            continue
        val = dico[key]
        e = Element('parameter')
        e.set('name', 'WIZARD_%s' % key.upper())
        if isinstance(val, list):
            if not val:
                # BUGFIX: an empty list used to raise IndexError on val[0]
                e.text = ''
            elif isinstance(val[0], tuple):
                # many2many-style triplet (op, _, [ids]) -> comma-separated ids
                e.text = ','.join(map(str, val[0][2]))
            else:
                e.text = ','.join(map(str, val))
        else:
            e.text = val and ustr(val) or ''
        res += tostring(e) + '\n'
    # fixed parameters always passed to the JasperServer side
    for key, val in [('REPORT_LOCALE', 'fr_FR'), ('IS_JASPERSERVER', 'yes')]:
        e = Element('parameter')
        e.set('name', key)
        e.text = ustr(val)
        res += tostring(e) + '\n'
    res = entities(res)
    if resource.get('xml_data'):
        # raw XML payload is passed through unescaped inside a CDATA section
        res += '<parameter class="java.lang.String" name="XML_DATA">'
        res += '<![CDATA["%s"]]></parameter>' % resource['xml_data']
    return res
def apply_promotions(self, cursor, user, order_id, context=None): """ Applies promotions @param cursor: Database Cursor @param user: ID of User @param order_id: ID of sale order @param context: Context(no direct use). """ order = self.pool.get('sale.order').browse(cursor, user, order_id, context=context) active_promos = self.search(cursor, user, [('active', '=', True)], context=context) for promotion_rule in self.browse(cursor, user, active_promos, context): result = self.evaluate(cursor, user, promotion_rule, order, context) # If evaluates to true if result: try: self.execute_actions(cursor, user, promotion_rule, order_id, context) except Exception, e: raise osv.except_osv( "Promotions", ustr(e) ) # If stop further is true if promotion_rule.stop_further: return True
def apply_promotions(self, cursor, user, order_id, context=None): """ Applies promotions @param cursor: Database Cursor @param user: ID of User @param order_id: ID of sale order @param context: Context(no direct use). """ order = self.pool.get('sale.order').browse(cursor, user, order_id, context=context) active_promos = self.search(cursor, user, [('active', '=', True)], context=context) for promotion_rule in self.browse(cursor, user, active_promos, context): result = self.evaluate(cursor, user, promotion_rule, order, context) #If evaluates to true if result: try: self.execute_actions(cursor, user, promotion_rule, order_id, context) except Exception, e: raise osv.except_osv("Promotions", ustr(e)) #If stop further is true if promotion_rule.stop_further: return True
def put(self, uri, data, content_type=None):
    """ put the object into the filesystem """
    # WebDAV PUT: resolve the uri, creating the target node when it does not
    # exist. Returns (via `ret`) the location hint and etag for the client.
    self.parent.log_message('Putting %s (%d), %s'%(misc.ustr(uri),
                            data and len(data) or 0, content_type))
    cr, uid, pool,dbname, uri2 = self.get_cr(uri)
    if not dbname:
        # no database behind this uri: refuse the write
        if cr:
            cr.close()
        raise DAV_Forbidden
    try:
        node = self.uri2object(cr, uid, pool, uri2[:])
    except Exception:
        # resolution failure means "target does not exist yet"
        node = False
    objname = misc.ustr(uri2[-1])
    ret = None
    if not node:
        dir_node = self.uri2object(cr, uid, pool, uri2[:-1])
        if not dir_node:
            cr.close()
            raise DAV_NotFound('Parent folder not found')
        newchild = self._try_function(dir_node.create_child, (cr, objname, data),
                                      "create %s" % objname, cr=cr)
        if not newchild:
            cr.commit()
            cr.close()
            raise DAV_Error(400, "Failed to create resource")
        uparts=urlparse.urlparse(uri)
        fileloc = '/'.join(newchild.full_path())
        if isinstance(fileloc, unicode):
            fileloc = fileloc.encode('utf-8')
        # the uri we get is a mangled one, where the davpath has been removed
        davpath = self.parent.get_davpath()
        surl = '%s://%s' % (uparts[0], uparts[1])
        uloc = urllib.quote(fileloc)
        hurl = False
        if uri != ('/'+uloc) and uri != (surl + '/' + uloc):
            # report the canonical location when it differs from the request
            hurl = '%s%s/%s/%s' %(surl, davpath, dbname, uloc)
        etag = False
        try:
            etag = str(newchild.get_etag(cr))
        except Exception, e:
            self.parent.log_error("Cannot get etag for node: %s" % e)
        # NOTE(review): no return/commit and no handling of the existing-node
        # case is visible here - this block appears truncated; confirm.
        ret = (str(hurl), etag)
def close(self):
    # Flush the on-disk file, then refresh the ir_attachment metadata
    # (index content, mime type, size) on a fresh cursor.
    # TODO: locking in init, close()
    fname = self.__file.name
    self.__file.close()
    if self.mode in ('w', 'w+', 'r+'):
        par = self._get_parent()
        cr = pooler.get_db(par.context.dbname).cursor()
        icont = ''
        mime = ''
        filename = par.path
        if isinstance(filename, (tuple, list)):
            filename = '/'.join(filename)
        try:
            # best-effort full-text indexing; failure only loses the index
            mime, icont = cntIndex.doIndex(None, filename=filename,
                                           content_type=None, realfname=fname)
        except Exception:
            logging.getLogger('document.storage').debug('Cannot index file:',
                                                        exc_info=True)
            pass
        try:
            icont_u = ustr(icont)
        except UnicodeError:
            icont_u = ''
        try:
            fsize = os.stat(fname).st_size
            cr.execute("UPDATE ir_attachment " \
                       " SET index_content = %s, file_type = %s, " \
                       " file_size = %s " \
                       " WHERE id = %s",
                       (icont_u, mime, fsize, par.file_id))
            par.content_length = fsize
            par.content_type = mime
            cr.commit()
            cr.close()
        except Exception:
            # NOTE(review): cr is not closed on this path - possible cursor
            # leak; confirm against the full file.
            logging.getLogger('document.storage').warning(
                'Cannot save file indexed content:', exc_info=True)
    elif self.mode in ('a', 'a+'):
        # append mode only updates the stored size
        try:
            par = self._get_parent()
            cr = pooler.get_db(par.context.dbname).cursor()
            fsize = os.stat(fname).st_size
            cr.execute("UPDATE ir_attachment SET file_size = %s " \
                       " WHERE id = %s",
                       (fsize, par.file_id))
            par.content_length = fsize
            cr.commit()
            cr.close()
        except Exception:
            logging.getLogger('document.storage').warning(
                'Cannot save file appended content:', exc_info=True)
def create(self, cursor, user, vals, context=None):
    """
    Serialise before save

    @param cursor: Database Cursor
    @param user: ID of User
    @param vals: Values of current record.
    @param context: Context(no direct use).
    """
    # Reject the record up-front if its expression does not validate;
    # any validation failure is surfaced as a user-visible error.
    try:
        self.validate(cursor, user, vals, context)
    except Exception, e:
        raise osv.except_osv("Invalid Expression", ustr(e))
    # NOTE(review): no super().create() call is visible here, so nothing is
    # actually created in this view - block is likely truncated; confirm.
def close(self):
    # Close the underlying file and push index/size metadata to
    # ir_attachment on a fresh cursor.
    # TODO: locking in init, close()
    fname = self.__file.name
    self.__file.close()
    if self.mode in ('w', 'w+', 'r+'):
        par = self._get_parent()
        cr = pooler.get_db(par.context.dbname).cursor()
        icont = ''
        mime = ''
        filename = par.path
        if isinstance(filename, (tuple, list)):
            filename = '/'.join(filename)
        try:
            # best-effort indexing of the file on disk
            mime, icont = cntIndex.doIndex(None, filename=filename,
                                           content_type=None, realfname=fname)
        except Exception:
            _logger.debug('Cannot index file:', exc_info=True)
            pass
        try:
            icont_u = ustr(icont)
        except UnicodeError:
            icont_u = ''
        try:
            fsize = os.stat(fname).st_size
            cr.execute("UPDATE ir_attachment " \
                       " SET index_content = %s, file_type = %s, " \
                       " file_size = %s " \
                       " WHERE id = %s",
                       (icont_u, mime, fsize, par.file_id))
            par.content_length = fsize
            par.content_type = mime
            cr.commit()
            cr.close()
        except Exception:
            # NOTE(review): cr is not closed on this path - possible cursor
            # leak; confirm against the full file.
            _logger.warning('Cannot save file indexed content:', exc_info=True)
    elif self.mode in ('a', 'a+' ):
        # append mode only refreshes the stored size
        try:
            par = self._get_parent()
            cr = pooler.get_db(par.context.dbname).cursor()
            fsize = os.stat(fname).st_size
            cr.execute("UPDATE ir_attachment SET file_size = %s " \
                       " WHERE id = %s",
                       (fsize, par.file_id))
            par.content_length = fsize
            cr.commit()
            cr.close()
        except Exception:
            _logger.warning('Cannot save file appended content:', exc_info=True)
def close(self):
    # Close the underlying file and update the ir_attachment metadata
    # (index content, mime type, size) on a separate cursor.
    # TODO: locking in init, close()
    fname = self.__file.name
    self.__file.close()
    if self.mode in ("w", "w+", "r+"):
        par = self._get_parent()
        cr = pooler.get_db(par.context.dbname).cursor()
        icont = ""
        mime = ""
        filename = par.path
        if isinstance(filename, (tuple, list)):
            filename = "/".join(filename)
        try:
            # best-effort indexing; failure only loses the index content
            mime, icont = cntIndex.doIndex(None, filename=filename, content_type=None, realfname=fname)
        except Exception:
            logging.getLogger("document.storage").debug("Cannot index file:", exc_info=True)
            pass
        try:
            icont_u = ustr(icont)
        except UnicodeError:
            icont_u = ""
        try:
            fsize = os.stat(fname).st_size
            cr.execute(
                "UPDATE ir_attachment "
                " SET index_content = %s, file_type = %s, "
                " file_size = %s "
                " WHERE id = %s",
                (icont_u, mime, fsize, par.file_id),
            )
            par.content_length = fsize
            par.content_type = mime
            cr.commit()
            cr.close()
        except Exception:
            # NOTE(review): cr is left open on this path - possible cursor
            # leak; confirm against the full file.
            logging.getLogger("document.storage").warning("Cannot save file indexed content:", exc_info=True)
    elif self.mode in ("a", "a+"):
        # append mode only refreshes the stored size
        try:
            par = self._get_parent()
            cr = pooler.get_db(par.context.dbname).cursor()
            fsize = os.stat(fname).st_size
            cr.execute("UPDATE ir_attachment SET file_size = %s " " WHERE id = %s", (fsize, par.file_id))
            par.content_length = fsize
            cr.commit()
            cr.close()
        except Exception:
            logging.getLogger("document.storage").warning("Cannot save file appended content:", exc_info=True)
def close(self):
    # Persist the in-memory buffer into ir_attachment.db_datas.
    # we now open a *separate* cursor, to update the data.
    # FIXME: this may be improved, for concurrency handling
    par = self._get_parent()
    # uid = par.context.uid
    cr = pooler.get_db(par.context.dbname).cursor()
    try:
        if self.mode in ('w', 'w+', 'r+'):
            # full rewrite: replace the stored bytes and refresh index metadata
            data = self.getvalue()
            icont = ''
            mime = ''
            filename = par.path
            if isinstance(filename, (tuple, list)):
                filename = '/'.join(filename)
            try:
                # best-effort full-text indexing of the buffer
                mime, icont = cntIndex.doIndex(data, filename=filename,
                                               content_type=None, realfname=None)
            except Exception:
                logging.getLogger('document.storage').debug('Cannot index file:',
                                                            exc_info=True)
                pass
            try:
                icont_u = ustr(icont)
            except UnicodeError:
                icont_u = ''
            out = psycopg2.Binary(data)
            cr.execute("UPDATE ir_attachment " \
                       "SET db_datas = %s, file_size=%s, " \
                       " index_content= %s, file_type=%s " \
                       " WHERE id = %s",
                       (out, len(data), icont_u, mime, par.file_id))
        elif self.mode == 'a':
            # append mode: concatenate the new bytes at SQL level
            data = self.getvalue()
            out = psycopg2.Binary(data)
            cr.execute("UPDATE ir_attachment " \
                       "SET db_datas = COALESCE(db_datas,'') || %s, " \
                       " file_size = COALESCE(file_size, 0) + %s " \
                       " WHERE id = %s",
                       (out, len(data), par.file_id))
        cr.commit()
    except Exception:
        logging.getLogger('document.storage').exception(
            'Cannot update db file #%d for close:', par.file_id)
        raise
    finally:
        cr.close()
    StringIO.close(self)
def close(self):
    # Persist the in-memory buffer into ir_attachment.db_datas.
    # we now open a *separate* cursor, to update the data.
    # FIXME: this may be improved, for concurrency handling
    par = self._get_parent()
    # uid = par.context.uid
    cr = pooler.get_db(par.context.dbname).cursor()
    try:
        if self.mode in ("w", "w+", "r+"):
            # full rewrite: replace stored bytes and refresh index metadata
            data = self.getvalue()
            icont = ""
            mime = ""
            filename = par.path
            if isinstance(filename, (tuple, list)):
                filename = "/".join(filename)
            try:
                # best-effort full-text indexing of the buffer
                mime, icont = cntIndex.doIndex(data, filename=filename, content_type=None, realfname=None)
            except Exception:
                logging.getLogger("document.storage").debug("Cannot index file:", exc_info=True)
                pass
            try:
                icont_u = ustr(icont)
            except UnicodeError:
                icont_u = ""
            out = psycopg2.Binary(data)
            cr.execute(
                "UPDATE ir_attachment "
                "SET db_datas = %s, file_size=%s, "
                " index_content= %s, file_type=%s "
                " WHERE id = %s",
                (out, len(data), icont_u, mime, par.file_id),
            )
        elif self.mode == "a":
            # append mode: concatenate the new bytes at SQL level
            data = self.getvalue()
            out = psycopg2.Binary(data)
            cr.execute(
                "UPDATE ir_attachment "
                "SET db_datas = COALESCE(db_datas,'') || %s, "
                " file_size = COALESCE(file_size, 0) + %s "
                " WHERE id = %s",
                (out, len(data), par.file_id),
            )
        cr.commit()
    except Exception:
        logging.getLogger("document.storage").exception("Cannot update db file #%d for close:", par.file_id)
        raise
    finally:
        cr.close()
    StringIO.close(self)
def close(self):
    # Persist the buffer into ir_attachment.db_datas, stored as
    # base64-encoded text inside a bytea column.
    # we now open a *separate* cursor, to update the data.
    # FIXME: this may be improved, for concurrency handling
    par = self._get_parent()
    # uid = par.context.uid
    cr = pooler.get_db(par.context.dbname).cursor()
    try:
        if self.mode in ('w', 'w+', 'r+'):
            # full rewrite: replace stored data and refresh index metadata
            data = self.getvalue()
            icont = ''
            mime = ''
            filename = par.path
            if isinstance(filename, (tuple, list)):
                filename = '/'.join(filename)
            try:
                # best-effort full-text indexing of the buffer
                mime, icont = cntIndex.doIndex(data, filename=filename,
                                               content_type=None, realfname=None)
            except Exception:
                logging.getLogger('document.storage').debug('Cannot index file:',
                                                            exc_info=True)
                pass
            try:
                icont_u = ustr(icont)
            except UnicodeError:
                icont_u = ''
            cr.execute('UPDATE ir_attachment SET db_datas = %s::bytea, file_size=%s, ' \
                       'index_content = %s, file_type = %s ' \
                       'WHERE id = %s',
                       (base64.encodestring(data), len(data), icont_u, mime, par.file_id))
        elif self.mode == 'a':
            data = self.getvalue()
            # Yes, we're obviously using the wrong representation for storing our
            # data as base64-in-bytea
            cr.execute("UPDATE ir_attachment " \
                       "SET db_datas = encode( (COALESCE(decode(encode(db_datas,'escape'),'base64'),'') || decode(%s, 'base64')),'base64')::bytea , " \
                       " file_size = COALESCE(file_size, 0) + %s " \
                       " WHERE id = %s",
                       (base64.encodestring(data), len(data), par.file_id))
        cr.commit()
    except Exception:
        logging.getLogger('document.storage').exception(
            'Cannot update db file #%d for close:', par.file_id)
        raise
    finally:
        cr.close()
    StringIO.close(self)
def action_validate(self, cr, uid, ids, context=None):
    # Validate a maintenance contract against the remote server:
    # gather the installed modules and submit them together with the
    # contract name/password.
    if not ids:
        return False
    module_proxy = self.pool.get('ir.module.module')
    module_ids = module_proxy.search(cr, uid, [('state', '=', 'installed')])
    modules = module_proxy.read(cr, uid, module_ids, ['name', 'installed_version'])
    # only the first selected record is checked
    contract = self.read(cr, uid, ids, ['name', 'password'])[0]
    try:
        contract_info = tm.remote_contract(contract['name'], contract['password'], modules)
    except tm.RemoteContractException, rce:
        # surface the remote failure as a user-visible error
        raise osv.except_osv(_('Error'), ustr(rce))
    # NOTE(review): contract_info is unused in the visible code - the block
    # is likely truncated in this view; confirm against the full file.
def close(self):
    # Persist the buffer into ir_attachment.db_datas, stored as
    # base64-encoded text inside a bytea column.
    # we now open a *separate* cursor, to update the data.
    # FIXME: this may be improved, for concurrency handling
    par = self._get_parent()
    # uid = par.context.uid
    cr = pooler.get_db(par.context.dbname).cursor()
    try:
        if self.mode in ("w", "w+", "r+"):
            # full rewrite: replace stored data and refresh index metadata
            data = self.getvalue()
            icont = ""
            mime = ""
            filename = par.path
            if isinstance(filename, (tuple, list)):
                filename = "/".join(filename)
            try:
                # best-effort full-text indexing of the buffer
                mime, icont = cntIndex.doIndex(data, filename=filename, content_type=None, realfname=None)
            except Exception:
                logging.getLogger("document.storage").debug("Cannot index file:", exc_info=True)
                pass
            try:
                icont_u = ustr(icont)
            except UnicodeError:
                icont_u = ""
            cr.execute(
                "UPDATE ir_attachment SET db_datas = %s::bytea, file_size=%s, "
                "index_content = %s, file_type = %s "
                "WHERE id = %s",
                (base64.encodestring(data), len(data), icont_u, mime, par.file_id),
            )
        elif self.mode == "a":
            data = self.getvalue()
            # Yes, we're obviously using the wrong representation for storing our
            # data as base64-in-bytea
            cr.execute(
                "UPDATE ir_attachment "
                "SET db_datas = encode( (COALESCE(decode(encode(db_datas,'escape'),'base64'),'') || decode(%s, 'base64')),'base64')::bytea , "
                " file_size = COALESCE(file_size, 0) + %s "
                " WHERE id = %s",
                (base64.encodestring(data), len(data), par.file_id),
            )
        cr.commit()
    except Exception:
        logging.getLogger("document.storage").exception("Cannot update db file #%d for close:", par.file_id)
        raise
    finally:
        cr.close()
    StringIO.close(self)
def close(self):
    # Persist the in-memory buffer into ir_attachment.db_datas.
    # we now open a *separate* cursor, to update the data.
    # FIXME: this may be improved, for concurrency handling
    par = self._get_parent()
    # uid = par.context.uid
    cr = pooler.get_db(par.context.dbname).cursor()
    try:
        if self.mode in ('w', 'w+', 'r+'):
            # full rewrite: replace stored bytes and refresh index metadata
            data = self.getvalue()
            icont = ''
            mime = ''
            filename = par.path
            if isinstance(filename, (tuple, list)):
                filename = '/'.join(filename)
            try:
                # best-effort full-text indexing of the buffer
                mime, icont = cntIndex.doIndex(data, filename=filename,
                                               content_type=None, realfname=None)
            except Exception:
                _logger.debug('Cannot index file:', exc_info=True)
                pass
            try:
                icont_u = ustr(icont)
            except UnicodeError:
                icont_u = ''
            out = psycopg2.Binary(data)
            cr.execute("UPDATE ir_attachment " \
                       "SET db_datas = %s, file_size=%s, " \
                       " index_content= %s, file_type=%s " \
                       " WHERE id = %s",
                       (out, len(data), icont_u, mime, par.file_id))
        elif self.mode == 'a':
            # append mode: concatenate the new bytes at SQL level
            data = self.getvalue()
            out = psycopg2.Binary(data)
            cr.execute("UPDATE ir_attachment " \
                       "SET db_datas = COALESCE(db_datas,'') || %s, " \
                       " file_size = COALESCE(file_size, 0) + %s " \
                       " WHERE id = %s",
                       (out, len(data), par.file_id))
        cr.commit()
    except Exception:
        _logger.exception('Cannot update db file #%d for close.', par.file_id)
        raise
    finally:
        cr.close()
    StringIO.close(self)
def notifyChannel(self, name, level, msg):
    # Legacy logging shim: forward old-style notifyChannel calls to the
    # standard `logging` module, one logger per channel name.
    warnings.warn("notifyChannel API shouldn't be used anymore, please use "
                  "the standard `logging` module instead",
                  PendingDeprecationWarning, stacklevel=2)
    from service.web_services import common
    from tools.misc import ustr
    log = logging.getLogger(ustr(name))
    # custom levels (debug_rpc, test) get a method injected on the fly
    if level in [LOG_DEBUG_RPC, LOG_TEST] and not hasattr(log, level):
        fct = lambda msg, *args, **kwargs: log.log(
            getattr(logging, level.upper()), msg, *args, **kwargs)
        setattr(log, level, fct)
    level_method = getattr(log, level)
    if isinstance(msg, Exception):
        msg = tools.exception_to_unicode(msg)
    try:
        msg = tools.ustr(msg).strip()
        # optionally prepend server environment info for severe messages
        if level in (LOG_ERROR, LOG_CRITICAL) and tools.config.get_misc(
                'debug', 'env_info', False):
            msg = common().exp_get_server_environment() + "\n" + msg
        result = msg.split('\n')
    except UnicodeDecodeError:
        result = msg.strip().split('\n')
    try:
        # multi-line messages are numbered line by line
        if len(result) > 1:
            for idx, s in enumerate(result):
                level_method('[%02d]: %s' % (idx + 1, s,))
        elif result:
            level_method(result[0])
    except IOError:
        # TODO: perhaps reset the logger streams?
        #if logrotate closes our files, we end up here..
        pass
    except Exception:
        # better ignore the exception and carry on..
        pass
def evaluate(self, cursor, user, promotion_rule, order, context=None):
    """
    Evaluates if a promotion is valid

    @param cursor: Database Cursor
    @param user: ID of User
    @param promotion_rule: Browse Record
    @param order: Browse Record
    @param context: Context(no direct use).
    """
    if not context:
        context = {}
    expression_obj = self.pool.get("promos.rules.conditions.exps")
    try:
        self.check_primary_conditions(cursor, user, promotion_rule, order,
                                      context)
    except Exception, e:
        # any failed primary condition disqualifies this rule
        if DEBUG:
            netsvc.Logger().notifyChannel("Promotions", netsvc.LOG_INFO,
                                          ustr(e))
        return False
    # NOTE(review): expression_obj is unused in the visible code and no
    # success path returns True - block appears truncated here; confirm.
def write(self, cursor, user, ids, vals, context): """ Validate before Write @param cursor: Database Cursor @param user: ID of User @param vals: Values of current record. @param context: Context(no direct use). """ # Validate before save if type(ids) in [list, tuple] and ids: ids = ids[0] try: old_vals = self.read(cursor, user, ids, ["action_type", "product_code", "arguments"], context) old_vals.update(vals) old_vals.has_key("id") and old_vals.pop("id") self.validate(cursor, user, old_vals, context) except Exception, e: raise osv.except_osv("Invalid Expression", ustr(e))
def evaluate(self, cursor, user, promotion_rule, order, context=None):
    """
    Evaluates if a promotion is valid

    @param cursor: Database Cursor
    @param user: ID of User
    @param promotion_rule: Browse Record
    @param order: Browse Record
    @param context: Context(no direct use).
    """
    if not context:
        context = {}
    expression_obj = self.pool.get('promos.rules.conditions.exps')
    try:
        self.check_primary_conditions(cursor, user, promotion_rule, order,
                                      context)
    except Exception, e:
        # a failed primary condition disqualifies this rule
        if DEBUG:
            LOGGER.notifyChannel("Promotions", netsvc.LOG_INFO, ustr(e))
        return False
    # NOTE(review): expression_obj is unused in the visible code and no
    # success path returns True - block appears truncated here; confirm.
def write(self, cursor, user, ids, vals, context): """ Serialise before Write @param cursor: Database Cursor @param user: ID of User @param ids: ID of current record. @param vals: Values of current record. @param context: Context(no direct use). """ #Validate before save if type(ids) in [list, tuple] and ids: ids = ids[0] try: old_vals = self.read(cursor, user, ids, ['attribute', 'comparator', 'value'], context) old_vals.update(vals) old_vals.has_key('id') and old_vals.pop('id') self.validate(cursor, user, old_vals, context) except Exception, e: raise osv.except_osv("Invalid Expression", ustr(e))
def write(self, cursor, user, ids, vals, context): """ Serialise before Write @param cursor: Database Cursor @param user: ID of User @param ids: ID of current record. @param vals: Values of current record. @param context: Context(no direct use). """ # Validate before save if type(ids) in [list, tuple] and ids: ids = ids[0] try: old_vals = self.read(cursor, user, ids, ['attribute', 'comparator', 'value'], context) old_vals.update(vals) old_vals.has_key('id') and old_vals.pop('id') self.validate(cursor, user, old_vals, context) except Exception, e: raise osv.except_osv("Invalid Expression", ustr(e))
def check_vat_mx(self, vat):
    """Verify a Mexican VAT number (RFC).

    Matches the RFC pattern, then checks that the embedded two-digit
    date really exists (two-digit years above 30 are mapped to 19xx,
    the rest to 20xx).
    """
    # regex works on 8-bit text, so normalize to a byte string first
    vat = ustr(vat).encode('iso8859-1')
    match = self.__check_vat_mx_re.match(vat)
    if match is None:
        # pattern mismatch: not a well-formed RFC
        return False
    try:
        year = int(match.group('ano'))
        year += 1900 if year > 30 else 2000
        # raises ValueError when the embedded date is impossible
        datetime.date(year, int(match.group('mes')), int(match.group('dia')))
    except ValueError:
        return False
    return True
def button_validate(self, cr, uid, ids, context=None):
    """
    Import XLS file
    """
    # NOTE(review): indentation reconstructed from a whitespace-mangled
    # source - confirm statement nesting against version control.
    # Some verifications
    if not context:
        context = {}
    if isinstance(ids, (int, long)):
        ids = [ids]
    for wiz in self.browse(cr, uid, ids):
        # Prepare some values
        created = 0
        updated = 0
        processed = 0
        errors = []
        # Check that a file is given
        if not wiz.file:
            raise osv.except_osv(_('Error'), _('No file given'))
        # Check file extension
        if wiz.filename.split('.')[-1] != 'xml':
            raise osv.except_osv(_('Warning'), _('This wizard only accept XML files.'))
        # Read file
        fileobj = SpreadsheetXML(xmlstring=decodestring(wiz.file))
        reader = fileobj.getRows()
        reader.next()  # skip the header row
        start = 1
        column_list = ['name', 'identification_id'] #, 'job', 'dest', 'cc', 'fp', 'f1', 'f2']
        for num, line in enumerate(reader):
            processed += 1
            # Fetch values: map each expected column to its cell (False if missing)
            vals = {}
            if line.cells:
                for i, el in enumerate(column_list):
                    if len(line.cells) > i:
                        vals[el] = ustr(line.cells[i])
                    else:
                        vals[el] = False
            # Check values
            employee_id = False
            try:
                vals, employee_id = self.update_or_create_employee(cr, uid, vals, context)
            except osv.except_osv, e:
                # collect the error and keep importing the other rows
                errors.append('Line %s, %s' % (start+num, e.value))
                continue
            # Do creation/update
            context.update({'from': 'import'})
            if employee_id:
                self.pool.get('hr.employee').write(cr, uid, [employee_id], vals, context)
                updated += 1
            else:
                self.pool.get('hr.employee').create(cr, uid, vals, context)
                created += 1
        # persist collected errors so they show up in the confirmation wizard
        for error in errors:
            self.pool.get('hr.payroll.employee.import.errors').create(cr, uid, {'wizard_id': wiz.id, 'msg': error})
        if errors:
            context.update({'employee_import_wizard_ids': wiz.id})
        context.update({'message': ' '})
        view_id = self.pool.get('ir.model.data').get_object_reference(cr, uid, 'msf_homere_interface', 'payroll_import_confirmation')
        view_id = view_id and view_id[1] or False
        # This is to redirect to Employee Tree View
        context.update({'from': 'nat_staff_import'})
        res_id = self.pool.get('hr.payroll.import.confirmation').create(cr, uid, {'created': created, 'updated': updated, 'total': processed,
            'state': 'employee', 'filename': wiz.filename or False,}, context=context)
    # NOTE(review): return placement after the wizard loop is a
    # reconstruction guess (uses view_id/res_id of the last iteration).
    return {
        'name': 'National staff employee import confirmation',
        'type': 'ir.actions.act_window',
        'res_model': 'hr.payroll.import.confirmation',
        'view_mode': 'form',
        'view_type': 'form',
        'view_id': [view_id],
        'res_id': res_id,
        'target': 'new',
        'context': context,
    }
def lock(self, uri, lock_data):
    """ Lock (may create) resource.
        Data is a dict, may contain: depth, token, refresh, lockscope,
        locktype, owner
    """
    # WebDAV LOCK handler: resolve the node (creating an empty one if it
    # does not exist, per RFC4918) and acquire the lock on it.
    cr, uid, pool, dbname, uri2 = self.get_cr(uri)
    created = False
    if not dbname:
        if cr:
            cr.close()
        raise DAV_Error, 409
    try:
        node = self.uri2object(cr, uid, pool, uri2[:])
    except Exception:
        # resolution failure is treated as "does not exist yet"
        node = False
    objname = misc.ustr(uri2[-1])
    if not node:
        dir_node = self.uri2object(cr, uid, pool, uri2[:-1])
        if not dir_node:
            cr.close()
            raise DAV_NotFound('Parent folder not found')
        # We create a new node (file) but with empty data=None,
        # as in RFC4918 p. 9.10.4
        node = self._try_function(dir_node.create_child, (cr, objname, None),
                                  "create %s" % objname, cr=cr)
        if not node:
            cr.commit()
            cr.close()
            raise DAV_Error(400, "Failed to create resource")
        created = True
    try:
        node_fn = node.dav_lock
    except AttributeError:
        # perhaps the node doesn't support locks
        cr.close()
        raise DAV_Error(400, 'No locks for this resource')
    # Obtain the lock on the node
    lres, pid, token = self._try_function(node_fn, (cr, lock_data),
                                          "lock %s" % objname, cr=cr)
    if not lres:
        cr.commit()
        cr.close()
        raise DAV_Error(423, "Resource already locked")
    assert isinstance(lres, list), 'lres: %s' % repr(lres)
    try:
        data = mk_lock_response(self, uri, lres)
        cr.commit()
    except Exception:
        cr.close()
        raise
    cr.close()
    return created, data, token
def fields_view_get(self, cr, uid, view_id=None, view_type='form',
                    context=None, toolbar=False, submenu=False):
    """
    Verify that all lines have an analytic distribution.
    Create all non-analytic-a-holic lines to give a third parties
    """
    if not context:
        context = {}
    res = super(hr_payroll_validation, self).fields_view_get(
        cr, uid, view_id, view_type, context, toolbar, submenu)
    # Verification and sorting lines as explained in UTP-342
    line_ids = self.pool.get('hr.payroll.msf').search(
        cr, uid, [('state', '=', 'draft')], order='account_id, name')
    for line in self.pool.get('hr.payroll.msf').browse(cr, uid, line_ids):
        # every analytic-addicted account line must carry a valid distribution
        if line.account_id and line.account_id.is_analytic_addicted and line.analytic_state != 'valid':
            raise osv.except_osv(_('Warning'), _('Some lines have analytic distribution problems!'))
    if view_type == 'form':
        # Inject, after the first label, one group per non-analytic line so
        # the user can fill in its third-party fields.
        form = ET.fromstring(res['arch'])
        field = form.find('.//label')
        parent = field.getparent()
        for el in self.pool.get('hr.payroll.msf').browse(cr, uid, line_ids):
            if el.account_id and not el.account_id.is_analytic_addicted:
                # dynamic field names keyed by the line id
                third = 'third' + str(el.id)
                fourth = 'fourth' + str(el.id)
                fifth = 'fifth' + str(el.id)
                is_required = False
                if el.account_id.type_for_register and el.account_id.type_for_register == 'payroll':
                    is_required = True
                parent.insert(parent.index(field)+1, ET.XML('<group col="4" colspan="4" invisible="%s"> <label string="%s"/><group col="6" colspan="1"><field name="%s" readonly="1"/><field name="%s" readonly="1"/><field name="%s" required="%s"/></group></group>' % (not is_required, ustr(el.name) + ' - ' + ustr(el.ref), fourth, fifth, third, is_required)))
        res['arch'] = ET.tostring(form)
    return res
# 2nd phase: store the metadata try: icont = '' mime = ira.file_type if not mime: mime = "" try: mime, icont = cntIndex.doIndex(data, ira.datas_fname, ira.file_type or None, fname) except Exception: self._doclog.debug('Cannot index file:', exc_info=True) pass try: icont_u = ustr(icont) except UnicodeError: icont_u = '' # a hack: /assume/ that the calling write operation will not try # to write the fname and size, and update them in the db concurrently. # We cannot use a write() here, because we are already in one. cr.execute('UPDATE ir_attachment SET store_fname = %s, file_size = %s, file_type = %s WHERE id = %s', (store_fname, filesize, mime, file_node.file_id)) file_node.content_length = filesize file_node.content_type = mime return True except Exception, e : self._doclog.warning("Couldn't save data:", exc_info=True) # should we really rollback once we have written the actual data? # at the db case (only), that rollback would be safe
# US_263: get employee destination, if haven't get default destination if employee_id: emp = self.pool.get('hr.employee').browse(cr, uid, employee_id, context=context) if destination_id and destination_id != emp.destination_id.id: to_update_employee = True # turn the flag to update the employee if not destination_id and emp.destination_id: # US-671: Only update if the destination from the import is not valid destination_id = emp.destination_id.id if not destination_id: if not account.default_destination_id: raise osv.except_osv(_('Warning'), _('No default Destination defined for this account: %s') % (account.code or '',)) destination_id = account.default_destination_id and account.default_destination_id.id or False # Fetch description if not name: name = description and description[0] and ustr(description[0]) or '' if is_payroll_rounding: name = 'Payroll rounding' if not employee_id: if second_description and second_description[0]: ref = ustr(second_description[0]) # Check if currency exists if not currency and not currency[0]: raise osv.except_osv(_('Warning'), _('One currency is missing!')) currency_ids = self.pool.get('res.currency').search(cr, uid, [('name', '=', ustr(currency[0])), ('active', '=', True)]) if not currency_ids: raise osv.except_osv(_('Error'), _('No \'%s\' currency or non-active currency.') % (ustr(currency[0]),)) if len(currency_ids) > 1: raise osv.except_osv(_('Error'), _('More than one currency \'%s\' found.') % (ustr(currency[0]),)) currency_id = currency_ids[0] # Create the payroll entry
def button_validate(self, cr, uid, ids, context=None):
    """
    Open ZIP file and search staff.csv
    """
    # NOTE(review): indentation reconstructed from a whitespace-mangled
    # source - confirm statement nesting against version control.
    if not context:
        context = {}
    # Prepare some values
    staff_file = 'staff.csv'
    contract_file = 'contrat.csv'
    job_file = 'fonction.csv'
    res = False
    message = _("Employee import FAILED.")
    created = 0
    updated = 0
    processed = 0
    filename = ""
    registered_keys = {}
    # Delete old errors
    error_ids = self.pool.get('hr.payroll.employee.import.errors').search(cr, uid, [])
    if error_ids:
        self.pool.get('hr.payroll.employee.import.errors').unlink(cr, uid, error_ids)
    for wiz in self.browse(cr, uid, ids):
        if not wiz.file:
            raise osv.except_osv(_('Error'), _('Nothing to import.'))
        # dump the uploaded payload into a temp file for format detection
        fileobj = NamedTemporaryFile('w+b', delete=False)
        fileobj.write(decodestring(wiz.file))
        # now we determine the file format
        filename = fileobj.name
        fileobj.close()
        job_reader, contract_reader, staff_reader, desc_to_close, tmpdir = self.read_files(cr, uid, filename)
        filename = wiz.filename or ""
        job_ids = False
        if job_reader:
            job_ids = self.update_job(cr, uid, ids, job_reader, context=context)
        # Do not raise error for job file because it's just a useful piece of data, but not more.
        # read the contract file
        contract_ids = False
        if contract_reader:
            contract_ids = self.update_contract(cr, uid, ids, contract_reader, context=context)
        # UF-2472: Read all lines to check employee's code before importing
        staff_data = []
        staff_codes = []
        duplicates = []
        staff_seen = []
        for line in staff_reader:
            staff_seen.append(line)
            data = self.read_employee_infos(cr, uid, line)
            processed += 1
            if data:
                # to avoid False value in staff_data list
                staff_data.append(data)
                code = data[0]
                if code in staff_codes:
                    duplicates.append(code)
                staff_codes.append(code)
        # Delete duplicates of… duplicates!
        duplicates = list(set(duplicates))
        details = []
        for employee_infos in staff_data:
            employee_code = employee_infos[0]
            if employee_code in duplicates:
                details.append(','.join([ustr(employee_infos[1]), ustr(employee_infos[2])]))
        res = True
        if not details:
            # no duplicated codes: do the real import, recounting from zero
            created = 0
            processed = 0
            updated = 0
            # UF-2504 read staff file again for next enumeration
            # (because already read/looped above for staff codes)
            for i, employee_data in enumerate(staff_seen):
                update, nb_created, nb_updated = self.update_employee_infos(
                    cr, uid, employee_data, wiz.id, i,
                    registered_keys=registered_keys)
                if not update:
                    res = False
                created += nb_created
                updated += nb_updated
                processed += 1
        else:
            # duplicated employee codes abort the import
            res = False
            message = _('Several employees have the same unique code: %s.') % (';'.join(details))
            self.pool.get('hr.payroll.employee.import.errors').create(cr, uid, {'wizard_id': wiz.id, 'msg': message})
        # Close Temporary File
        # Delete previous created lines for employee's contracts
        if contract_ids:
            self.pool.get('hr.contract.msf').unlink(cr, uid, contract_ids)
        for to_close in desc_to_close:
            to_close.close()
        if tmpdir:
            shutil.rmtree(tmpdir)
    del registered_keys
    if res:
        message = _("Employee import successful.")
    else:
        context.update({'employee_import_wizard_ids': ids})
    context.update({'message': message})
    view_id = self.pool.get('ir.model.data').get_object_reference(cr, uid, 'msf_homere_interface', 'payroll_import_confirmation')
    view_id = view_id and view_id[1] or False
    # This is to redirect to Employee Tree View
    context.update({'from': 'employee_import'})
    res_id = self.pool.get('hr.payroll.import.confirmation').create(cr, uid, {'filename': filename, 'created': created, 'updated': updated, 'total': processed, 'state': 'employee'}, context)
    return {
        'name': 'Employee Import Confirmation',
        'type': 'ir.actions.act_window',
        'res_model': 'hr.payroll.import.confirmation',
        'view_mode': 'form',
        'view_type': 'form',
        'view_id': [view_id],
        'res_id': res_id,
        'target': 'new',
        'context': context,
    }
# 2nd phase: store the metadata try: icont = '' mime = ira.file_type if not mime: mime = "" try: mime, icont = cntIndex.doIndex(data, ira.datas_fname, ira.file_type or None, fname) except Exception: _logger.debug('Cannot index file:', exc_info=True) pass try: icont_u = ustr(icont) except UnicodeError: icont_u = '' # a hack: /assume/ that the calling write operation will not try # to write the fname and size, and update them in the db concurrently. # We cannot use a write() here, because we are already in one. cr.execute('UPDATE ir_attachment SET store_fname = %s, file_size = %s, index_content = %s, file_type = %s WHERE id = %s', (store_fname, filesize, icont_u, mime, file_node.file_id)) file_node.content_length = filesize file_node.content_type = mime return True except Exception, e : self._logger.warning("Couldn't save data:", exc_info=True) # should we really rollback once we have written the actual data? # at the db case (only), that rollback would be safe