if OpenOffice_service: cr.execute( "SELECT id, state FROM ir_module_module WHERE name='report_aeroo_ooo'" ) helper_module = cr.dictfetchone() helper_installed = helper_module and helper_module[ 'state'] == 'installed' if OpenOffice_service and helper_installed: cr.execute("SELECT host, port FROM oo_config") host, port = cr.fetchone() try: OpenOffice_service(cr, host, port) netsvc.Logger().notifyChannel( 'report_aeroo', netsvc.LOG_INFO, "OpenOffice.org connection successfully established") except Exception, e: cr.rollback() netsvc.Logger().notifyChannel('report_aeroo', netsvc.LOG_WARNING, str(e)) ############################################## cr.execute( "SELECT * FROM ir_act_report_xml WHERE report_type = 'aeroo' and active = true ORDER BY id" ) # change for OpenERP 6.0 records = cr.dictfetchall() for record in records: parser = rml_parse if record['parser_state'] == 'loc' and record['parser_loc']: parser = self.load_from_file(record['parser_loc'], cr.dbname,
def log(self, message, level=netsvc.LOG_DEBUG):
    """Emit *message* on the 'pxgo_openffice_reports' log channel.

    :param message: text to write to the log
    :param level: netsvc log level (defaults to LOG_DEBUG)
    """
    channel_logger = netsvc.Logger()
    channel_logger.notifyChannel('pxgo_openffice_reports', level, message)
def do_export(self, cr, uid, data, context): categ_new = 0 categ_update = 0 categ_fail = 0 logger = netsvc.Logger() self.pool = pooler.get_pool(cr.dbname) categ_pool = self.pool.get('product.category') mw_id = self.pool.get('magento.web').search(cr, uid, [('magento_flag', '=', True)]) mw = self.pool.get('magento.web').browse(cr, uid, mw_id[0]) (server, session) = mw.connect() #=============================================================================== # Getting ids #=============================================================================== if data['model'] == 'ir.ui.menu': categ_ids = categ_pool.search(cr, uid, [('exportable', '=', True)]) else: categ_ids=[] categ_not=[] for id in data['ids']: exportable_category = categ_pool.search(cr, uid, [('id', '=', id), ('exportable', '=', True)]) if len(exportable_category) == 1: categ_ids.append(exportable_category[0]) else: categ_not.append(id) if len(categ_not) > 0: raise wizard.except_wizard("Error", "you asked to export non-exportable categories : IDs %s" % categ_not) #=============================================================================== # Category packaging #=============================================================================== categories = categ_pool.browse(cr, uid, categ_ids, context=context) categories.sort(lambda x, y : (int(x.parent_id) or 0) - int(y.parent_id)) for category in categories : path='' #construct path magento_parent_id=1 #root catalog if(type(category.parent_id.id) == (int)): #if not root category last_parent=categ_pool.browse(cr, uid, category.parent_id.id) magento_parent_id=last_parent.magento_id path= str(last_parent.magento_id) while(type(last_parent.parent_id.id) == (int)): last_parent=categ_pool.browse(cr, uid, last_parent.parent_id.id) path=str(last_parent.magento_id)+'/'+path path='1/'+path path=path.replace("//","/") if path.endswith('/'): path=path[0:-1] category_data = { 'name' : category.name, 'path' : path, 'is_active' : 1, } 
#=============================================================================== # Category upload to Magento #=============================================================================== try: if(category.magento_id == 0): new_id = server.call(session,'category.create', [magento_parent_id, category_data]) categ_pool.write_magento_id(cr, uid, category.id, {'magento_id': new_id}) logger.notifyChannel("Magento Export", netsvc.LOG_INFO, " Successfully created category with OpenERP id %s and Magento id %s" % (category.id, new_id)) categ_new += 1 else: server.call(session,'category.update',[category_magento_id, category_data]) logger.notifyChannel("Magento Export", netsvc.LOG_INFO, " Successfully updated category with OpenERP id %s and Magento id %s" % (category.id, category.magento_id)) categ_update += 1 except xmlrpclib.Fault, error: if error.faultCode == 102: #turns out that the category doesn't exist in Magento (might have been deleted), try to create a new one. try: new_id = server.call(session,'category.create', [magento_parent_id, category_data]) categ_pool.write_magento_id(cr, uid, category.id, {'magento_id': new_id}) logger.notifyChannel("Magento Export", netsvc.LOG_INFO, " Successfully created category with OpenERP id %s and Magento id %s" % (category.id, new_id)) categ_new += 1 except xmlrpclib.Fault, error: logger.notifyChannel("Magento Export", netsvc.LOG_ERROR, "Magento API return an error on category id %s . Error %s" % (category.id, error)) categ_fail += 1 else: logger.notifyChannel("Magento Export", netsvc.LOG_ERROR, "Magento API return an error on category id %s . Error %s" % (category.id, error)) categ_fail += 1
# along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################## import xmlrpclib import socket import os import time import base64 from osv import fields,osv import tools import netsvc from tools.translate import _ logger = netsvc.Logger() def execute(connector, method, *args): res = False try: res = getattr(connector,method)(*args) except socket.error,e: raise e return res addons_path = tools.config['addons_path'] + '/auto_backup/DBbackups' class db_backup(osv.osv): _name = 'db.backup' def get_db_list(self, cr, user, ids, host='localhost', port='8069', context={}):
def __init__(self, name):
    """ constructor

    Attach a netsvc logger to the report, then delegate to the parent
    report class's constructor.
    """
    self.logger = netsvc.Logger()
    super(indicator_account_detail, self).__init__(name)
import netsvc import os from osv import osv, fields import pooler import re import time from tools.translate import _ from tools.config import config import tools from oorq.decorators import job import six LOGGER = netsvc.Logger() class PowersmsSMSbox(osv.osv): _name = "powersms.smsbox" _description = 'Power SMS SMSbox included all type inbox,outbox,junk..' _rec_name = "reference" _order = "date_sms desc" callbacks = { 'create': 'powersms_create_callback', 'write': 'powersms_write_callback', 'unlink': 'powersms_unlink_callback', } def powersms_callback(self, cursor, uid, ids,
def log(msg, level=netsvc.LOG_INFO):
    """Write *msg* to the 'async_reports' log channel at *level*."""
    netsvc.Logger().notifyChannel('async_reports', level, msg)
def sync_images(self, cr, uid, ids, context):
    """Import Magento product image metadata into OpenERP.

    For every exportable product of every shop in the instances' shop
    groups, fetch the media list from Magento and create or update the
    matching product.images records (stored as URL links, not binaries).
    """
    logger = netsvc.Logger()
    # NOTE(review): shop_ids is computed but never used.
    shop_ids = self.pool.get('sale.shop').search(cr, uid, [])
    for inst in self.browse(cr, uid, ids, context):
        shop_groups = inst.shop_group_ids
        for shop_group in shop_groups:
            shops = shop_group.shop_ids
            for shop in shops:
                # NOTE(review): a new external connection is opened per shop
                # even though it only depends on `inst`.
                conn = self.external_connection(cr, uid, inst)
                for product in shop.exportable_product_ids:
                    try:
                        img_list = conn.call(
                            'catalog_product_attribute_media.list',
                            [product.magento_sku])
                    except Exception, e:
                        # Product unknown on the Magento side: log and skip it.
                        self.log(
                            cr, uid, product.id,
                            "failed to find product with sku %s for product id %s in Magento!" % (
                                product.magento_sku,
                                product.id,
                            ))
                        logger.notifyChannel(
                            'ext synchro', netsvc.LOG_DEBUG,
                            "failed to find product with sku %s for product id %s in Magento!" % (
                                product.magento_sku,
                                product.id,
                            ))
                        continue
                    logger.notifyChannel(
                        'ext synchro', netsvc.LOG_INFO,
                        "Magento image for SKU %s: %s" %
                        (product.magento_sku, img_list))
                    for image in img_list:
                        # Map the Magento media entry onto product.images values.
                        data = {
                            'name': image['label'] or os.path.splitext(
                                os.path.split(image['file'])[1])[0],
                            'link': True,
                            'filename': image['url'],
                            'product_id': product.id,
                            'base_image': image['types'].count('image') == 1,
                            'small_image': image['types'].count('small_image') == 1,
                            'thumbnail': image['types'].count('thumbnail') == 1,
                            'exclude': image['exclude'],
                            'position': image['position']
                        }
                        image_ext_name_obj = self.pool.get(
                            'product.images.external.name')
                        image_ext_name_id = image_ext_name_obj.search(
                            cr, uid, [('name', '=', image['file']),
                                      ('external_referential_id', '=', inst.id)],
                            context=context)
                        if image_ext_name_id:
                            # update existing image
                            # find the correspondent product image from the external name
                            image_ext_name = image_ext_name_obj.read(
                                cr, uid, image_ext_name_id, [], context=context)
                            # NOTE(review): this uses the external-name record's
                            # own id as a product.images id; presumably it should
                            # use its image_id field -- verify against the model.
                            if self.pool.get('product.images').search(
                                    cr, uid,
                                    [('id', '=', image_ext_name[0]['id'])],
                                    context=context):
                                self.pool.get('product.images').write(
                                    cr, uid, image_ext_name[0]['id'], data,
                                    context=context)
                                image_ext_name_obj.write(
                                    cr, uid, image_ext_name_id,
                                    {'name': image['file']}, context=context)
                            else:
                                self.log(
                                    cr, uid, product.id,
                                    "failed to find product image with id %s for product id %s in OpenERP!" % (
                                        image_ext_name_id,
                                        product.id,
                                    ))
                                logger.notifyChannel(
                                    'ext synchro', netsvc.LOG_DEBUG,
                                    "failed to find product image with id %s for product id %s in OpenERP!" % (
                                        image_ext_name_id,
                                        product.id,
                                    ))
                                continue
                        else:
                            # create new image
                            new_image_id = self.pool.get(
                                'product.images').create(cr, uid, data,
                                                         context=context)
                            image_ext_name_obj.create(
                                cr, uid, {
                                    'name': image['file'],
                                    'external_referential_id': inst.id,
                                    'image_id': new_image_id
                                }, context=context)
class res_config_configurable(osv.osv_memory):
    ''' Base classes for new-style configuration items

    Configuration items should inherit from this class, implement
    the execute method (and optionally the cancel one) and have
    their view inherit from the related res_config_view_base view.
    '''
    _name = 'res.config'
    # Shared channel logger for all configuration items.
    logger = netsvc.Logger()

    def get_current_progress(self, cr, uid, context=None):
        '''Return a description the current progress of configuration:
        a tuple of (non_open_todos:int, total_todos: int)
        '''
        return (self.pool.get('ir.actions.todo')\
                .search_count(cr, uid, [('state','<>','open')], context),
                self.pool.get('ir.actions.todo')\
                .search_count(cr, uid, [], context))

    def _progress(self, cr, uid, context=None):
        # Percentage of non-open todos over all todos; 100 when none exist.
        closed, total = self.get_current_progress(cr, uid, context=context)
        if total:
            return round(closed * 100. / total)
        return 100.

    def _get_image(self, cr, uid, context=None):
        # Pick one of the three bundled configuration pixmaps at random.
        file_no = str(random.randint(1, 3))
        path = os.path.join('base', 'res', 'config_pixmaps/%s.png' % file_no)
        file_data = tools.file_open(path, 'rb').read()
        return base64.encodestring(file_data)

    _columns = dict(
        progress=fields.float('Configuration Progress', readonly=True),
        config_logo=fields.binary('Image', readonly=True),
    )
    _defaults = dict(progress=_progress, config_logo=_get_image)

    def _next_action(self, cr, uid):
        """Return the next open ir.actions.todo the current user may run,
        skipping (and recursing past) todos restricted to groups the user
        is not in; None when nothing is left.
        """
        todos = self.pool.get('ir.actions.todo')
        self.logger.notifyChannel('actions', netsvc.LOG_INFO,
                                  'getting next %s' % todos)
        active_todos = todos.search(cr, uid, [('state', '=', 'open')],
                                    limit=1)
        dont_skip_todo = True
        if active_todos:
            todo_obj = todos.browse(cr, uid, active_todos[0], context=None)
            todo_groups = map(lambda x: x.id, todo_obj.groups_id)
            if todo_groups:
                # Only offer the todo if the user belongs to one of its groups.
                cr.execute(
                    "select 1 from res_groups_users_rel where uid=%s and gid IN %s",
                    (
                        uid,
                        tuple(todo_groups),
                    ))
                dont_skip_todo = bool(cr.fetchone())
            if dont_skip_todo:
                return todos.browse(cr, uid, active_todos[0], context=None)
            else:
                # Not allowed for this user: mark skipped and try the next one.
                todos.write(cr, uid, active_todos[0], {'state': 'skip'},
                            context=None)
                return self._next_action(cr, uid)
        return None

    def _set_previous_todo(self, cr, uid, state):
        """ lookup the previous (which is still the next at this point)
        ir.actions.todo, set it to whatever state was provided.

        Raises
        `LookupError`: if we couldn't find *any* previous todo
        `ValueError`: if no state is provided
        anything ir_actions_todo.write can throw
        """
        # this is ultra brittle, but apart from storing the todo id
        # into the res.config view, I'm not sure how to get the
        # "previous" todo
        previous_todo = self._next_action(cr, uid)
        if not previous_todo:
            raise LookupError(_("Couldn't find previous ir.actions.todo"))
        if not state:
            raise ValueError(
                _("Can't set an ir.actions.todo's state to "
                  "nothingness"))
        previous_todo.write({'state': state})

    def _next(self, cr, uid):
        """Return the action dict of the next todo, or -- when all todos are
        done -- the action behind the current user's home menu.
        """
        self.logger.notifyChannel('actions', netsvc.LOG_INFO,
                                  'getting next operation')
        next = self._next_action(cr, uid)
        self.logger.notifyChannel('actions', netsvc.LOG_INFO,
                                  'next action is %s' % next)
        if next:
            action = next.action_id
            return {
                'view_mode': action.view_mode,
                'view_type': action.view_type,
                'view_id': action.view_id and [action.view_id.id] or False,
                'res_model': action.res_model,
                'type': action.type,
                'target': action.target,
            }
        self.logger.notifyChannel(
            'actions', netsvc.LOG_INFO,
            'all configuration actions have been executed')
        current_user_menu = self.pool.get('res.users')\
            .browse(cr, uid, uid).menu_id
        # return the action associated with the menu
        return self.pool.get(current_user_menu.type)\
            .read(cr, uid, current_user_menu.id)

    def start(self, cr, uid, ids, context=None):
        """Re-open restartable todos, then jump to the next one."""
        # NOTE(review): debug print statement left in production code.
        print 'Start'
        ids2 = self.pool.get('ir.actions.todo').search(cr, uid, [],
                                                       context=context)
        for todo in self.pool.get('ir.actions.todo').browse(cr, uid, ids2,
                                                            context=context):
            if (todo.restart == 'always') or (todo.restart == 'onskip' and
                    (todo.state in ('skip', 'cancel'))):
                todo.write({'state': 'open'})
        return self.next(cr, uid, ids, context)

    def next(self, cr, uid, ids, context=None):
        """ Returns the next todo action to execute
        (using the default sort order)
        """
        return self._next(cr, uid)

    def execute(self, cr, uid, ids, context=None):
        """ Method called when the user clicks on the ``Next`` button.

        Execute *must* be overloaded unless ``action_next`` is overloaded
        (which is something you generally don't need to do).

        If ``execute`` returns an action dictionary, that action is
        executed rather than just going to the next configuration item.
        """
        raise NotImplementedError(
            'Configuration items need to implement execute')

    def cancel(self, cr, uid, ids, context=None):
        """ Method called when the user click on the ``Skip`` button.

        ``cancel`` should be overloaded instead of ``action_skip``. As
        with ``execute``, if it returns an action dictionary that action
        is executed in stead of the default (going to the next
        configuration item)

        The default implementation is a NOOP.

        ``cancel`` is also called by the default implementation of
        ``action_cancel``.
        """
        pass

    def action_next(self, cr, uid, ids, context=None):
        """ Action handler for the ``next`` event.

        Sets the status of the todo the event was sent from to
        ``done``, calls ``execute`` and -- unless ``execute`` returned
        an action dictionary -- executes the action provided by calling
        ``next``.
        """
        self._set_previous_todo(cr, uid, state='done')
        next = self.execute(cr, uid, ids, context=None)
        if next:
            return next
        return self.next(cr, uid, ids, context=context)

    def action_skip(self, cr, uid, ids, context=None):
        """ Action handler for the ``skip`` event.

        Sets the status of the todo the event was sent from to
        ``skip``, calls ``cancel`` and -- unless ``cancel`` returned an
        action dictionary -- executes the action provided by calling
        ``next``.
        """
        self._set_previous_todo(cr, uid, state='skip')
        next = self.cancel(cr, uid, ids, context=None)
        if next:
            return next
        return self.next(cr, uid, ids, context=context)

    def action_cancel(self, cr, uid, ids, context=None):
        """ Action handler for the ``cancel`` event.

        That event isn't generated by the res.config.view.base
        inheritable view, the inherited view has to overload one of
        the buttons (or add one more).

        Sets the status of the todo the event was sent from to
        ``cancel``, calls ``cancel`` and -- unless ``cancel`` returned
        an action dictionary -- executes the action provided by calling
        ``next``.
        """
        self._set_previous_todo(cr, uid, state='cancel')
        next = self.cancel(cr, uid, ids, context=None)
        if next:
            return next
        return self.next(cr, uid, ids, context=context)
def action_do_import(self, cr, uid, ids, context=None):
    """Import the wizard's Google-worksheet rows as records of the target
    model.

    Rows below the configured start row, rows already 'done', and rows
    whose cells map to no values are ignored; each remaining row is
    converted through the field mapping and created on wizard.model_id.
    """
    if context is None:
        context = {}
    # NOTE(review): obj_model, obj_worksheet and ws_id are never used below.
    obj_model = self.pool.get('ir.model.data')
    obj_worksheet = self.pool.get('google.worksheet')
    obj_mapping = self.pool.get('google.worksheet.fields.mapping')
    obj_cells = self.pool.get('google.worksheet.cells')
    obj_rows = self.pool.get('google.worksheet.rows')
    wizard = self.browse(cr, uid, ids[0], context=context)
    obj = self.pool.get(wizard.model_id.model)
    ws_id = wizard.worksheet_id.id
    rows_created = []
    rows_error = []
    rows_ignored = []
    err = ''
    msg = ''
    for row in wizard.import_data:
        netsvc.Logger().notifyChannel(
            "action_do_import", netsvc.LOG_INFO,
            "Row:%s (id: %s) \n" % (row.row, row.id))
        # Skip header rows before the configured first data row.
        if row.row < wizard.row_data:
            rows_ignored.append(row.id)
            msg += "Row:%s (id:%s) Ignored less than %s \n" % (
                row.row, row.id, wizard.row_data)
            netsvc.Logger().notifyChannel(
                "action_do_import", netsvc.LOG_INFO,
                "Row:%s (id:%s) Ignored less than %s \n" %
                (row.row, row.id, wizard.row_data))
            continue
        # Skip rows already imported in a previous run.
        if row.state == 'done':
            rows_ignored.append(row.id)
            msg += "Row:%s (id:%s) Ignored state:%s \n" % (row.row, row.id,
                                                           row.state)
            netsvc.Logger().notifyChannel(
                "action_do_import", netsvc.LOG_INFO,
                "Row:%s (id:%s) Ignored state:%s \n" %
                (row.row, row.id, row.state))
            continue
        cell_ids = []
        vals = {}
        # Convert each mapped cell to a model-field value.
        for cell in row.cell_ids:
            mapping_ids = obj_mapping.search(
                cr, uid,
                [('worksheet_id', '=', cell.worksheet_id.id),
                 ('worksheet_field_id', '=', cell.col_id.id)],
                context=context)
            if not mapping_ids:
                continue
            mapping_id = mapping_ids[0]
            mapping = obj_mapping.browse(cr, uid, mapping_id,
                                         context=context)
            cell_id = cell.id
            vals[mapping.model_field_id.name] = obj_cells.get_map_value(
                cr, uid, cell_id, mapping_id)
            cell_ids.append(cell_id)
        if not vals:
            rows_ignored.append(row.id)
            msg += "Row:%s (id:%s) Ignored there aren't values %s \n" % (
                row.row, row.id, vals)
            netsvc.Logger().notifyChannel(
                "action_do_import", netsvc.LOG_INFO,
                "Row:%s (id:%s) Ignored there aren't values %s \n" %
                (row.row, row.id, vals))
            continue
        try:
            obj_id = obj.create(cr, uid, vals, context=context)
            rows_created.append(obj_id)
            msg += "Row:%s (id:%s) Created: %s(id:%s) \n" % (
                row.row, row.id, wizard.model_id.name, obj_id)
            netsvc.Logger().notifyChannel(
                "action_do_import", netsvc.LOG_INFO,
                "Row:%s (id:%s) Created: %s(id:%s) \n" %
                (row.row, row.id, wizard.model_id.name, obj_id))
            obj_rows.write(cr, uid, [row.id], {
                'state': 'done',
                'model_row_id': obj_id
            }, context=context)
            obj_cells.write(cr, uid, cell_ids, {'state': 'done'},
                            context=context)
        except Exception, e:
            netsvc.Logger().notifyChannel(
                "action_do_import", netsvc.LOG_INFO,
                "Error at create te Row:%s (id:%s) Vals: %s - Error:%s \n" %
                (row.row, row.id, vals, e))
            raise osv.except_osv(
                'Error',
                'model:%s - row: %s \n vals: %s \n Error: %s' %
                (wizard.model_id.model, row.row, vals, e))
            # NOTE(review): everything below the raise is unreachable, and
            # the dict key 'vals: %s \n error' is not a valid field name --
            # the error-recording branch looks like an abandoned alternative
            # to aborting the whole import; confirm the intended behavior.
            err += "Error at create te Row:%s (id:%s) Vals: %s - Error:%s \n" % (
                row.row, row.id, vals, e)
            rows_error.append(row.id)
            obj_rows.write(cr, uid, [row.id], {
                'vals: %s \n error': (vals, e),
                'state': 'error'
            }, context=context)
            obj_cells.write(cr, uid, cell_ids, {'state': 'error'},
                            context=context)
def onchange_partner_id(self, cr, uid, ids, part, email=False):
    """Extend the CRM lead partner onchange with address and contact data.

    On top of the parent onchange result, collect the partner's address
    ids, the jobs attached to those addresses and their distinct
    contacts; when exactly one contact is found, preselect it and copy
    its best-ranked address fields into the lead values.

    :param part: res.partner id selected on the lead
    :param email: passed through to the parent onchange
    :return: the (augmented) onchange dictionary from the parent class
    """
    ret = super(crm_lead, self).onchange_partner_id(cr, uid, ids, part,
                                                    email=email)
    logger = netsvc.Logger()
    #logger.notifyChannel('Productivity/CRM - onchanger_partner_id', netsvc.LOG_INFO, " ret="+str(ret) )
    part_addr = []
    cnt_jobs = []
    contacts = []
    # Guard with .get(): the parent may not set partner_address_id at all.
    if ret.get('value') and ret['value'].get('partner_address_id'):
        partner = self.pool.get('res.partner').browse(cr, uid, part,
                                                      context={})
        if partner:
            for pa in partner.address:
                part_addr.append(pa.id)
                for job in pa.job_ids:
                    if job.id not in cnt_jobs:
                        cnt_jobs.append(job.id)
                    if job.contact_id and (job.contact_id.id not in contacts):
                        contacts.append(job.contact_id.id)
        ret['value']['partner_addresses'] = part_addr
        ret['value']['contacts_jobs'] = cnt_jobs
        ret['value']['sel_contact_id'] = False
        contact = {
            'partner_name': False,
            'title': False,
            'function': False,
            'street': False,
            'street2': False,
            'zip': False,
            'city': False,
            'country_id': False,
            'state_id': False,
        }
        #logger.notifyChannel('Productivity/CRM - onchanger_partner_id', netsvc.LOG_INFO, " contacts="+str(contacts) )
        if len(contacts) == 1:
            # Exactly one contact: preselect it and pull its address fields.
            contact_id = contacts[0]
            ret['value']['sel_contact_id'] = contact_id
            # FIX: parameterized query instead of building SQL by string
            # concatenation (part_addr is non-empty here: a contact implies
            # at least one address/job).
            cr.execute(
                "SELECT min(sequence_partner), address_id FROM res_partner_job "
                "WHERE address_id IN %s AND contact_id = %s "
                "GROUP BY address_id",
                (tuple(part_addr), contact_id))
            row = cr.fetchone()
            if row and row[1]:
                addr_obj = self.pool.get('res.partner.address').read(
                    cr, uid, row[1])
                if addr_obj:
                    for fld in contact:
                        if (fld in addr_obj) and addr_obj[fld]:
                            contact[fld] = addr_obj[fld]
            contacts_names = self.pool.get('res.partner.contact').name_get(
                cr, uid, [contact_id])
            contact['partner_name'] = contacts_names[0][1]
            for fld in contact:
                ret['value'][fld] = contact[fld]
    return ret
def get_map_value(self, cr, uid, cell_id, mapping_id, context=None): if context is None: context = {} obj_mapping = self.pool.get('google.worksheet.fields.mapping') cell = self.browse(cr, uid, cell_id, context=context) mapping = obj_mapping.browse(cr, uid, mapping_id, context=context) val = cell.content try: if mapping.model_field_id.ttype in ('float'): val = float(val) if mapping.model_field_id.ttype in ('boolean'): if val.upper() == 'FALSE': val = False elif val.upper() == 'TRUE': val = True elif val.isdigit(): val = int(val) val = bool(val) if mapping.model_field_id.ttype in ('integer'): val = int(val) if mapping.model_field_id.ttype in ('many2one'): if val.isdigit(): val = int(val) else: obj = self.pool.get(mapping.model_field_id.relation) obj_ids = obj.name_search(cr, uid, name=val) if obj_ids: val = obj_ids[0][0] else: netsvc.Logger().notifyChannel( "get_map_value", netsvc.LOG_INFO, "cell_id:%s - content: %s - val: %s \n No Values - relation: %s" % (cell_id, cell.content, val, obj_ids, mapping.model_field_id.relation)) self.write(cr, uid, cell_id, { 'error': 'No Values - relation: %s - val: %s' % (mapping.model_field_id.relation, val) }, context=context) if len(obj_ids) > 1: netsvc.Logger().notifyChannel( "get_map_value", netsvc.LOG_INFO, "cell_id:%s - content: %s - val: %s \n Multipe Values: %s - relation: %s" % (cell_id, cell.content, val, obj_ids, mapping.model_field_id.relation)) self.write( cr, uid, cell_id, { 'error': 'Multiple Values: %s - relation: %s' % (obj_ids, mapping.model_field_id.relation) }, context=context) except Exception, e: netsvc.Logger().notifyChannel( "get_map_value", netsvc.LOG_INFO, 'cell_id:%s - name: %s - content: %s - val: %s \n Error: %s' % (cell_id, cell.name, cell.content, val, e)) raise osv.except_osv( 'Error', 'cell_id:%s - name: %s - content: %s - val: %s \n Error: %s' % (cell_id, cell.name, cell.content, val, e)) self.write(cr, uid, cell_id, { 'state': 'error', 'error': e }, context=context)
def create_single_pdf(self, cursor, uid, ids, data, report_xml,
                      context=None):
    """generate the PDF

    Renders the webkit report's Mako body/footer templates to HTML;
    non-webkit report types are delegated to the parent implementation.
    """
    if context is None:
        context = {}
    if report_xml.report_type != 'webkit':
        return super(WebKitParser, self).create_single_pdf(
            cursor, uid, ids, data, report_xml, context=context)
    self.parser_instance = self.parser(cursor, uid, self.name2,
                                       context=context)
    self.pool = pooler.get_pool(cursor.dbname)
    objs = self.getObjects(cursor, uid, ids, context)
    self.parser_instance.set_context(objs, data, ids,
                                     report_xml.report_type)
    # Template source: file on disk first, then inline webkit data.
    template = False
    if report_xml.report_file:
        path = addons.get_module_resource(report_xml.report_file)
        if os.path.exists(path):
            template = file(path).read()
    if not template and report_xml.report_webkit_data:
        template = report_xml.report_webkit_data
    if not template:
        raise except_osv(_('Error!'),
                         _('Webkit Report template not found !'))
    header = report_xml.webkit_header.html
    footer = report_xml.webkit_header.footer_html
    if not header and report_xml.header:
        raise except_osv(_('No header defined for this Webkit report!'),
                         _('Please set a header in company settings'))
    if not report_xml.header:
        #I know it could be cleaner ...
        # Minimal header exposing wkhtmltopdf's page-number substitution.
        header = u"""
<html>
<head>
<style type="text/css">
${css}
</style>
<script>
function subst() {
var vars={};
var x=document.location.search.substring(1).split('&');
for(var i in x) {var z=x[i].split('=',2);vars[z[0]] = unescape(z[1]);}
var x=['frompage','topage','page','webpage','section','subsection','subsubsection'];
for(var i in x) {
var y = document.getElementsByClassName(x[i]);
for(var j=0; j<y.length; ++j) y[j].textContent = vars[x[i]];
}
}
</script>
</head>
<body style="border:0; margin: 0;" onload="subst()">
</body>
</html>"""
    css = report_xml.webkit_header.css
    if not css:
        css = ''
    user = self.pool.get('res.users').browse(cursor, uid, uid)
    company = user.company_id
    #default_filters=['unicode', 'entity'] can be used to set global filter
    body_mako_tpl = Template(template, input_encoding='utf-8')
    helper = WebKitHelper(cursor, uid, report_xml.id, context)
    try:
        html = body_mako_tpl.render(helper=helper,
                                    css=css,
                                    _=self.translate_call,
                                    **self.parser_instance.localcontext)
    except Exception, e:
        msg = exceptions.text_error_template().render()
        netsvc.Logger().notifyChannel('Webkit render', netsvc.LOG_ERROR,
                                      msg)
        raise except_osv(_('Webkit render'), msg)
    foot = False
    if footer:
        foot_mako_tpl = Template(footer, input_encoding='utf-8')
        try:
            foot = foot_mako_tpl.render(
                company=company,
                time=time,
                helper=helper,
                css=css,
                formatLang=self.formatLang,
                setLang=self.setLang,
                _=self.translate_call,
            )
        except:
            msg = exceptions.text_error_template().render()
            netsvc.Logger().notifyChannel('Webkit render',
                                          netsvc.LOG_ERROR, msg)
            raise except_osv(_('Webkit render'), msg)
    if report_xml.webkit_debug:
        try:
            # NOTE(review): head_mako_tpl is not defined anywhere in this
            # excerpt -- presumably built from `header` in code that is
            # missing here; verify against the full module.
            deb = head_mako_tpl.render(
                company=company,
                time=time,
                helper=helper,
                css=css,
                _debug=html,
                formatLang=self.formatLang,
                setLang=self.setLang,
                _=self.translate_call,
            )
        except Exception, e:
            msg = exceptions.text_error_template().render()
def _do_create_saleorder(self, cr, uid, data, context):
    """Create sale orders from imported ICA journal order blobs.

    For each selected mrp journal with status 'i', turn every pending
    ('n') ica.import_order JSON blob into a sale.order with its lines
    and tax links, creating missing partners on the fly; then mark the
    journal processed ('s') and check for duplicate order numbers.
    """
    today = datetime.datetime.today()
    order_cr = pooler.get_pool(cr.dbname).get('sale.order')
    partner_cr = pooler.get_pool(cr.dbname).get('res.partner')
    address_cr = pooler.get_pool(cr.dbname).get('res.partner.address')
    product_cr = pooler.get_pool(cr.dbname).get('product.product')
    sale_order_cr = pooler.get_pool(cr.dbname).get('sale.order')
    line_cr = pooler.get_pool(cr.dbname).get('sale.order.line')
    tax_cr = pooler.get_pool(cr.dbname).get('sale.order.tax')
    import_order_cr = pooler.get_pool(cr.dbname).get('ica.import_order')
    logger = netsvc.Logger()
    ord_nr = []
    for jid in data['ids']:  # If several day journals were selected
        journal = pooler.get_pool(cr.dbname).get('ica.mrpjournal').read(
            cr, uid, jid)
        # raise osv.except_osv(_('Journal'), _('Data\n%s') % (journal))
        # Just a search to check order_ids is growing
        print journal
        if journal['status'] == 'i':
            # Journal ready for create saleorder
            import_order_ids = import_order_cr.search(
                cr, uid, [("status", "=", "n"), ("mrpjournal_id", "=", jid)])
            print "import_order_ids", import_order_ids
            if import_order_ids:
                for order_record in import_order_cr.read(
                        cr, uid, import_order_ids):
                    order = json.loads(order_record['blob'])
                    # Partner / shop
                    #partner_id = sock.execute(dbname, uid, pwd, 'res.partner', 'search',[("shop_iln","=",order['HEA']['EANSHOP'])]) # Use EANSHOP as key in partner
                    #if not partner_id:
                    #    partner_id = sock.execute(dbname, uid, pwd, 'res.partner', 'create', { 'consignee_iln': order['HEA']['EANCONSIGNEE'], 'shop_iln': order['HEA']['EANSHOP'], 'customernumber': order['HEA']['CUSTOMERNUMBER'], 'name': 'Saknad kund %s' % order['HEA']['EANSHOP']})
                    #    partner_id = sock.execute(dbname, uid, pwd, 'res.partner', 'search',[("shop_iln","=",order['HEA']['EANSHOP'])]) # Use EANSHOP as key in partner
                    #partner = sock.execute(dbname, uid, pwd, 'res.partner', 'read', partner_id[0])
                    #addr = {}
                    #for addr_rec in sock.execute(dbname, uid, pwd, 'res.partner.address', 'read', partner['address'] ):
                    #    addr[addr_rec['type']] = addr_rec
                    #adress_order = addr.get('default',{'id': 1})
                    #adress_invoice = addr.get('invoice',addr.get('default',{'id': 1}))
                    #adress_shipping = addr.get('delivery',addr.get('default',{'id': 1}))
                    # Partner / shop
                    partner_id = partner_cr.search(cr, uid, [
                        ("shop_iln", "=", order['HEA']['EANDELIVERY'])
                    ])  # Use EANDELIVERY as key in partner
                    if not partner_id:
                        # Create a placeholder partner for unknown shops.
                        partner_id = partner_cr.create(
                            cr, uid, {
                                'consignee_iln': order['HEA']['EANCONSIGNEE'],
                                'shop_iln': order['HEA']['EANDELIVERY'],
                                'customernumber':
                                order['HEA']['CUSTOMERNUMBER'],
                                'name': 'Saknad kund %s' %
                                order['HEA']['EANDELIVERY']
                            })
                        partner_id = partner_cr.search(
                            cr, uid,
                            [("shop_iln", "=", order['HEA']['EANDELIVERY'])
                             ])  # Use EANDELIVERY as key in partner
                    partner = partner_cr.read(cr, uid, partner_id)
                    print "partner ", partner
                    print "address", partner[0]['address']
                    # Index the partner's addresses by type, falling back to
                    # 'default' (or address id 1) when a type is missing.
                    addr = {}
                    for addr_rec in address_cr.read(
                            cr, uid, partner[0]['address']):
                        addr[addr_rec['type']] = addr_rec
                    adress_order = addr.get('default', {'id': 1})
                    adress_invoice = addr.get(
                        'invoice', addr.get('default', {'id': 1}))
                    adress_shipping = addr.get(
                        'delivery', addr.get('default', {'id': 1}))
                    # Buyer / ICA
                    buyer_id = partner_cr.search(cr, uid, [
                        ("shop_iln", "=", order['HEA']['EANBUYER'])
                    ])  # Use EANSHOP as key in partner
                    if not buyer_id:
                        buyer_id = partner_cr.create(
                            cr, uid, {
                                'consignee_iln': order['HEA']['EANCONSIGNEE'],
                                'shop_iln': order['HEA']['EANBUYER'],
                                'customernumber':
                                order['HEA']['CUSTOMERNUMBER'],
                                'name': 'Saknad kund %s' %
                                order['HEA']['EANBUYER']
                            })
                        buyer_id = partner_cr.search(
                            cr, uid, [
                                ("shop_iln", "=", order['HEA']['EANBUYER'])
                            ])  # Use EANSHOP as key in partner
                    buyer = partner_cr.read(cr, uid, buyer_id)
                    # Sender / ICA
                    sender_id = partner_cr.search(cr, uid, [
                        ("shop_iln", "=", order['HEA']['EANSENDER'])
                    ])  # Use EANSHOP as key in partner
                    if not sender_id:
                        sender_id = partner_cr.create(
                            cr, uid, {
                                'consignee_iln': order['HEA']['EANCONSIGNEE'],
                                'shop_iln': order['HEA']['EANSENDER'],
                                'customernumber':
                                order['HEA']['CUSTOMERNUMBER'],
                                'name': 'Saknad kund %s' %
                                order['HEA']['EANSENDER']
                            })
                        sender_id = partner_cr.search(
                            cr, uid,
                            [("shop_iln", "=", order['HEA']['EANSENDER'])
                             ])  # Use EANSHOP as key in partner
                    sender = partner_cr.read(cr, uid, sender_id)
                    # Create sale.order when mrpjournal are marked with state = s for orders in that journal
                    if order_record['saleorder_id'] == 0:
                        print "Skapa sale.order", partner[0]['id']
                        ord_nr.append(order['HEA']['ORDERNUMBER'])
                        # print mrpjournal, order, "Saleorder"
                        saleorder_id = sale_order_cr.create(
                            cr, uid, {
                                'partner_id': buyer[0]['id'],
                                'ica_status': 'u',
                                'client_order_ref':
                                order['HEA']['ORDERNUMBER'],
                                'origin': order['HEA']['ORDERNUMBER'],
                                'date_order': order['HEA']['ORDERDATE'],
                                'date_requested':
                                order['HEA']['DELDATESTORE'],
                                'date_promised': order['HEA']['DELDATELE'],
                                'date_delfromica':
                                order['HEA']['SHIPDATEFROMLE'],
                                'partner_order_id': adress_order['id'],
                                'partner_invoice_id': adress_invoice['id'],
                                'partner_shipping_id': adress_shipping['id'],
                                'pricelist_id':
                                partner[0]['property_product_pricelist'][0],
                                'ica_mrpjournal': journal['id'],
                                'eansender': order['HEA']['EANSENDER'],
                                'eanreceiver': order['HEA']['EANRECEIVER'],
                                'eandelivery': order['HEA']['EANDELIVERY'],
                                'eanconsignee': order['HEA']['EANCONSIGNEE'],
                                'eanshop': order['HEA']['EANSHOP'],
                                'eanbuyer': order['HEA']['EANBUYER'],
                                'eansupplier': order['HEA']['EANSUPPLIER'],
                                'customernumber':
                                order['HEA']['CUSTOMERNUMBER'],
                                # Retrieve freigh information
                                #"FREIGHTLABEL1": "03/05/527/02/005/011",
                                'utlevomr':
                                order['HEA']['FREIGHTLABEL1'].split('/')[0],
                                'port':
                                order['HEA']['FREIGHTLABEL1'].split('/')[1],
                                'lass':
                                order['HEA']['FREIGHTLABEL1'].split('/')[2],
                                'pl':
                                order['HEA']['FREIGHTLABEL1'].split('/')[3],
                                'ruta1':
                                order['HEA']['FREIGHTLABEL1'].split('/')[4],
                                'ruta2':
                                order['HEA']['FREIGHTLABEL1'].split('/')[5],
                                'state': 'progress',
                            })
                        # saleorder = sock.execute(dbname, uid, pwd, 'sale.order', 'read', saleorder_id)
                        # print "Saleorder_id ", saleorder_id, order['HEA']['LIN']
                        for line in order['HEA']['LIN']:
                            # Product
                            product_id = product_cr.search(
                                cr, uid, [("ean14", "=", line['EAN'])
                                          ])  # Use EAN as key in product
                            if not product_id:
                                # Create missing products
                                logger.notifyChannel(
                                    "do_create_saleorder",
                                    netsvc.LOG_WARNING,
                                    "Missing product '%s' '%s' ." %
                                    (line['SU_ARTICLECODE'], line['EAN']))
                                cr.rollback()
                                raise osv.except_osv(
                                    _('Saknad produkt'),
                                    _('%s %s') %
                                    (line['SU_ARTICLECODE'], line['EAN']))
                                # NOTE(review): unreachable after the raise.
                                return {}
                                #partner_id = product_cr.create(cr, uid, {'su_articlecode': line['SU_ARTICLECODE'], 'ean14': line['EAN'], 'name': 'Saknad produkt %s' % line['SU_ARTICLECODE'], 'categ_id': 1,})
                                #product_id = product_cr.search(cr, uid, [("ean14","=",line['EAN'])])
                            product = product_cr.read(
                                cr, uid, product_id[0])
                            print product_id, product
                            tax_id = int(product['taxes_id'][0])
                            print "tax_id", tax_id
                            saleorderline_id = line_cr.create(
                                cr, uid, {
                                    # 'address_allotment_id': ,
                                    'delay': 0.0,
                                    'discount': 0.0,
                                    'invoiced': 0,
                                    # 'invoice_lines': fields.many2many('account.invoice.line', 'sale_order_line_invoice_rel', 'order_line_id', 'invoice_id', 'Invoice Lines', readonly=True),
                                    # 'move_ids': fields.one2many('stock.move', 'sale_line_id', 'Inventory Moves', readonly=True),
                                    'name': product['name'],
                                    'notes': '',
                                    # 'number_packages': fields.function(_number_packages, method=True, type='integer', string='Number Packages'),
                                    'order_id': saleorder_id,
                                    # 'order_partner_id': fields.related('order_id', 'partner_id', type='many2one', relation='res.partner', string='Customer')
                                    # 'price_net': fields.function(_amount_line_net, method=True, string='Net Price', digits=(16, int(config['price_accuracy']))),
                                    # 'price_subtotal': fields.function(_amount_line, method=True, string='Subtotal', digits=(16, int(config['price_accuracy']))),
                                    # 'price_unit': product['uos_id'],
                                    'price_unit': product['list_price'],
                                    # 'procurement_id': fields.many2one('mrp.procurement', 'Procurement'),
                                    'product_id': product['id'],
                                    'product_packaging': 0,
                                    # 'product_uom': product['list_price'],
                                    'product_uom': product['uom_id'][0],
                                    'product_uom_qty': line['QTY_ORDERED'],
                                    'product_uos': 1,
                                    'product_uos_qty': 1,
                                    # 'property_ids': fields.many2many('mrp.property', 'sale_order_line_property_rel', 'order_id', 'property_id', 'Properties', readonly=True, states={'draft':[('readonly',False)]}),
                                    'sequence': 10,
                                    'state': 'confirmed',
                                    'th_weight': 0,
                                    'type': 'make_to_stock',
                                    'tax_id': product['taxes_id'],
                                })
                            print "Sale order line id", saleorderline_id
                            for tax in product['taxes_id']:
                                # saleordertax_id = tax_cr.create(cr, uid, {'order_line_id': saleorderline_ids, 'tax_id': int(tax)})
                                # NOTE(review): raw SQL insert built by string
                                # concatenation; values are ints/dates here,
                                # but parameterization would be safer.
                                cr.execute(
                                    'INSERT INTO sale_order_tax ("order_line_id", "tax_id", "create_uid", "create_date") VALUES ('
                                    + str(saleorderline_id) + ',' + str(tax)
                                    + ', ' + str(uid) + ', ' +
                                    today.strftime("'%Y-%m-%d %H:%M'") +
                                    ' )')
                        # Mark import_order imported / Confirmed
                        # Link ica.import_order to the mrpjournal
                        # Create tax-lines
                        #print sale_order_cr.amount_tax(cr,uid,saleorder_id)
                        order_marked = import_order_cr.write(
                            cr, uid, order_record['id'], {
                                'status': 'c',
                                'saleorder_id': saleorder_id,
                            })
                pooler.get_pool(cr.dbname).get('ica.mrpjournal').write(
                    cr, uid, jid, {
                        'saleorder_imported':
                        today.strftime('%Y-%m-%d %H:%M'),
                        'status': 's'
                    })
            else:
                pooler.get_pool(cr.dbname).get('ica.mrpjournal').write(
                    cr, uid, jid, {
                        'saleorder_imported':
                        today.strftime('%Y-%m-%d %H:%M'),
                        'status': 's'
                    })
    cr.commit()
    # Check for duplicate order numbers
    doubles = check_ordernumber(cr, uid, ord_nr)
    if len(doubles) > 0:
        logger.notifyChannel("warning", netsvc.LOG_WARNING,
                             "Orderdubletter '%s' ." % (doubles))
        raise osv.except_osv(
            _('Dublettorder'),
            _('Dessa ordernumber (Client ref) finns redan\n%s') % (doubles))
    # TODO: check order_requested before today's date, report order numbers
    # for manual control
    return {}
def magento_export(self, cr, uid, prod_ids, context):
    """Export (create or update) the given products to Magento over XML-RPC.

    :param prod_ids: ids of the product records to push.
    Counters of created/updated/failed products are kept locally;
    nothing is returned.
    """
    #===============================================================================
    # Init
    #===============================================================================
    prod_new = 0      # products created on Magento
    prod_update = 0   # products updated on Magento
    prod_fail = 0     # products whose API call failed
    logger = netsvc.Logger()
    # The magento.web record flagged as active holds the connection settings.
    mw_id = self.pool.get('magento.web').search(cr, uid, [('magento_flag', '=', True)])
    mw = self.pool.get('magento.web').browse(cr, uid, mw_id[0])
    (server, session) = mw.connect()
    #===============================================================================
    # Product packaging
    #===============================================================================
    #Getting the set attribute
    #TODO: customize this code in order to pass custom attribute sets (configurable products), possibly per product
    sets = server.call(session, 'product_attribute_set.list')
    # NOTE(review): `set` shadows the builtin, and the `else` resets
    # attr_set_id to 1 on every non-'Default' entry — if 'Default' is not the
    # last set returned, its id is lost. Looks like a latent ordering bug;
    # confirm against the Magento attribute-set list order.
    for set in sets:
        if set['name'] == 'Default':
            attr_set_id = set['set_id']
        else :
            attr_set_id = 1
    #===============================================================================
    # Product pricelists
    #===============================================================================
    pricelist_obj = self.pool.get('product.pricelist')
    # Exactly one pricelist must be flagged as the Magento default.
    pl_default_id = pricelist_obj.search(cr, uid, [('magento_default', '=', True)])
    if len(pl_default_id) != 1:
        raise osv.except_osv(_("User Error"), _("You have not set any default pricelist to compute the Magento general prices (the standard prices of each product)"))
    # Pricelists mapped to a Magento customer group become tier prices.
    pl_other_ids = pricelist_obj.search(cr, uid, [('magento_id', '<>', 0)])
    # splitting the prod_ids array in subarrays to avoid memory leaks in case
    # of massive upload. Hint by Gunter Kreck
    import math
    l = 200
    f = lambda v, l: [v[i * l:(i + 1) * l] for i in range(int(math.ceil(len(v) / float(l))))]
    split_prod_id_arrays = f(prod_ids, l)
    for prod_ids in split_prod_id_arrays:
        for product in self.browse(cr, uid, prod_ids, context=context):
            # Getting Magento categories: walk up the category tree and
            # collect the Magento id of each ancestor level.
            category_tab = {'0':1}
            key = 1
            last_category = product.categ_id
            while(type(last_category.parent_id.id) == (int)):
                category_tab[str(key)] = last_category.magento_id
                last_category = self.pool.get('product.category').browse(cr, uid, last_category.parent_id.id)
                key += 1
            # SKU is "<code>_<id>" (falls back to "mag" when no internal code).
            sku = (product.code or "mag") + "_" + str(product.id)
            # Product data
            product_data = {
                'name': product.name,
                'price' : pricelist_obj.price_get(cr, uid, pl_default_id, product.id, 1.0)[pl_default_id[0]],
                'weight': (product.weight_net or 0),
                'category_ids': category_tab,
                'description' : (product.description or _("description")),
                'short_description' : (product.description_sale or _("short description")),
                'websites':['base'],
                'tax_class_id': product.magento_tax_class_id or 2,
                'status': product.active and 1 or 2,  # 1 = enabled, 2 = disabled
                'meta_title': product.name,
                'meta_keyword': product.name,
                'meta_description': product.description_sale and product.description_sale[:255],
            }
            # Stock data
            stock_data = {
                'qty': product.virtual_available,
                'is_in_stock': product.virtual_available,
            }
            # Pricelist data (tier prices)
            prices_data = []
            for pl_id, price in pricelist_obj.price_get(cr, uid, pl_other_ids, product.id, 1.0).iteritems():
                pl = pricelist_obj.browse(cr, uid, pl_id, context=context)
                prices_data.append({'website': 'all', 'customer_group_id': pl.magento_id, 'qty': 1, 'price': price})
            # Image data
            image_name = ''
            image_data = {}
            if product.image:
                image_data = {
                    'file': {'content': product.image, 'mime': 'image/jpeg'},
                    'label': product.image_label or product.name,
                    #'position': 0,
                    'types': ['image', 'small_image', 'thumbnail'],
                    'exclude': 0,
                }
            # NOTE(review): `updated` is assigned here and in the error
            # branches but never read in this method — confirm it can go.
            updated = True
            #===============================================================================
            # Product upload to Magento
            #===============================================================================
            try:
                #Create
                if(product.magento_id == 0):
                    new_id = server.call(session, 'product.create', ['simple', attr_set_id, sku, product_data])
                    server.call(session, 'product_stock.update', [sku, stock_data])
                    if prices_data:
                        server.call(session, 'product_tier_price.update', [sku, prices_data])
                    if image_data:
                        image_name = server.call(session, 'product_media.create', [sku, image_data])
                    self.write_magento(cr, uid, product.id, {'magento_id': new_id, 'image_name': image_name})
                    logger.notifyChannel(_("Magento Export"), netsvc.LOG_INFO, _("Successfully created product with OpenERP id %s and Magento id %s") % (product.id, new_id))
                    prod_new += 1
                #Or Update
                else:
                    server.call(session, 'product.update', [sku, product_data])
                    server.call(session, 'product_stock.update', [sku, stock_data])
                    server.call(session, 'product_tier_price.update', [sku, prices_data])
                    if image_data and not product.image_name:
                        # Image added
                        image_name = server.call(session, 'product_media.create', [sku, image_data])
                        self.write_magento(cr, uid, product.id, {'image_name': image_name})
                    if not image_data and product.image_name:
                        # Image removed
                        server.call(session, 'product_media.remove', [sku, product.image_name])
                        self.write_magento(cr, uid, product.id, {'image_name': ''})
                    logger.notifyChannel(_("Magento Export"), netsvc.LOG_INFO, _("Successfully updated product with OpenERP id %s and Magento id %s") % (product.id, product.magento_id))
                    prod_update += 1
            except xmlrpclib.Fault, error:
                #If fail, try to create
                if error.faultCode == 101:
                    # turns out that the product doesn't exist in Magento
                    # (might have been deleted), try to create a new one.
                    try:
                        new_id = server.call(session, 'product.create', ['simple', attr_set_id, sku, product_data])
                        server.call(session, 'product_stock.update', [sku, stock_data])
                        if prices_data:
                            server.call(session, 'product_tier_price.update', [sku, prices_data])
                        if image_data:
                            image_name = server.call(session, 'product_media.create', [sku, image_data])
                        self.write_magento(cr, uid, product.id, {'magento_id': new_id, 'image_name': image_name})
                        logger.notifyChannel(_("Magento Export"), netsvc.LOG_INFO, _("Successfully created product with OpenERP id %s and Magento id %s") % (product.id, new_id))
                        prod_new += 1
                    except xmlrpclib.Fault, error:
                        logger.notifyChannel(_("Magento Export"), netsvc.LOG_ERROR, _("Magento API return an error on product id %s . Error %s") % (product.id, error))
                        updated = False
                        prod_fail += 1
                else:
                    logger.notifyChannel(_("Magento Export"), netsvc.LOG_ERROR, _("Magento API return an error on product id %s . Error %s") % (product.id, error))
                    updated = False
                    prod_fail += 1
            except Exception, error:
                raise osv.except_osv(_("OpenERP Error"), _("An error occurred : %s ") % error)
def text_plain(text):
    """Pass-through formatter for plain-text report content.

    Logs the text at INFO level on the 'report_aeroo' channel and returns
    it unchanged.

    Bug fix: the original logged the undefined name ``msg``, raising a
    NameError on every call; it now logs ``text``.
    """
    netsvc.Logger().notifyChannel('report_aeroo', netsvc.LOG_INFO, text)
    return text
def magento_export(self, cr, uid, categ_ids, context):
    """Export (create or update) the given product categories to Magento.

    :param categ_ids: ids of the category records to push.
    Categories are sorted by parent id so parents are uploaded before
    their children. Counters of created/updated/failed categories are
    kept locally; nothing is returned.
    """
    #===============================================================================
    # Init
    #===============================================================================
    categ_new = 0     # categories created on Magento
    categ_update = 0  # categories updated on Magento
    categ_fail = 0    # categories whose API call failed
    logger = netsvc.Logger()
    # The magento.web record flagged as active holds the connection settings.
    mw_id = self.pool.get('magento.web').search(cr, uid, [('magento_flag', '=', True)])
    mw = self.pool.get('magento.web').browse(cr, uid, mw_id[0])
    (server, session) = mw.connect()
    #===============================================================================
    # Category packaging
    #===============================================================================
    categories = self.browse(cr, uid, categ_ids, context=context)
    # Sort by parent id ascending so a parent is created before its children
    # (assumes parents have lower ids than their children).
    categories.sort(lambda x, y : (int(x.parent_id) or 0) - int(y.parent_id))
    for category in categories :
        path=''
        # Construct the Magento path "1/<ancestors>/<parent>" by walking up
        # the OpenERP category tree ('1' is the Magento root catalog).
        magento_parent_id=1 #root catalog
        if(type(category.parent_id.id) == (int)): #if not root category
            last_parent=self.browse(cr, uid, category.parent_id.id)
            magento_parent_id=last_parent.magento_id
            path= str(last_parent.magento_id)
            while(type(last_parent.parent_id.id) == (int)):
                last_parent=self.browse(cr, uid, last_parent.parent_id.id)
                path=str(last_parent.magento_id)+'/'+path
        path='1/'+path
        path=path.replace("//","/")
        if path.endswith('/'):
            path=path[0:-1]
        category_data = {
            'name' : category.name,
            'path' : path,
            'is_active' : 1,
            'default_sort_by': 'name',
            'available_sort_by': 'name',
        }
        # NOTE(review): `updated` is assigned here and in the error branches
        # but never read in this method — confirm it can go.
        updated = True
        #===============================================================================
        # Category upload to Magento
        #===============================================================================
        try:
            if(category.magento_id == 0):
                # Unknown to Magento yet: create and store the returned id.
                new_id = server.call(session,'category.create', [magento_parent_id, category_data])
                self.write_magento(cr, uid, category.id, {'magento_id': new_id})
                logger.notifyChannel(_("Magento Export"), netsvc.LOG_INFO, _("Successfully created category with OpenERP id %s and Magento id %s") % (category.id, new_id))
                categ_new += 1
            else:
                # The path must include magento_id at the end when a category is updated
                category_data['path'] = category_data['path'] + '/' + str(category.magento_id)
                server.call(session,'category.update', [category.magento_id, category_data])
                logger.notifyChannel(_("Magento Export"), netsvc.LOG_INFO, _("Successfully updated category with OpenERP id %s and Magento id %s") % (category.id, category.magento_id))
                categ_update += 1
        except xmlrpclib.Fault, error:
            if error.faultCode == 102:
                # turns out that the category doesn't exist in Magento (might
                # have been deleted), try to create a new one.
                try:
                    new_id = server.call(session,'category.create', [magento_parent_id, category_data])
                    self.write_magento(cr, uid, category.id, {'magento_id': new_id})
                    logger.notifyChannel(_("Magento Export"), netsvc.LOG_INFO, _("Successfully created category with OpenERP id %s and Magento id %s") % (category.id, new_id))
                    categ_new += 1
                except xmlrpclib.Fault, error:
                    logger.notifyChannel(_("Magento Export"), netsvc.LOG_ERROR, _("Magento API return an error on category id %s . Error %s") % (category.id, error))
                    updated = False
                    categ_fail += 1
            else:
                logger.notifyChannel(_("Magento Export"), netsvc.LOG_ERROR, _("Magento API return an error on category id %s . Error %s") % (category.id, error))
                updated = False
                categ_fail += 1
def action_import(self, cr, uid, ids, context=None):
    """
    Imports the accounts from the CSV file using the options from the wizard.

    For each CSV record: existing accounts (matched by code + company) are
    optionally renamed when the wizard's 'overwrite' flag is set; missing
    accounts are created using a sibling ("brother") account as template.
    Returns an act_window showing the imported accounts.
    """
    # Ids of every account touched (updated or created) by this run.
    imported_account_ids = []
    for wiz in self.browse(cr, uid, ids, context):
        if not wiz.input_file:
            raise osv.except_osv(_('UserError'), _("You need to select a file!"))
        # Decode the file data (the binary field is base64-encoded).
        data = base64.b64decode(wiz.input_file)
        #
        # Read the file
        #
        reader = csv.reader(StringIO.StringIO(data),
                            delimiter=str(wiz.csv_delimiter),
                            quotechar=str(wiz.csv_quotechar))
        for record in reader:
            # Ignore short records
            if len(record) > wiz.csv_code_index \
                    and len(record) > wiz.csv_name_index:
                record_code = record[wiz.csv_code_index]
                record_name = record[wiz.csv_name_index]
                #
                # Ignore invalid records
                #
                if re.match(wiz.csv_code_regexp, record_code) \
                        and re.match(wiz.csv_name_regexp, record_name):
                    #
                    # Search for the account
                    #
                    account_ids = self.pool.get('account.account').search(
                        cr, uid, [('code', '=', record_code),
                                  ('company_id', '=', wiz.company_id.id)])
                    if account_ids:
                        # Account exists: rename it only when asked to.
                        if wiz.overwrite:
                            netsvc.Logger().notifyChannel(
                                'account_importer', netsvc.LOG_DEBUG,
                                "Overwriting account: %s %s" % (record_code, record_name))
                            self.pool.get('account.account').write(
                                cr, uid, account_ids, {'name': record_name})
                            imported_account_ids.extend(account_ids)
                    else:
                        #
                        # Find the account's parent
                        #
                        parent_account_id = self._find_parent_account_id(
                            cr, uid, wiz, record_code)
                        if not parent_account_id:
                            netsvc.Logger().notifyChannel(
                                'account_importer', netsvc.LOG_WARNING,
                                "Couldn't find a parent account for: %s" % record_code)
                        #
                        # Find the account's brother (will be used as template)
                        #
                        brother_account_id = self._find_brother_account_id(
                            cr, uid, wiz, record_code)
                        if not brother_account_id:
                            netsvc.Logger().notifyChannel(
                                'account_importer', netsvc.LOG_WARNING,
                                "Couldn't find a brother account for: %s" % record_code)
                        # NOTE(review): browse() is called even when
                        # brother_account_id is falsy — the create below
                        # would then read attributes of an empty browse
                        # record. Confirm whether that path can happen.
                        brother_account = self.pool.get(
                            'account.account').browse(
                                cr, uid, brother_account_id)
                        #
                        # Create the new account
                        #
                        netsvc.Logger().notifyChannel(
                            'account_importer', netsvc.LOG_DEBUG,
                            "Creating new account: %s %s" % (record_code, record_name))
                        # Most fields are copied from the brother account,
                        # including taxes (6,0,ids replaces the whole m2m).
                        account_id = self.pool.get(
                            'account.account').create(
                                cr, uid, {
                                    'code': record_code,
                                    'name': record_name,
                                    'parent_id': parent_account_id,
                                    'type': brother_account.type,
                                    'user_type': brother_account.user_type.id,
                                    'reconcile': brother_account.reconcile,
                                    'company_id': wiz.company_id.id,
                                    'currency_id': brother_account.currency_id.id,
                                    'currency_mode': brother_account.currency_mode,
                                    'active': 1,
                                    'tax_ids': [(6, 0, [
                                        tax.id for tax in brother_account.tax_ids
                                    ])],
                                    'note': False,
                                })
                        imported_account_ids.append(account_id)
                else:
                    netsvc.Logger().notifyChannel(
                        'account_importer', netsvc.LOG_WARNING,
                        "Invalid record format (ignoring line): %s" % repr(record))
            else:
                netsvc.Logger().notifyChannel(
                    'account_importer', netsvc.LOG_WARNING,
                    "Too short record (ignoring line): %s" % repr(record))
    #
    # Show the accounts to the user
    #
    model_data_ids = self.pool.get('ir.model.data').search(
        cr, uid, [('model', '=', 'ir.ui.view'),
                  ('module', '=', 'account'),
                  ('name', '=', 'view_account_form')])
    resource_id = self.pool.get('ir.model.data').read(
        cr, uid, model_data_ids, fields=['res_id'],
        context=context)[0]['res_id']
    return {
        'name': _("Imported accounts"),
        'type': 'ir.actions.act_window',
        'res_model': 'account.account',
        'view_type': 'form',
        'view_mode': 'tree,form',
        'views': [(False, 'tree'), (resource_id, 'form')],
        'domain': "[('id', 'in', %s)]" % imported_account_ids,
        'context': context,
    }
def _get_account_balance(self, cr, uid, ids, code, balance_mode=0, context=None):
    """
    It returns the (debit, credit, balance*) tuple for a account with
    the given code, or the sum of those values for a set of accounts
    when the code is in the form "400,300,(323)"

    Depending on the balance_mode, the balance is calculated as follows:
      Mode 0: debit-credit for all accounts (default);
      Mode 1: debit-credit, credit-debit for accounts in brackets;
      Mode 2: credit-debit for all accounts;
      Mode 3: credit-debit, debit-credit for accounts in brackets.

    Also the user may specify to use only the debit or credit of the account
    instead of the balance writing "debit(551)" or "credit(551)".

    Fix: in mode '3' the sign flip for non-bracketed accounts used
    ``not a.startswith('(') and a.endswith(')')``, which — because of
    operator precedence — only matched malformed codes like "400)".
    The flip now applies to every code NOT fully wrapped in brackets,
    matching the documented semantics above.
    """
    acc_facade = self.pool.get('account.account')
    res = 0.0
    line = self.browse(cr, uid, ids)[0]
    assert balance_mode in ('0', '1', '2', '3'), \
        "balance_mode should be in [0..3]"
    # We iterate over the accounts listed in "code", so code can be
    # a string like "430+431+432-438"; accounts split by "+" will be added,
    # accounts split by "-" will be substracted.
    for account_code in re.findall('(-?\w*\(?[0-9a-zA-Z_]*\)?)', code):
        # findall might return empty strings: skip them.
        if len(account_code) == 0:
            continue
        #
        # Check the sign of the code (substraction)
        #
        if account_code.startswith('-'):
            sign = -1.0
            account_code = account_code[1:]  # Strip the sign
        else:
            sign = 1.0
        if re.match(r'^debit\(.*\)$', account_code):
            # Use debit instead of balance
            mode = 'debit'
            account_code = account_code[6:-1]  # Strip debit()
            if balance_mode == '2':
                # Balances are credit-debit in mode 2; pre-flip so the
                # unconditional flip below cancels out and the debit
                # keeps its natural sign.
                sign = -1.0 * sign
        elif re.match(r'^credit\(.*\)$', account_code):
            # Use credit instead of balance
            mode = 'credit'
            account_code = account_code[7:-1]  # Strip credit()
            if balance_mode == '2':
                # Same pre-flip trick as for debit().
                sign = -1.0 * sign
        else:
            mode = 'balance'
        #
        # Apply the balance mode (the stored balance is debit-credit;
        # "credit-debit" is obtained by flipping the sign).
        #
        if balance_mode == '1':
            # debit-credit by default, credit-debit for accounts in brackets.
            if account_code.startswith('(') and account_code.endswith(')'):
                sign = -1.0 * sign
        elif balance_mode == '2':
            # credit-debit for all accounts.
            sign = -1.0 * sign
        elif balance_mode == '3':
            # credit-debit by default, debit-credit for accounts in brackets
            # (see the precedence fix described in the docstring).
            if not (account_code.startswith('(')
                    and account_code.endswith(')')):
                sign = -1.0 * sign
        # Strip the brackets (if there are brackets)
        if account_code.startswith('(') and account_code.endswith(')'):
            account_code = account_code[1:-1]
        # Search for the account (perfect match)
        account_ids = acc_facade.search(
            cr, uid,
            [('code', '=', account_code),
             ('company_id', '=', line.report_id.company_id.id)],
            context=context)
        if not account_ids:
            # We didn't find the account, search for a subaccount ending with '0'
            account_ids = acc_facade.search(
                cr, uid,
                [('code', '=like', '%s%%0' % account_code),
                 ('company_id', '=', line.report_id.company_id.id)],
                context=context)
        if len(account_ids) > 0:
            # Only the first matching account is used.
            balance = acc_facade.browse(cr, uid, account_ids,
                                        context)[0].balance
            # debit() only counts positive balances, credit() only
            # negative ones; plain balance always counts.
            if ((mode == 'debit' and balance > 0.0)
                    or (mode == 'credit' and balance < 0.0)
                    or (mode == 'balance')):
                res += balance * sign
        else:
            netsvc.Logger().notifyChannel(
                'account_balance_reporting', netsvc.LOG_WARNING,
                "Account with code '%s' not found!" % account_code)
    return res
def do_contract_inv_installation(self, cr, uid, ids, context={},
                                 force_invoicing=False):
    """Invoice the positions of the given contracts.

    Two passes per contract: first the non-recurrent positions in 'open'
    state (marked 'done' once invoiced), then — for contracts in
    'production' — the recurrent positions whose next invoice date has
    come within the cancellation deadline. Invoice lines are appended to
    an existing draft invoice of the contract, or to a freshly created
    one. Finally, if the eagle parameters ask for it, the contract is
    switched to production when no 'open' position remains.

    Returns False when no eagle parameters exist or invoicing is not
    'auto' (unless force_invoicing); True otherwise.

    NOTE(review): the mutable default ``context={}`` is shared across
    calls — confirm callers never mutate it (left unchanged here).
    """
    invoices = self.pool.get('account.invoice')
    invoice_lines = self.pool.get('account.invoice.line')
    recurrences = self.pool.get('product.recurrence.unit')
    contract_positions = self.pool.get('eagle.contract.position')
    eagle_param = self.__get_eagle_parameters(cr, uid, context=context)
    if not eagle_param:
        return False
    if eagle_param.invoicing_mode != 'auto' and not force_invoicing:
        return False
    for contract in self.browse(cr, uid, ids, context=context):
        if contract.state not in ['installation', 'production']:
            continue
        # This loop handles non-recurrent contract positions
        # - those in 'open' state are added
        # - the others are skipped
        # - each time an object is correctly added to an invoice, its state is set to 'done'
        invoice_id = False
        invoice = False
        for contract_position in contract.positions:
            if contract_position.state != 'open':
                continue
            # If needed, prepare a new invoice, if not already defined:
            # reuse an existing draft invoice of this contract first.
            if not invoice_id:
                invoice_ids = invoices.search(
                    cr, uid,
                    [('state', '=', 'draft'),
                     ('contract_id', '=', contract.id)], context=context)
                if invoice_ids and len(invoice_ids):
                    invoice_id = invoice_ids[0]
            if not invoice_id:
                vals = self.get_invoice_default_values(cr, uid, contract,
                                                       context=context)
                invoice_id = invoices.create(cr, uid, vals, context=context)
            if not invoice_id:
                break
            if not invoice:
                invoice = invoices.browse(cr, uid, invoice_id,
                                          context=context)
            if not invoice:
                invoice_id = False
                break
            # Skip positions whose next invoice date is in the future.
            now = datetime.now().strftime('%Y-%m-%d')
            if contract_position.next_invoice_date and contract_position.next_invoice_date > now:
                continue
            # Prepare a new invoice line. Unbillable positions are either
            # included or skipped depending on the eagle parameters; when
            # skipped, invoice_line_id=True still marks the position done.
            do_it = True
            invoice_line_id = False
            if not contract_position.is_billable:
                if not eagle_param.make_inv_lines_with_unbillables:
                    do_it = False
                    invoice_line_id = True
            if do_it:
                vals = self.get_invoice_line_default_values(
                    cr, uid, invoice, contract, contract_position,
                    context=context)
                vals['contract_position_id'] = contract_position.id
                netsvc.Logger().notifyChannel('addons.' + self._name,
                                              netsvc.LOG_DEBUG,
                                              "vals=" + str(vals))
                invoice_line_id = invoice_lines.create(cr, uid, vals,
                                                       context=context)
            if invoice_line_id:
                contract_positions.write(cr, uid, contract_position.id,
                                         {'state': 'done'},
                                         context=context)
            netsvc.Logger().notifyChannel(
                'addons.' + self._name, netsvc.LOG_DEBUG,
                "invoice_line_id=" + str(invoice_line_id))
        if contract.state == 'production':
            # This loop handles recurrent contract positions
            # - recurrent products may be put either in the same or in a different invoice, depending on
            #   how much time has passed between the 1st invoice and 1st occurence of the recurrent product
            # - each time an object is correctly added to an invoice, its state is set to 'done'
            for contract_position in contract.positions:
                if not contract_position.is_active:
                    continue
                if not contract_position.recurrence_id:
                    continue
                if contract_position.state != 'recurrent':
                    continue
                if not contract_position.next_invoice_date:
                    continue
                recurrence = recurrences.browse(
                    cr, uid, contract_position.recurrence_id.id,
                    context=context)
                if not recurrence:
                    continue
                now = datetime.now().strftime('%Y-%m-%d')
                # NOTE: `next` shadows the builtin inside this loop.
                next = datetime.strptime(
                    contract_position.next_invoice_date, '%Y-%m-%d')
                # `before` is the date from which the position may be
                # invoiced (next date minus the cancellation deadline).
                dt = next - relativedelta(
                    days=contract_position.cancellation_deadline)
                before = dt.strftime('%Y-%m-%d')
                # `after` is the end of the new invoicing period
                # (inclusive, hence the one-day subtraction).
                if recurrence.unit == 'day':
                    after = next + relativedelta(days=recurrence.value)
                    if recurrence.value > 0:
                        after -= relativedelta(days=1)
                elif recurrence.unit == 'month':
                    after = next + relativedelta(months=recurrence.value)
                    if recurrence.value > 0:
                        after -= relativedelta(days=1)
                elif recurrence.unit == 'year':
                    after = next + relativedelta(years=recurrence.value)
                    if recurrence.value > 0:
                        after -= relativedelta(days=1)
                if before > now:
                    continue
                # If needed, prepare a new invoice, if not already defined
                if not invoice_id:
                    invoice_ids = invoices.search(
                        cr, uid,
                        [('state', '=', 'draft'),
                         ('contract_id', '=', contract.id)],
                        context=context)
                    if invoice_ids and len(invoice_ids):
                        invoice_id = invoice_ids[0]
                if not invoice_id:
                    vals = self.get_invoice_default_values(cr, uid, contract,
                                                           context=context)
                    invoice_id = invoices.create(cr, uid, vals,
                                                 context=context)
                if not invoice_id:
                    break
                if not invoice:
                    invoice = invoices.browse(cr, uid, invoice_id,
                                              context=context)
                if not invoice:
                    invoice_id = False
                    break
                # Prepare a new invoice line (same unbillable handling as
                # in the first loop above).
                do_it = True
                invoice_line_id = False
                if not contract_position.is_billable:
                    if not eagle_param.make_inv_lines_with_unbillables:
                        do_it = False
                        invoice_line_id = True
                if do_it:
                    vals = self.get_invoice_line_default_values(
                        cr, uid, invoice, contract, contract_position,
                        context=context)
                    vals['contract_position_id'] = contract_position.id
                    invoice_line_id = invoice_lines.create(cr, uid, vals,
                                                           context=context)
                if invoice_line_id:
                    # Advance the recurrence and describe the covered period.
                    txt = contract_position.description + ' - ' + next.strftime(
                        '%d.%m.%Y') + ' ' + _('to') + ' ' + after.strftime(
                        '%d.%m.%Y')
                    contract_positions.write(
                        cr, uid, contract_position.id, {
                            'next_invoice_date': after.strftime('%Y-%m-%d'),
                            'out_description': txt
                        })
        # Optionally auto-switch the contract to production once every
        # position has left the 'open' state.
        eagle_param = self.__get_eagle_parameters(cr, uid, context=context)
        if eagle_param and eagle_param.auto_production_state:
            valid = True
            for cnt_line in contract.positions:
                if cnt_line.state == 'open':
                    valid = False
                    break
            if valid:
                self.contract_production(cr, uid, [contract.id], {})
    return True
def get_critical_activities(self, d_activities):
    """Run a CPM-style pass over the activity network in ``d_activities``.

    Computes early start/finish (forward walk from the start activity),
    late start/finish (backward walk from the stop activity) and free
    floats, then flags the activities on the shortest 'start' -> 'stop'
    path of the float graph as critical path members. Activities are
    mutated in place; nothing is returned.
    """
    warning = {}  # NOTE(review): never used afterwards — confirm it can go
    #Read the activity details
    activities = d_activities.values()
    # Find the start activity (assumes one exists; otherwise the loop
    # leaves start_activity as the last element).
    for start_activity in activities:
        if start_activity.is_start:
            break
    # Seed the early start: earliest of the successors' earliest-start
    # dates, or the next work day from today when none is set.
    l_successor_date_earliest_start = []
    for successor in start_activity.successors:
        if successor.date_earliest_start:
            l_successor_date_earliest_start.append(
                successor.date_earliest_start)
    if l_successor_date_earliest_start:
        start_activity.date_early_start = min(
            l_successor_date_earliest_start)
    else:
        start_activity.date_early_start = network_activity.next_work_day(
            datetime.today())
    # Forward pass: propagate early dates through the network.
    network_activity.walk_list_ahead(start_activity)
    # Find the stop activity (same single-match assumption as above).
    for stop_activity in activities:
        if stop_activity.is_stop:
            break
    #stop_activity.late_finish = stop_activity.early_finish
    stop_activity.date_late_finish = stop_activity.date_early_finish
    #stop_activity.late_start = stop_activity.late_finish - stop_activity.replan_duration
    stop_activity.date_late_start = network_activity.sub_work_days(
        stop_activity.date_late_finish, stop_activity.replan_duration)
    # Backward pass: propagate late dates through the network.
    network_activity.walk_list_aback(stop_activity)
    #start_activity.late_finish = start_activity.early_finish
    start_activity.date_late_finish = start_activity.date_early_finish
    start_activity.date_late_start = network_activity.sub_work_days(
        start_activity.date_late_finish, start_activity.replan_duration)
    #Calculate Float
    for act in activities:
        l_successor_date_early_start = []
        for successor in act.successors:
            l_successor_date_early_start.append(successor.date_early_start)
        if l_successor_date_early_start:
            # Free float: work days between this activity's early finish
            # and the earliest early start among its successors.
            [act.free_float, rr] = network_activity.work_days_diff(
                act.date_early_finish, min(l_successor_date_early_start))
    # [act.total_float, rr] = network_activity.work_days_diff(act.date_early_start, act.date_late_start)
    #
    # if (act.date_early_finish == act.date_late_finish and act.date_early_start == act.date_late_start):
    #     act.is_critical_path = True
    #Calculate shortest path
    # Build a complete weighted graph keyed by activity id: edges to
    # predecessors/successors weigh their total_float, everything else
    # is "infinite" (C_INFINITE).
    C_INFINITE = 9999
    d_graph = {}
    for act in d_activities.keys():
        d_neighbours = {}
        for other_act in d_activities.keys():
            if other_act <> act:
                d_neighbours[other_act] = C_INFINITE
                for pred_act in d_activities[act].predecessors:
                    if other_act == pred_act.activity_id:
                        d_neighbours[other_act] = pred_act.total_float
                for succ_act in d_activities[act].successors:
                    if other_act == succ_act.activity_id:
                        d_neighbours[other_act] = succ_act.total_float
        d_graph[act] = d_neighbours
    # d_graph = {}
    # for act in d_activities.keys():
    #     d_predecessors = {}
    #     for pred_act in d_activities[act].predecessors:
    #         d_predecessors[pred_act.activity_id] = pred_act.replan_duration
    #     d_graph[act] = d_predecessors
    logger = netsvc.Logger()
    l_spath = []
    try:
        l_spath = shortestPath(d_graph, 'start', 'stop')
    except Exception as e:
        # Negative floats make the shortest-path computation fail; keep
        # l_spath empty so no activity gets flagged.
        logger.notifyChannel(
            "warning", netsvc.LOG_WARNING,
            "Could not calculate the critical path due to existing negative floats in one or more of the network activities."
        )
    # Flag every activity whose id lies on the shortest path.
    for act in activities:
        item = next((i for i in l_spath if i == act.activity_id), None)
        if item is not None:
            act.is_critical_path = True
def get_wizard(self, type):
    """Look up the payment wizard for `type`.

    This base implementation knows no wizards: it logs a warning on the
    'warning' channel and returns None so callers can fall back.
    """
    message = "No wizard found for the payment type '%s'." % type
    netsvc.Logger().notifyChannel("warning", netsvc.LOG_WARNING, message)
    return None
def _get_account_balance(self, cr, uid, ids, code, balance_mode=0, context=None):
    """
    It returns the (debit, credit, balance*) tuple for a account with the
    given code, or the sum of those values for a set of accounts
    when the code is in the form "400,300,(323)"

    Depending on the balance_mode, the balance is calculated as follows:
      Mode 0: debit-credit for all accounts (default);
      Mode 1: debit-credit, credit-debit for accounts in brackets;
      Mode 2: credit-debit for all accounts;
      Mode 3: credit-debit, debit-credit for accounts in brackets.

    Also the user may specify to use only the debit or credit of the account
    instead of the balance writing "debit(551)" or "credit(551)".

    NOTE(review): this variant mutates the caller's ``context`` dict
    (state/date_from/date_to/type/fiscalyear keys) — confirm callers
    expect that side effect.
    """
    fiscalyear_obj = self.pool.get('account.fiscalyear')
    fiscalperiod_obj = self.pool.get('account.period')
    acc_facade = self.pool.get('account.account')
    # NOTE(review): duplicate assignment of fiscalyear_obj (harmless).
    fiscalyear_obj = self.pool.get('account.fiscalyear')
    company_obj = self.pool.get('res.company')
    res = 0.0
    line = self.browse(cr, uid, ids)[0]
    assert balance_mode in ('0', '1', '2', '3'), \
        "balance_mode should be in [0..3]"
    # Narrow the reading context from the report settings.
    if line.report_id.target_move == 'posted':
        context.update({'state':'posted'})
    if line.report_id.date_from:
        context.update({'date_from':line.report_id.date_from})
    if line.report_id.date_to:
        context.update({'date_to':line.report_id.date_to})
    context.update({'type':'statement'})
    # Get fiscal years of all child companies with the same year code.
    # NOTE(review): `company_id` is assigned but never used below.
    company_id= fiscalyear_obj.browse(cr, uid, context['fiscalyear'], context=context).company_id.id
    company_ids=company_obj.search(cr, uid, [ ('parent_id', '=', line.report_id.company_id.id)], context=context)
    year_code=fiscalyear_obj.browse(cr, uid, context['fiscalyear'], context=context).code
    if company_ids:
        fiscalyear_ids= fiscalyear_obj.search(cr, uid, [('code', '=',year_code), ('company_id', 'in', company_ids)], context=context)
    if not company_ids:
        fiscalyear_ids= fiscalyear_obj.search(cr, uid, [('code', '=',year_code), ('company_id', '=', line.report_id.company_id.id)], context=context)
    # Opening (special) period of those fiscal years gives the default
    # start date when the report does not define one.
    init_period = fiscalperiod_obj.search(cr, uid, [('special', '=', True), ('fiscalyear_id', 'in', fiscalyear_ids)])
    date_start = fiscalperiod_obj.browse(cr, uid, init_period[0], context=context).date_start
    if not line.report_id.date_from:
        context.update({'date_from':date_start})
    if fiscalyear_ids:
        context.update({'fiscalyear':fiscalyear_ids})
    # We iterate over the accounts listed in "code", so code can be
    # a string like "430+431+432-438"; accounts split by "+" will be added,
    # accounts split by "-" will be substracted.
    # We also take in consideration the balance_mode (see docstring), and
    # let the user get just the credit or debit if he specifies so.
    for account_code in re.findall('(-?\w*\(?[0-9a-zA-Z_]*\)?)', code):
        # Check if the code is valid (findall might return empty strings)
        if len(account_code) > 0:
            # Check the sign of the code (substraction)
            if account_code.startswith('-'):
                sign = -1.0
                account_code = account_code[1:]  # Strip the sign
            else:
                sign = 1.0
            if re.match(r'^debit\(.*\)$', account_code):
                # Use debit instead of balance
                mode = 'debit'
                account_code = account_code[6:-1]  # Strip debit()
            elif re.match(r'^credit\(.*\)$', account_code):
                # Use credit instead of balance
                mode = 'credit'
                account_code = account_code[7:-1]  # Strip credit()
            else:
                mode = 'balance'
            # Calculate the balance, as given by the balance mode
            if balance_mode == '1' and account_code.startswith('(') and account_code.endswith(')'):
                # We use debit-credit as default balance,
                # but for accounts in brackets we use credit-debit
                sign = -1.0 * sign
            elif balance_mode == '2':
                # We use credit-debit as the balance
                sign = -1.0 * sign
            elif balance_mode == '3' and not account_code.startswith('(') and account_code.endswith(')'):
                # We use credit-debit as default balance,
                # but for accounts in brackets we use debit-credit.
                # NOTE(review): due to operator precedence this condition
                # only matches codes like "400)" — probably intended as
                # not(startswith AND endswith); confirm before changing.
                sign = -1.0 * sign
            # Strip the brackets (if there are brackets)
            if account_code.startswith('(') and account_code.endswith(')'):
                account_code = account_code[1:-1]
            # Search for the account (perfect match)
            account_ids = acc_facade.search(cr, uid, [('code', '=', account_code), ('company_id', '=', line.report_id.company_id.id)], context=context)
            if not account_ids:
                # We didn't find the account, search for a subaccount ending with '0'
                account_ids = acc_facade.search(cr, uid, [('code', '=like', '%s%%0' % account_code), ('company_id', '=', line.report_id.company_id.id)], context=context)
            if len(account_ids) > 0:
                # NOTE(review): only the first matching account is read, and
                # `sign` is not applied in the debit/credit branches — also
                # `balance * sign or 0.0` binds as (balance*sign) or 0.0.
                # Confirm these are intentional.
                if mode == 'debit':
                    res += acc_facade.read(cr, uid, account_ids, ['debit'], context)[0]['debit'] or 0.0
                elif mode == 'credit':
                    res += acc_facade.read(cr, uid, account_ids, ['credit'], context)[0]['credit'] or 0.0
                else:
                    # MODIFY HERE
                    res += acc_facade.read(cr, uid, account_ids, ['balance'], context)[0]['balance'] * sign or 0.0
            else:
                netsvc.Logger().notifyChannel('account_balance_reporting', netsvc.LOG_WARNING, "Account with code '%s' not found!" % account_code)
    return res