Example #1
def execute(conf_attrs, dbname, uid, obj, method, *args, **kwargs):
    import openerp
    from openerp.api import Environment
    from openerp.modules.registry import Registry
    for attr, value in conf_attrs.items():
        openerp.tools.config[attr] = value
    with Environment.manage():
        registry = Registry(dbname)
        cr = registry.cursor()
        context = kwargs.get('context') and kwargs.pop('context') or {}
        env = Environment(cr, uid, context)
        # openerp.api.Environment._local.environments = env
        try:
            print(args)
            getattr(env.registry[obj], method)(cr, uid, *args, **kwargs)
            # Commit only when the function finishes
            env.cr.commit()
        except Exception as exc:
            print(exc)
            env.cr.rollback()
            try:
                raise execute.retry(
                    queue=execute.request.delivery_info['routing_key'],
                    exc=exc, countdown=(execute.request.retries + 1) * 60,
                    max_retries=5)
            except Exception as retry_exc:
                raise retry_exc
        finally:
            env.cr.close()
    return True
Example #2
    def _procure_calculation_all(self, cr, uid, ids, context=None):
        """
        @param self: The object pointer.
        @param cr: A database cursor
        @param uid: ID of the user currently logged in
        @param ids: List of IDs selected
        @param context: A standard dictionary
        """
        with Environment.manage():
            proc_obj = self.pool.get('procurement.order')
            # As this function runs in a new thread, I need to open a new cursor, because the old one may be closed

            new_cr = self.pool.cursor()
            scheduler_cron_id = self.pool['ir.model.data'].get_object_reference(
                new_cr, SUPERUSER_ID, 'procurement', 'ir_cron_scheduler_action')[1]
            # Avoid running the scheduler multiple times at the same time
            try:
                with tools.mute_logger('openerp.sql_db'):
                    new_cr.execute("SELECT id FROM ir_cron WHERE id = %s FOR UPDATE NOWAIT", (scheduler_cron_id,))
            except Exception:
                _logger.info('Attempt to run procurement scheduler aborted, as already running')
                new_cr.rollback()
                new_cr.close()
                return {}
            user = self.pool.get('res.users').browse(new_cr, uid, uid, context=context)
            comps = [x.id for x in user.company_ids]
            for comp in comps:
                proc_obj.run_scheduler(new_cr, uid, use_new_cursor=new_cr.dbname, company_id=comp, context=context)
            # close the new cursor
            new_cr.close()
            return {}
Example #3
    def _procure_calculation_all(self, cr, uid, ids, context=None):
        """
        @param self: The object pointer.
        @param cr: A database cursor
        @param uid: ID of the user currently logged in
        @param ids: List of IDs selected
        @param context: A standard dictionary
        """
        with Environment.manage():
            proc_obj = self.pool.get('procurement.order')
            # As this function runs in a new thread, I need to open a new cursor, because the old one may be closed

            new_cr = self.pool.cursor()
            user = self.pool.get('res.users').browse(new_cr,
                                                     uid,
                                                     uid,
                                                     context=context)
            comps = [x.id for x in user.company_ids]
            for comp in comps:
                proc_obj.run_scheduler(new_cr,
                                       uid,
                                       use_new_cursor=new_cr.dbname,
                                       company_id=comp,
                                       context=context)
            # close the new cursor
            new_cr.close()
            return {}
Example #4
 def _procurement_run_thread(self, cr, uid, ids, context=None):
     with Environment.manage():
         proc_obj = self.pool.get('procurement.order')
         new_cr = self.pool.cursor()
         proc_obj.run_procurement(new_cr, uid, context.get('active_ids'), use_new_cursor=new_cr.dbname, context=context)
         new_cr.close()
         return {}
Example #5
    def read_group(self,
                   cr,
                   uid,
                   domain,
                   fields,
                   groupby,
                   offset=0,
                   limit=None,
                   context=None,
                   orderby=False,
                   lazy=True):
        res = super(stock_history, self).read_group(cr,
                                                    uid,
                                                    domain,
                                                    fields,
                                                    groupby,
                                                    offset=offset,
                                                    limit=limit,
                                                    context=context,
                                                    orderby=orderby,
                                                    lazy=lazy)

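        # Convert each group's inventory value into the company's parallel currency at the history date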
        if 'parallel_inventory_value' in fields and 'inventory_value' in fields:
            context['date'] = context.get('history_date')
            for line in res:

                with Environment.manage():  # class function
                    env = Environment(cr, uid, context)

                line[
                    'parallel_inventory_value'] = env.user.company_id.currency_id.compute(
                        line['inventory_value'],
                        env.user.company_id.parallel_currency_id)

        return res
Example #6
def execute(conf_attrs, dbname, uid, obj, method, *args, **kwargs):
    import openerp
    from openerp.api import Environment
    from openerp.modules.registry import Registry
    for attr, value in conf_attrs.items():
        openerp.tools.config[attr] = value
    with Environment.manage():
        registry = Registry(dbname)
        cr = registry.cursor()
        context = kwargs.get('context') and kwargs.pop('context') or {}
        env = Environment(cr, uid, context)
        # openerp.api.Environment._local.environments = env
        try:
            getattr(env.registry[obj], method)(cr, uid, *args, **kwargs)
            # Commit only when the function finishes
            env.cr.commit()
        except Exception as exc:
            env.cr.rollback()
            try:
                raise execute.retry(
                    queue=execute.request.delivery_info['routing_key'],
                    exc=exc,
                    countdown=(execute.request.retries + 1) * 60,
                    max_retries=5)
            except Exception as retry_exc:
                raise retry_exc
        finally:
            env.cr.close()
    return True
Example #7
    def generate_pdf_thread(self,
                            cr,
                            uid,
                            thread_lock,
                            taxe_ids,
                            context=None):
        """
        @param self: The object pointer.
        @param cr: A database cursor
        @param uid: ID of the user currently logged in
        @param thread_lock: Shared [pending_count, lock, taxcertificate_id] list coordinating the worker threads
        @param taxe_ids: IDs of the tax certificate items to render
        @param context: A standard dictionary
        """
        time.sleep(5)
        with Environment.manage():
            # As this function runs in a new thread, I need to open a new cursor, because the old one may be closed
            new_cr = self.pool.cursor()
            new_env = Environment(new_cr, uid, context)

            report = new_env['report']
            for taxe in new_env['extraschool.taxcertificate_item'].browse(
                    taxe_ids):
                report.get_pdf(
                    taxe, 'extraschool.tpl_taxe_certificate_wizard_report')

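            # Decrement the shared pending-thread counter under its lock; the last thread to finish marks the certificate's PDF as ready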
            thread_lock[1].acquire()
            thread_lock[0] -= 1
            if thread_lock[0] == 0:
                new_env['extraschool.taxcertificate'].browse(
                    thread_lock[2]).pdf_ready = True

            thread_lock[1].release()
            new_cr.commit()
            new_cr.close()
            return {}
Example #8
    def _procure_calculation_all(self, cr, uid, ids, context=None):
        """
        @param self: The object pointer.
        @param cr: A database cursor
        @param uid: ID of the user currently logged in
        @param ids: List of IDs selected
        @param context: A standard dictionary
        """
        with Environment.manage():
            proc_obj = self.pool.get('procurement.order')
            # As this function runs in a new thread, I need to open a new cursor, because the old one may be closed

            new_cr = self.pool.cursor()
            scheduler_cron_id = self.pool['ir.model.data'].get_object_reference(new_cr, SUPERUSER_ID, 'procurement', 'ir_cron_scheduler_action')[1]
            # Avoid running the scheduler multiple times at the same time
            try:
                with tools.mute_logger('openerp.sql_db'):
                    new_cr.execute("SELECT id FROM ir_cron WHERE id = %s FOR UPDATE NOWAIT", (scheduler_cron_id,))
            except Exception:
                _logger.info('Attempt to run procurement scheduler aborted, as already running')
                new_cr.rollback()
                new_cr.close()
                return {}
            user = self.pool.get('res.users').browse(new_cr, uid, uid, context=context)
            comps = [x.id for x in user.company_ids]
            for comp in comps:
                proc_obj.run_scheduler(new_cr, uid, use_new_cursor=new_cr.dbname, company_id=comp, context=context)
            # close the new cursor
            new_cr.close()
            return {}
Example #9
    def create_return_picking(self):
        with Environment.manage():
            env_thread1 = Environment(self._cr, self._uid, self._context)
            #stock_move_obj=env_thread1['stock.move']

            for picking in self.order_id.picking_ids:
                if picking.picking_type_code != 'outgoing':
                    continue
                moves = []
                move_qty = {}
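                # Match each refund line to its outgoing stock move and remember the quantity to return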
                for line in self.amazon_refund_line_ids:
                    if line.amazon_order_line_id.sale_order_line_id:
                        move = env_thread1['stock.move'].search([
                            ('procurement_id.sale_line_id', '=',
                             line.amazon_order_line_id.sale_order_line_id.id),
                            ('product_id', '=', line.product_id.id),
                            ('picking_id', '=', picking.id)
                        ])
                        moves.append(move.id)
                        move_qty.update({move.id: line.qty_canceled})
                    result = env_thread1['stock.return.picking'].with_context({
                        'active_id':
                        picking.id
                    }).default_get(fields=[
                        'product_return_moves', 'move_dest_exists',
                        'location_id'
                    ])

                    move_dest_exists = []
                    product_return_moves = []
                    if result.get('move_dest_exists', []):
                        for exist_line in result.get('move_dest_exists', []):
                            if exist_line.get('move_id') in moves:
                                move_dest_exists.append([0, 0, exist_line])
                    if result.get('product_return_moves', []):
                        for move_line in result.get('product_return_moves',
                                                    []):
                            if len(move_line) == 3:
                                if move_line[2].get('move_id') in moves:
                                    if move_qty.get(
                                            move_line[2].get('move_id'),
                                            0.0) > 0.0:
                                        move_line[2].update({
                                            'quantity':
                                            move_qty.get(
                                                move_line[2].get('move_id'), 0.0)
                                        })
                                    product_return_moves.append(move_line)
                    record = env_thread1['stock.return.picking'].create({
                        'move_dest_exists':
                        move_dest_exists,
                        'product_return_moves':
                        product_return_moves,
                        'location_id':
                        result.get('location_id')
                    })
                    result = record.with_context({
                        'active_id': picking.id
                    }).create_returns()
        return True
Example #10
 def on_change_lines(self):
     with Environment.manage():
         env_thread1 = Environment(self._cr, self._uid, self._context)
         amazon_refund_lines_obj = env_thread1['amazon.refund.order.lines']
         for record in self:
             order = record.order_id
             vals = {}
             new_amazon_retrun_lines = []
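             # For every order line with an Amazon product, build refund line values via new()/_convert_to_write() (so defaults are filled in) and create the refund line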
             for line in order.ept_order_line:
                 if line.amazon_product_id:
                     info = {
                         'amazon_order_line_id': line.id,
                         'amazon_product_id': line.amazon_product_id.id,
                         'product_id': line.amazon_product_id.product_id.id,
                         'product_qty': line.product_uom_qty,
                         'price_subtotal': line.price_subtotal,
                         'order_line_amount': line.price_unit,
                         'order_line_tax': line.order_line_tax,
                         'item_promotion_adjust': line.promotion_discount,
                         'shipping_charge': line.shipping_charge_ept,
                         'shipping_tax': line.shipping_charge_tax,
                         'gift_wrap_charge': line.gift_wrapper_charge,
                         'gift_wrap_tax': line.gift_wrapper_tax,
                         'message': 'CustomerReturn'
                     }
                     vals.update(info)
                     temp_refund_lines = amazon_refund_lines_obj.new(vals)
                     retvals = amazon_refund_lines_obj._convert_to_write(
                         temp_refund_lines._cache)
                     new_amazon_retrun_lines.append(
                         amazon_refund_lines_obj.create(retvals).id)
                     self.company_id = order.warehouse_id.company_id.id
             self.amazon_refund_line_ids = amazon_refund_lines_obj.browse(
                 new_amazon_retrun_lines)
Example #11
def assign_old_sequences(cr, registry):
    with Environment.manage():
        env = Environment(cr, SUPERUSER_ID, {})

        sequence_model = env['ir.sequence']

        claims = env['crm.claim'].search([], order="id")
        for claim in claims:
            claim.code = sequence_model.next_by_code('crm.claim')
Example #12
 def sub_thread_create_accounting_entries(self, move_id, cost_line):
     with Environment.manage():
         new_env = Environment(self.pool.cursor(),
                               self.env.uid,
                               self.env.context
                               )
         self.env.cr.commit()
         this = self.with_env(env=new_env).browse(self.ids)
         this._create_accounting_entries(move_id, cost_line)
         this.env.cr.commit()
         this.env.cr.close()
Example #13
    def auto_workflow_process(self, auto_workflow_process_id=False, ids=[]):
        with Environment.manage():
            env_thread1 = Environment(self._cr,self._uid,self._context)
            sale_order_obj=env_thread1['sale.order']
            sale_order_line_obj=env_thread1['sale.order.line']
            account_payment_obj=env_thread1['account.payment']
            workflow_process_obj=env_thread1['sale.workflow.process.ept']
            if not auto_workflow_process_id:
                work_flow_process_records=workflow_process_obj.search([])
            else:
                work_flow_process_records=workflow_process_obj.browse(auto_workflow_process_id)

            if not work_flow_process_records:
                return True
            
            for work_flow_process_record in work_flow_process_records:
                if not ids:
                    orders=sale_order_obj.search([('auto_workflow_process_id','=',work_flow_process_record.id),('state','not in',('done','cancel','sale')),('invoice_status','!=','invoiced')])#('invoiced','=',False)
                else:
                    orders=sale_order_obj.search([('auto_workflow_process_id','=',work_flow_process_record.id),('id','in',ids)]) 
                if not orders:
                    continue
                for order in orders:
                    if order.invoice_status and order.invoice_status=='invoiced': 
                        continue
                    if work_flow_process_record.validate_order:
                        order.action_confirm()
                    if work_flow_process_record.invoice_policy=='delivery':
                        continue
                    if not work_flow_process_record.invoice_policy and not sale_order_line_obj.search([('product_id.invoice_policy','!=','delivery'),('order_id','in',order.ids)]):
                        continue    
                    if not order.invoice_ids:
                        if work_flow_process_record.create_invoice:
                            order.action_invoice_create()
                    if work_flow_process_record.validate_invoice:
                        for invoice in order.invoice_ids:                        
                            invoice.action_invoice_open()                                                            
                            if work_flow_process_record.register_payment:
                                if invoice.residual:
                                # Create Invoice and Make Payment                                                                                                
                                    vals={
                                        'journal_id':work_flow_process_record.journal_id.id,
                                        'invoice_ids':[(6,0,[invoice.id])],
                                        'communication':invoice.reference,
                                        'currency_id':invoice.currency_id.id,
                                        'payment_type':'inbound',
                                        'partner_id':invoice.commercial_partner_id.id,
                                        'amount':invoice.residual,
                                        'payment_method_id':work_flow_process_record.journal_id.inbound_payment_method_ids.id,
                                        'partner_type':'customer'
                                        }
                                    new_rec=account_payment_obj.create(vals)
                                    new_rec.post()                                
        return True
Example #14
def assign_old_sequences(cr, registry):
    if not new_field_code_added:
        # the field was already existing before the installation of the addon
        return
    with Environment.manage():
        env = Environment(cr, SUPERUSER_ID, {})

        sequence_model = env['ir.sequence']

        claims = env['crm.claim'].search([], order="id")
        for claim in claims:
            claim.code = sequence_model.next_by_code('crm.claim')
Example #15
 def shell(self, dbname):
     local_vars = {'openerp': openerp}
     with Environment.manage():
         if dbname:
             registry = openerp.modules.registry.RegistryManager.get(dbname)
             with registry.cursor() as cr:
                 uid = openerp.SUPERUSER_ID
                 ctx = Environment(cr, uid, {})['res.users'].context_get()
                 env = Environment(cr, uid, ctx)
                 local_vars['env'] = env
                 local_vars['self'] = env.user
                 self.console(local_vars)
         else:
             self.console(local_vars)
Example #16
 def _calculation_orderpoint(self):
     """
     @param self: The object pointer.
     """
     new_cr = sql_db.db_connect(self.env.cr.dbname).cursor()
     uid, context = self.env.uid, self.env.context
     with Environment.manage():
         try:
             self.env = Environment(new_cr, uid, context)
             orderpoint_obj = self.env['stock.orderpoint.line']
             orderpoint_obj.compute_orderpoint()
         finally:
             new_cr.commit()
             new_cr.close()
     return {}
Example #17
    def _background_rescheduling(self, cr, uid, ids, context=None):
        """
        @param self: The object pointer.
        @param cr: A database cursor
        @param uid: ID of the user currently logged in
        @param ids: List of IDs selected
        @param context: A standard dictionary
        """
        with Environment.manage():
            new_cr = self.pool.cursor()
            self._calc_rescheduling(new_cr, uid, ids, context)
            new_cr.commit()
            new_cr.close()

        return {}
Example #18
 def _background_estimation(self, cr, uid, ids, context=None):
     """
     @param self: The object pointer.
     @param cr: A database cursor
     @param uid: ID of the user currently logged in
     @param ids: List of IDs selected
     @param context: A standard dictionary
     """
     with Environment.manage():
         new_cr = self.pool.cursor()
         self._calc_estimation(new_cr, uid, ids, context)
         new_cr.commit()
         new_cr.close()
         
     return {}        
Example #19
 def fixdb(self, dbname):
     with Environment.manage():
         if config['db_name']:
             db_names = config['db_name'].split(',')
         else:
             db_names = openerp.service.db.list_dbs(True)
         for dbname in db_names:
             _logger.info('Running fix for database %s', dbname)
             registry = openerp.modules.registry.RegistryManager.get(dbname)
             with registry.cursor() as cr:
                 uid = openerp.SUPERUSER_ID
                 ctx = Environment(cr, uid, {})['res.users'].context_get()
                 env = Environment(cr, uid, ctx)
                 _logger.info('Fixing database started')
                 env['db.configuration'].fix_db(uninstall_modules=True)
                 _logger.info('Fixing database finished')
Example #20
 def _register_login(self, db_name, user_id, user_agent_env):
     db = pooler.get_db(db_name)
     cr = db.cursor()
     if not user_id:
         return
     with Environment.manage():
         collaborator_obj = self.pool.get('kemas.collaborator')
         collaborator_ids = collaborator_obj.search(cr, user_id, [('user_id', '=', user_id)])
         if collaborator_ids:
             vals_login = {
                           'collaborator_id' : collaborator_ids[0],
                           'base_location' : user_agent_env['base_location'],
                           'remote_address' : user_agent_env['REMOTE_ADDR'],
                           }
             self.pool.get('kemas.collaborator.logbook.login').create(cr, 1, vals_login)
     cr.commit()
Example #21
    def _action_button_confirm_thread(self):
        _logger.debug("CMNT entering the thread!")
        new_cr = self.pool.cursor()
        with Environment.manage():
            uid, context = self.env.uid, self.env.context
            env = Environment(new_cr, uid, context)
            try:
                env['sale.order'].browse(self.id).action_button_confirm()

            except Exception as e:
                new_cr.rollback()
                new_cr.close()
                _logger.debug("CMNT error in the thread! %s", str(e))
                return {}
            new_cr.commit()
            new_cr.close()
            return {}
Example #22
 def _calculation_summary(self):
     """
     @param self: The object pointer.
     """
     new_cr = sql_db.db_connect(self.env.cr.dbname).cursor()
     uid, context = self.env.uid, self.env.context
     with Environment.manage():
         try:
             self.env = Environment(new_cr, uid, context)
             summary_obj = self.env['product.summary']
             summary_obj.get_summary(int(self.month), int(self.year),
                                     self.product_ids)
         finally:
             new_cr.commit()
             new_cr.close()
     return {}
Example #23
 def shell(self, dbname):
     local_vars = {
         'openerp': openerp
     }
     with Environment.manage():
         if dbname:
             registry = openerp.modules.registry.RegistryManager.get(dbname)
             with registry.cursor() as cr:
                 uid = openerp.SUPERUSER_ID
                 ctx = Environment(cr, uid, {})['res.users'].context_get()
                 env = Environment(cr, uid, ctx)
                 local_vars['env'] = env
                 local_vars['self'] = env.user
                 self.console(local_vars)
         else:
             self.console(local_vars)
Example #24
    def generate_pdf_thread(self, cr, uid, thread_lock, invoices_ids, context=None):
        """
        @param self: The object pointer.
        @param cr: A database cursor
        @param uid: ID of the user currently logged in
        @param thread_lock: Shared [pending_count, lock, biller_id] list coordinating the worker threads
        @param invoices_ids: IDs of the invoices to render
        @param context: A standard dictionary
        """
        time.sleep(5)
        with Environment.manage():
#             print "******"
#             print invoices_ids
#             print "******"
            # As this function runs in a new thread, I need to open a new cursor, because the old one may be closed
            new_cr = self.pool.cursor()
            new_env = Environment(new_cr, uid, context)
            count = 0
            report = new_env['report']
            for invoice in new_env['extraschool.invoice'].browse(invoices_ids):
                count = count + 1
                print("generate pdf %s count: %s" % (invoice.id, count))
                report.get_pdf(invoice, 'extraschool.invoice_report_layout')
          
                        
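            # Decrement the shared pending-thread counter under its lock; the last thread to finish marks the biller's PDF as ready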
            thread_lock[1].acquire()
#             print "nbr_thread : %s" % (thread_lock[0])
            thread_lock[0] -= 1
            if thread_lock[0] == 0:
#                 print "this is the end"

#                 post_vars = {'subject': "Print Ready ;-)",
#                              'body': "You print is ready !",
#                              'partner_ids': [(uid)],} 
#                user = env['res.users'].browse(uid)
#                 print "Set biller pdf ready"
                new_env['extraschool.biller'].browse(thread_lock[2]).pdf_ready = True
                #print "update extraschool_biller set pdf_ready = True where id = %s" % (thread_lock[2])
                #new_cr.execute("update extraschool_biller set pdf_ready = True where id = %s",[thread_lock[2]])
                #env['res.partner'].message_post(new_cr, SUPERUSER_ID, False,context, **post_vars)                        
#                env.user.notify_info('My information message')

            thread_lock[1].release()                 
            new_cr.commit()     
            new_cr.close()
            return {}
Example #25
    def run_create_accounting_entries(self):
        with Environment.manage():
            new_env = Environment(self.pool.cursor(),
                                  self.env.uid,
                                  self.env.context)
            self.env.cr.commit()
            this = self.with_env(env=new_env)
            move_id = this._create_account_move()
            _logger.info("Start create account entries for Purchase Cost Distribution"
                         " at %s" % (datetime.now().time().strftime("%H:%M:%S")))
            for cost_line in this.cost_lines:
                # Create Accounting Entries
                this._create_accounting_entries(move_id, cost_line)

            _logger.info("Finish create account entries for Purchase Cost Distribution"
                         " at %s" % (datetime.now().time().strftime("%H:%M:%S")))
            new_env.cr.commit()
            new_env.cr.close()
Example #26
 def _register_login(self, db_name, user_id, user_agent_env):
     db = pooler.get_db(db_name)
     cr = db.cursor()
     if not user_id:
         return
     with Environment.manage():
         collaborator_obj = self.pool.get('kemas.collaborator')
         collaborator_ids = collaborator_obj.search(
             cr, user_id, [('user_id', '=', user_id)])
         if collaborator_ids:
             vals_login = {
                 'collaborator_id': collaborator_ids[0],
                 'base_location': user_agent_env['base_location'],
                 'remote_address': user_agent_env['REMOTE_ADDR'],
             }
             self.pool.get('kemas.collaborator.logbook.login').create(
                 cr, 1, vals_login)
     cr.commit()
Example #27
 def _procure_calculation_orderpoint(self, cr, uid, ids, context=None):
     """
     @param self: The object pointer.
     @param cr: A database cursor
     @param uid: ID of the user currently logged in
     @param ids: List of IDs selected
     @param context: A standard dictionary
     """
     with Environment.manage():
         proc_obj = self.pool.get('procurement.order')
         #As this function is in a new thread, I need to open a new cursor, because the old one may be closed
         new_cr = self.pool.cursor()
         user_obj = self.pool.get('res.users')
         company_id = user_obj.browse(new_cr, uid, uid, context=context).company_id.id
         proc_obj._procure_orderpoint_confirm(new_cr, uid, use_new_cursor=new_cr.dbname, company_id=company_id, context=context)
         # close the new cursor
         new_cr.close()
         return {}
Example #28
 def thread_init_session_code(self, cr, uid, start_code, context=None):
     with Environment.manage():
         try:
             new_cr = self.pool.cursor()
             print(start_code)
             for x in range(start_code, 89999999):
                 code = 10000000 + x
                 if self.search(new_cr, uid, [('name', '=', code)]):
                     continue
                 new_cr.execute(
                     "INSERT INTO product_session_code(name, sequence) VALUES (%s, %s);",
                     (str(code), code))
                 new_cr.commit()
         except Exception as e:
             new_cr.rollback()
             log.exception(e)
         finally:
             new_cr.close()
     return True
Example #29
    def read_group(
        self, cr, uid, domain, fields, groupby, offset=0, limit=None, context=None, orderby=False, lazy=True
    ):
        res = super(stock_history, self).read_group(
            cr, uid, domain, fields, groupby, offset=offset, limit=limit, context=context, orderby=orderby, lazy=lazy
        )

        if "parallel_inventory_value" in fields and "inventory_value" in fields:
            context["date"] = context.get("history_date")
            for line in res:

                with Environment.manage():  # class function
                    env = Environment(cr, uid, context)

                line["parallel_inventory_value"] = env.user.company_id.currency_id.compute(
                    line["inventory_value"], env.user.company_id.parallel_currency_id
                )

        return res
Example #30
    def auto_workflow_process(self, workflow_process_id=False, ids=[]):
        with Environment.manage():
            env_thread1 = Environment(self._cr,self._uid,self._context)
            sale_order_obj=env_thread1['sale.order']
            workflow_process_obj=env_thread1['sale.workflow.process']
            if not workflow_process_id:
                work_flow_process_records=workflow_process_obj.search([])
            else:
                work_flow_process_records=workflow_process_obj.browse(workflow_process_id)

            if not work_flow_process_records:
                return True
            
            for work_flow_process_record in work_flow_process_records:
                if not ids:
                    orders=sale_order_obj.search([('workflow_process_id','=',work_flow_process_record.id),('state','not in',('done','cancel','shipping_except','invoice_except')),('invoiced','=',False)])
                else:
                    orders=sale_order_obj.search([('workflow_process_id','=',work_flow_process_record.id),('id','in',ids)]) 
                if not orders:
                    continue
                for order in orders:
                    if order.invoiced:
                        continue
                    if work_flow_process_record.validate_order:
                        order.signal_workflow('order_confirm')
                    if not order.invoice_ids:
                        if work_flow_process_record.create_invoice and order.order_policy=='manual' and work_flow_process_record.invoice_on=='manual':
                            order.manual_invoice()
                    if work_flow_process_record.validate_invoice:
                        for invoice in order.invoice_ids:
                            invoice.signal_workflow('invoice_open')
                            journal = work_flow_process_record.journal_id
                            if work_flow_process_record.invoice_date_is_order_date:
                                date = order.date_order
                            else:
                                date = time.strftime('%Y-%m-%d %H:%M:%S')    
                            amount = invoice.amount_total

                            if work_flow_process_record.register_payment:
                                sale_order_obj.pay_sale_order(order,invoice,journal,amount,date)                    
                                invoice.reconcile_invoice()
        return True
Example #31
 def _procure_calculation_orderpoint_inherit(self,
                                             cr,
                                             uid,
                                             ids,
                                             context=None):
     """
     @param self: The object pointer.
     @param cr: A database cursor
     @param uid: ID of the user currently logged in
     @param ids: List of IDs selected
     @param context: A standard dictionary
     """
     with Environment.manage():
         proc_obj = self.pool.get('procurement.order')
         warehouse_obj = self.pool.get('stock.warehouse')
         #As this function is in a new thread, I need to open a new cursor, because the old one may be closed
         warehouse_ids = []
         new_cr = self.pool.cursor()
         user_obj = self.pool.get('res.users')
         company_id = user_obj.browse(new_cr, uid, uid,
                                      context=context).company_id.id
         user = user_obj.browse(new_cr, uid, uid, context=context)
         for warehouse in user.allowed_warehouses:
             _logger.info("Warehouse: %s", warehouse.name)
             warehouse_ids.append(warehouse.id)
         automatic = False
         use_new_cursor = False
         procure_method = 'all'
         cron_day = False
         user_id = user_obj.browse(new_cr, uid, uid, context=context).id
         proc_obj._procure_orderpoint_confirm_inherit(new_cr,
                                                      uid,
                                                      warehouse_ids,
                                                      automatic,
                                                      new_cr.dbname,
                                                      procure_method,
                                                      cron_day,
                                                      user_id,
                                                      context=context)
         # close the new cursor
         new_cr.close()
         return {}
Example #32
 def fixdb(self, dbname):
     with Environment.manage():
         if config['db_name']:
             db_names = config['db_name'].split(',')
         else:
             db_names = openerp.service.db.list_dbs(True)
         for dbname in db_names:
             _logger.info('Running fix for database %s', dbname)
             registry = openerp.modules.registry.RegistryManager.get(dbname)
             with registry.cursor() as cr:
                 uid = openerp.SUPERUSER_ID
                 ctx = Environment(cr, uid, {})['res.users'].context_get()
                 env = Environment(cr, uid, ctx)
                 _logger.info('Fixing database started')
                 # we do not force uninstallation, just in case there is an
                 # error in the classification of adhoc modules or in the data
                 # (the module scripts should be the ones in charge of
                 # cleaning up, if appropriate)
                 env['db.configuration'].fix_db(uninstall_modules=False)
                 _logger.info('Fixing database finished')
Example #33
 def _calculation_forecast(self):
     """
     @param self: The object pointer.
     """
     new_cr = sql_db.db_connect(self.env.cr.dbname).cursor()
     uid, context = self.env.uid, self.env.context
     with Environment.manage():
         try:
             self.env = Environment(new_cr, uid, context)
             summary_obj = self.env['product.summary']
             if self.forecast_type == 'mobile_avg':
                 summary_obj.get_avg_mobile_forecast(
                     int(self.month), int(self.year),
                     int(self.historic_periods), int(self.forecast_periods), self.product_ids, self.grow_percent)
             else:
                 summary_obj.get_time_series_forecast(
                     int(self.month), int(self.year),
                     int(self.forecast_periods), self.product_ids, self.grow_percent)
         finally:
             new_cr.commit()
             new_cr.close()
     return {}
Example #34
    def generate_pdf_thread(self, cr, uid, reminders, context=None):
        """
        @param self: The object pointer.
        @param cr: A database cursor
        @param uid: ID of the user currently logged in
        @param reminders: Reminder records whose PDFs must be generated
        @param context: A standard dictionary
        """
        with Environment.manage():

            # As this function runs in a new thread, I need to open a new cursor, because the old one may be closed.
            new_cr = self.pool.cursor()
            env = Environment(new_cr, uid, context)

            report = self.pool.get('report')
            for reminder in reminders:
                print("generate pdf %s" % reminder.id)
                env['report'].get_pdf(reminder,
                                      'extraschool.reminder_report_layout')

            new_cr.commit()
            new_cr.close()
            return {}
Example #35
    def mfa_login_post(self, *args, **kwargs):
        """Process MFA login attempt

        Overview:
            * Try to find a user based on the MFA login token. If this doesn't
              work, redirect to the password login page with an error message
            * Validate the confirmation code provided by the user. If it's not
              valid, redirect to the previous login step with an error message
            * Generate a long-term MFA login token for the user and log the
              user in using the token
            * Build a trusted device cookie and add it to the response if the
              trusted device option was checked
            * Redirect to the provided URL or to '/web' if one was not given
        """

        # sudo() is required because there is no request.env.uid (likely since
        # there is no user logged in at the start of the request)
        user_model_sudo = request.env['res.users'].sudo()
        device_model_sudo = user_model_sudo.env['res.users.device']
        config_model_sudo = user_model_sudo.env['ir.config_parameter']

        token = request.params.get('mfa_login_token')
        try:
            user = user_model_sudo.user_from_mfa_login_token(token)
        except (MfaTokenInvalidError, MfaTokenExpiredError) as exception:
            return http.local_redirect(
                '/web/login',
                query={
                    'redirect': request.params.get('redirect'),
                    'error': exception.message,
                },
                keep_hash=True,
            )

        confirmation_code = request.params.get('confirmation_code')
        if not user.validate_mfa_confirmation_code(confirmation_code):
            return http.local_redirect(
                '/auth_totp/login',
                query={
                    'redirect':
                    request.params.get('redirect'),
                    'error':
                    _('Your confirmation code is not correct. Please try'
                      ' again.'),
                    'mfa_login_token':
                    token,
                },
                keep_hash=True,
            )

        # These context managers trigger a safe commit, which persists the
        # changes right away and is needed for the auth call
        with Environment.manage():
            with registry(request.db).cursor() as temp_cr:
                temp_env = Environment(temp_cr, SUPERUSER_ID, request.context)
                temp_user = temp_env['res.users'].browse(user.id)
                temp_user.generate_mfa_login_token(60 * 24 * 30)
                token = temp_user.mfa_login_token
        request.session.authenticate(request.db, user.login, token, user.id)

        redirect = request.params.get('redirect')
        if not redirect:
            redirect = '/web'
        response = Response(http.redirect_with_hash(redirect))

        if request.params.get('remember_device'):
            device = device_model_sudo.create({'user_id': user.id})
            secret = config_model_sudo.get_param('database.secret')
            device_cookie = JsonSecureCookie({'device_id': device.id}, secret)
            cookie_lifetime = timedelta(days=30)
            cookie_exp = datetime.utcnow() + cookie_lifetime
            device_cookie = device_cookie.serialize(cookie_exp)
            cookie_key = 'trusted_devices_%d' % user.id
            sec_config = config_model_sudo.get_param('auth_totp.secure_cookie')
            security_flag = sec_config != '0'
            response.set_cookie(
                cookie_key,
                device_cookie,
                max_age=cookie_lifetime.total_seconds(),
                expires=cookie_exp,
                httponly=True,
                secure=security_flag,
            )

        return response
Example #36
 def _send_email(self, cr, uid, ids, context={}):
     def send():
         try:
             res = config_obj.send_email_event(cr, uid, int(line['event_line_id']), context)
             if not res:
                 return 'Error'
             else:
                 return 'Successful'
         except:
             return 'Error'
     
     with Environment.manage():    
         config_obj = self.pool.get('kemas.config')
         wizard_line_obj = self.pool.get('kemas.send.notification.event.line.wizard')
         line_obj = self.pool.get('kemas.event.collaborator.line') 
         event_obj = self.pool.get('kemas.event')
         #-------------------------------------------------------------------------------------------------------------
         if type(ids).__name__ == 'list':
             wizard_id = ids[0]
         else:
             wizard_id = ids
         
         event_id = self.read(cr, uid, wizard_id, ['event_id'])['event_id']
         super(kemas_send_notification_event_wizard, self).write(cr, uid, wizard_id, {'sending_emails':True})
         cr.commit()
         super(addons.kemas.kemas.kemas_event, event_obj).write(cr, uid, event_id, {'sending_emails':True})
         cr.commit()
 
         line_ids = self.read(cr, uid, wizard_id, ['send_notification_event_line_wizard_ids'])['send_notification_event_line_wizard_ids']
         lines = wizard_line_obj.read(cr, uid, line_ids)
         _lines = []
         for line in lines:
             if line['send_email']:
                 _lines.append(line)
         
         if len(_lines) == 0:
             raise osv.except_osv(_('Invalid operation!'), _('No staff to send notifications.'))
         
         if event_id not in self.collaborator_ids_send_email:
             self.collaborator_ids_send_email[event_id] = []
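         # Mark every selected line as 'Waiting' and remember its collaborator so this sending run can be tracked per event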
         for line in _lines:
             self.collaborator_ids_send_email[event_id].append(line['collaborator_id'][0])
             line_obj.write(cr, uid, [long(line['event_line_id'])], {
                                         'send_email_state':'Waiting',
                                         })
         cr.commit()
         for line in _lines:
             cr.commit()
             sending_emails = event_obj.read(cr, uid, event_id, ['sending_emails'])['sending_emails']
             if sending_emails == False: break
             res_email = kemas_extras.timeout(send, timeout_duration=self.timeout_send_email, default='Timeout')
             if res_email == 'Successful':
                 wizard_line_obj.write(cr, uid, [line['id']], {
                                     'state':'Successful',
                                     'send_email': False,
                                     'sent_date' : time.strftime("%Y-%m-%d %H:%M:%S")
                                     })
                 line_obj.write(cr, uid, [long(line['event_line_id'])], {
                                     'send_email_state':'Sent',
                                     'sent_date' : time.strftime("%Y-%m-%d %H:%M:%S")
                                     })
             elif res_email == 'Error':
                 wizard_line_obj.write(cr, uid, [line['id']], {
                                     'state':'Error',
                                     'send_email':True,
                                     'sent_date' : time.strftime("%Y-%m-%d %H:%M:%S")
                                     })
                 line_obj.write(cr, uid, [long(line['event_line_id'])], {
                                     'send_email_state':'Error',
                                     'sent_date' : time.strftime("%Y-%m-%d %H:%M:%S")
                                     })
             elif res_email == 'Timeout':
                 wizard_line_obj.write(cr, uid, [line['id']], {
                                     'state':'Timeout',
                                     'send_email':True,
                                     'sent_date' : time.strftime("%Y-%m-%d %H:%M:%S")
                                     })
                 line_obj.write(cr, uid, [long(line['event_line_id'])], {
                                     'send_email_state':'Timeout',
                                     'sent_date' : time.strftime("%Y-%m-%d %H:%M:%S")
                                     })
             cr.commit()
         cr.commit()
         super(kemas_send_notification_event_wizard, self).write(cr, uid, wizard_id, {'sending_emails': False})
         super(addons.kemas.kemas.kemas_event, event_obj).write(cr, uid, event_id, {'sending_emails': False})
         try:
             del self.collaborator_ids_send_email[event_id]
         except KeyError:
             pass
         cr.commit()
Example #37
 def definitivo(self, cr, uid, ids, context=None):
     inv_obj = self.pool.get('account.invoice')
     inv_line_obj = self.pool.get('account.invoice.line')
     with Environment.manage():
         with registry(cr.dbname).cursor() as new_cr:
             new_cr.execute(
                 "SELECT t2.id,t1.fe_emision,t1.grupo_producto,t2.nu_documento,t1.co_canilla ,t1.pbase::float,t1.exonerado::float,  t1.igv::float,  t1.total::float, t1.ruc ,t2.name,  t2.ti_documento ,t1.id FROM comprobante t1 join res_partner t2 on ( t1.co_canilla = t2.co_canilla  ) where not EXISTS (SELECT 1 FROM account_invoice where comprobante_id = t1.id)"
             )
             ticket_cabecera = new_cr.fetchall()
             for row in ticket_cabecera:
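                 # Build the journal code from the document type (DNI or other) and the product group of the ticket row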
                 if row[2] == '1':
                     strproducto = 'E'
                 else:
                     strproducto = 'O'
                 if row[11] == 'DNI':
                     strdocumento = 'B'
                 else:
                     strdocumento = 'F'
                 strcode = 'V' + strdocumento + strproducto
                 new_cr.execute(
                     " select id from account_journal where  code = %s",
                     [strcode])
                 tid = new_cr.fetchone()
                 inv = {
                     'partner_id': row[0],
                     'journal_id': tid,
                     'account_id': 62,
                     'amount_untaxed': row[5],
                     'amount_tax': row[7],
                     'amount_total': row[8],
                     'company_id': 1,
                     'currency_id': 165,
                     'reference_type': 'none',
                     'state': 'draft',
                     'type': 'out_invoice',
                     'comprobante_id': row[12]
                 }
                 inv_id = inv_obj.create(new_cr, uid, inv, context=context)
                 new_cr.execute(
                     "SELECT t2.id, t3.id,t3.name_template,t1.p_pauta::float,q_vendida_neto::integer, t1.tipo_afecto , t1.pbase from comprobante_linea  t1, comprobante t2 ,product_product t3 where t2.id = t1.comprobante_id and  t1.co_producto = t3.co_producto and t2.id =%s ",
                     [row[12]])
                 ticket_detalle = new_cr.fetchall()
                 for row2 in ticket_detalle:
                     inv_line = {
                         'invoice_id': inv_id,
                         'product_id': row2[1],
                         'name': row2[2],
                         'account_id': 1664,
                         'quantity': row2[4],
                         'uos_id': 1,
                         'price_unit': row2[3],
                         'price_subtotal': row2[6]
                     }
                     inv_line_id = inv_line_obj.create(new_cr,
                                                       uid,
                                                       inv_line,
                                                       context=context)
                     if row2[5] == 'Afecto':
                         query = "INSERT INTO account_invoice_line_tax  (invoice_line_id, tax_id) VALUES (%s, %s);"
                         data = (inv_line_id, 1)
                         res = new_cr.execute(query, data)
                     new_cr.commit()
Example #38
    def thread_general_import_common(self, cr, uid, ids, sheet, required_fields, o2m_required_fields, context=None):
        context.update({'active_test':False})
        import_obj = self.pool.get('import.data')
        with Environment.manage():
            try:
                new_cr = self.pool.cursor()
                for record in import_obj.browse(new_cr, uid, ids, context=None):
                    record.status_ids.unlink()
                    create_pool = self.pool.get(record.model_id.model)
                    model_id = record.model_id.id
                    path = self.save_file(record.file_name, record.file_import)
                    try:   
                        book=xlrd.open_workbook(path) 
                    except:
                        self.create_detail_import(new_cr, uid, import_id=record.id, message='File not found! Please check the path.', status='fail')
                    finally:
                        pass
                
                    sheet=book.sheet_by_index(0)
                    from_row = 3                
                    total_row = 0
                    mess_temp = ""
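                    # Count the parent rows: every record to import must have an integer identifier in column A; rows with an empty column A are child lines of the previous parent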
                    for r in range(from_row, sheet.nrows):
                        if sheet.cell(r,0).value:
                            try:
                                int(sheet.cell(r,0).value)
                                total_row += 1
                            except Exception:
                                mess_line = "Row %s in column A must be an integer" % r
                                mess_temp += len(mess_temp) == 0 and mess_line or "\n" + mess_line
                    if len(mess_temp) or total_row == 0:
                        if len(mess_temp):
                            import_obj.create_detail_import(new_cr, uid, import_id=record.id, row=0, message=mess_temp, status='fail')
                        else:
                            import_obj.create_detail_import(new_cr, uid, import_id=record.id, row=0, message="No row has a value in column A", status='fail')
                        raise Exception(mess_temp)
                    
                    val = {'state': 'processing', 'current_row': 0, 'total_row': total_row}
                    if context.get('from_row', False):
                        val.update({'current_row': context.get('from_row',0)})
                        from_row += context.get('from_row',0)
                    import_obj.write(new_cr, uid, record.id, val, context=context)
                    new_cr.commit()
                    
                    row_counter = 2
                    success_row = 0
                    current_row = 0
                    fields_name = import_obj.get_fields_name(new_cr, uid, sheet._cell_values[0], 1, sheet.ncols, context=context)
                    fields_name_in_excel = import_obj.get_fields_name(new_cr, uid, sheet._cell_values[1], 1, sheet.ncols, context=context)
#                     list_missing = map(lambda x:x, [x for x in required_fields if x not in fields_name])
                    list_missing = []
                    if list_missing:
                        str_list_missing = ""
                        for missing in list_missing:
                            value = missing.encode('utf-8').decode('utf-8')
                            str_list_missing += len(str_list_missing) == 0 and value or ','+value
                        import_obj.create_detail_import(new_cr, uid, import_id=record.id, row=current_row, message='Missing columns required: [%s]. Please check again!' % str_list_missing, status='fail')
                    else:
                        is_child = False
                        data_temp = {}
                        message = ""
                        success = True
                        for row in sheet._cell_values[from_row:]:
                            row_counter += 1
                            next_row = row_counter + 1 < sheet.nrows and row_counter + 1 or row_counter
                            if current_row == 0:
                                current_row = row_counter + 1
                            required_mess = import_obj.check_required_field(new_cr, uid, fields_name_in_excel, fields_name, row[1:], required_fields, context=None)
                            if required_mess and not is_child and sheet.cell(row_counter, 0).value:
                                import_obj.create_detail_import(new_cr, uid, import_id=record.id, row=row[0], message=required_mess, status='fail')
                            else:
                                if not (is_child or sheet.cell(row_counter, 0).value):
                                    line_message = "Row %s in the file is a child line of the previous parent row (the one with a value in column A)!" % (row_counter + 1)
                                    message += len(message) == 0 and line_message or "\n" + line_message
                                    if sheet.cell(next_row, 0).value:
                                        import_obj.create_detail_import(new_cr, uid, import_id=record.id, row=current_row, message=message, status='fail')
                                        message = ""
                                        current_row = 0
                                    continue                           
                                data = import_obj.get_values_one_row(new_cr, uid, row[1:], fields_name, model_id, sheet, row_counter, context=context)
                                vals_create = data.get('values', {})
                                for field_o2m in data.get('field_one2many', []):
                                    o2m_value = vals_create.get(field_o2m, [])
                                    if o2m_value:
                                        message_temp_child_o2m = import_obj.check_required_for_special_field(field_o2m, o2m_value[0][2], o2m_required_fields, context=None)
                                        if message_temp_child_o2m:
                                            message_tmp = data.get('message', "")
                                            message_tmp += len(message_tmp) == 0 and message_temp_child_o2m or "\n" + message_temp_child_o2m
                                            data.update({
                                                    'message': message_tmp
                                                         })
                                        if data_temp:
                                            if field_o2m in data_temp:
                                                data_temp[field_o2m].append(o2m_value[0])
                                            else:
                                                data_temp.update({field_o2m:o2m_value})
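                                # Merge many2many values into a single (6, 0, ids) command, accumulating ids across child rows.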
                                for m2m_key in data.get('m2m_keys', []):
                                    m2m_value = vals_create.get(m2m_key,False)
                                    if m2m_value:
                                        if data_temp:
                                            if m2m_key not in data_temp:
                                                data_temp.update({m2m_key : [(6,0,[])]})
                                            data_temp[m2m_key][0][2].append(m2m_value)
                                        else:
                                            vals_create[m2m_key] = [(6,0,[m2m_value])]
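                                # An empty column A on the next row marks it as a child of this one: keep accumulating into data_temp.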
                                if not sheet.cell(next_row, 0).value:
                                    if not is_child:
                                        is_child = True
                                        data_temp = vals_create
                                        # current_row keeps pointing at the first row of this parent block
                                        success = data.get('success', False)
                                    if row_counter + 1 == sheet.nrows:
                                        is_child = False
                                else:
                                    is_child = False
                                    if not data_temp:
                                        data_temp = vals_create
                                        success = data.get('success', False)
                                if data.get('message', "") != "":
                                    message += len(message) == 0 and data.get('message', "") or "\n" + data.get('message', "")
                                    success = False
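                                # No more child rows pending: create the record (or log why it failed).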
                                if not is_child:
                                    if success:
                                        try:
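                                            # Create the records for pending many2one keys first so their ids can be used in the main create.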
                                            if data.get('key_many2one',[]):
                                                message_temp = ""
                                                for key_m2o in data.get('key_many2one',[]):
                                                    message_temp_child = import_obj.check_required_for_special_field(key_m2o, data_temp[key_m2o]['values'], o2m_required_fields, context=None)
                                                    if len(message_temp_child):
                                                        message_temp += len(message_temp) == 0 and message_temp_child or "\n" + message_temp_child
                                                    if not len(message_temp):
                                                        m2o_id = self.pool.get(data_temp[key_m2o]['relation']).create(new_cr, uid, data_temp[key_m2o]['values'], context=context)
                                                        data_temp[key_m2o] = m2o_id
                                                if len(message_temp):
                                                    raise Exception(message_temp)
                                            create_pool.create(new_cr, uid, data_temp, context=context)
                                            import_obj.create_detail_import(new_cr, uid, import_id=record.id, row=current_row, message='Import line succeeded.')
                                            new_cr.commit()
                                            success_row += 1
                                        except Exception as excep:
                                            new_cr.rollback()
                                            import_obj.create_detail_import(new_cr, uid, import_id=record.id, row=current_row, message=excep.message or str(excep), status='fail')
                                    else:
                                        import_obj.create_detail_import(new_cr, uid, import_id=record.id, row=current_row, message=message, status='fail')                                             
                            import_obj.write(new_cr, uid, record.id, {'current_row': current_row,'success_row': success_row}, context=context)
                            new_cr.commit()
                            if not is_child:
                                data_temp = {}
                                message = ""
                                current_row = 0
                    import_obj.write(new_cr, uid, record.id, {'state': 'done'}, context=context)
                    new_cr.commit()
            except Exception as excep:
                log.exception(excep)
                import_obj.create_detail_import(new_cr, uid, import_id=record.id, row=row[0], message=excep.message or str(excep), status='fail')
                import_obj.write(new_cr, uid, record.id, {'state': 'error'}, context=context)
                new_cr.commit()
                log.info(excep)
            finally:
                new_cr.close()
        return True
Ejemplo n.º 39
0
    def _thread_create_docker(self, kwargs):
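        # Runs in a worker thread, so open a dedicated cursor and Environment instead of reusing the caller's.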
        with openerp.sql_db.db_connect(kwargs.get('db')).cursor() as new_cr:
            with Environment.manage():
                env = Environment(new_cr, kwargs.get('uid'), {})
                project = env['project.project'].browse(
                    [kwargs.get('project_id')])
                try:
                    if not project.exists():
                        raise Exception(
                            _("The project doesn't appear to exist!"))

                    # Get the information of the project to deploy
                    eiqui_config = env['eiqui.config.settings'].search(
                        [], order="id DESC", limit=1)
                    git_username = None
                    git_password = None
                    if eiqui_config:
                        git_username = eiqui_config.git_username
                        git_password = eiqui_config.git_password
                    repos = []
                    modules = []
                    # Get the modules and repos to install for the base vertical
                    vertical_base_id = env['eiqui.vertical'].search(
                        [('name', '=', '__base__')], limit=1)
                    if vertical_base_id:
                        branch_modules = env['eiqui.modules'].search([
                            ('repo_id.branch', '=', project.odoo_version),
                            ('id', 'in', vertical_base_id.modules.mapped('id'))
                        ])
                        for module in branch_modules:
                            modules.append(module.folder)
                            repos.append(module.repo_id.url)
                    # Get the repos to install
                    branch_repos = env['eiqui.project.modules'].search([
                        ('repo_id.branch', '=', project.odoo_version),
                        ('id', 'in', project.repo_modules_ids.mapped('id'))
                    ])
                    for repo in branch_repos:
                        repos.append(repo.url)

                    # Create the droplet
                    eiqui_utils.create_droplet(project.name,
                                               branch=project.odoo_version)
                    # Write the buildout recipes
                    eiqui_utils.prepare_client_recipe(project.name,
                                                      repos,
                                                      project.odoo_version,
                                                      git_user=git_username,
                                                      git_pass=git_password)
                    # Create the dockers and the rest of the system configuration
                    eiqui_utils.create_client(project.name)
                    # Install the database, load modules, ...
                    (inst_info, adminpasswd,
                     odoo_url) = eiqui_utils.prepare_client_instance(
                         project.name, modules_installed=modules)
                    #eiqui_utils.monitor_client(project.name)
                    project.write({
                        'server_state': 'created',
                        'adminpass': adminpasswd
                    })
                    # Send Creation Mail
                    project.send_mail_plan_creation({
                        'inst_info': inst_info,
                        'adminpasswd': adminpasswd,
                        'url': odoo_url,
                    })
                except Exception:
                    env['project.issue'].create({
                        'name': _('Error while creating a new plan'),
                        'description': traceback.format_exc(),
                        'project_id': project.id,
                        'priority': '2',
                    })
                    project.write({'server_state': 'error'})
                    # Send Error Mail
                    try:
                        project.send_mail_plan_creation()
                    except Exception:
                        pass