Example #1
# imports needed by this helper (not shown in the original snippet)
import openerp
from openerp.api import Environment
from openerp.modules.registry import RegistryManager
from openerp.tools import config


def connectdb(dbname=None, uid=1, context=None):
    openerp.tools.config.parse_config([])
    r = RegistryManager.get(dbname or config["db_name"])
    cr = r.cursor()
    Environment.reset()
    env = Environment(cr, uid, context or {})
    return env
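A minimal usage sketch for the helper above (the database name 'mydb' and the 'res.partner' query are illustrative placeholders, not part of the original):
# hedged usage sketch for connectdb(); model and database name are examples only
env = connectdb('mydb')
try:
    for partner in env['res.partner'].search([], limit=5):
        print partner.name
finally:
    env.cr.close()  # close the cursor that connectdb() opened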
Example #2
    def test_01_pos_basic_order(self):
        cr = self.registry.cursor()
        assert cr == self.registry.test_cr
        env = Environment(cr, self.uid, {})

        journal_obj = env['account.journal']
        account_obj = env['account.account']
        main_company = env.ref('base.main_company')
        main_pos_config = env.ref('point_of_sale.pos_config_main')

        account_receivable = account_obj.create({'code': 'X1012',
                                                 'name': 'Account Receivable - Test',
                                                 'user_type_id': env.ref('account.data_account_type_receivable').id,
                                                 'reconcile': True})
        field = self.env['ir.model.fields'].search([('name', '=', 'property_account_receivable_id'),
                                                    ('model', '=', 'res.partner'),
                                                    ('relation', '=', 'account.account')], limit=1)
        env['ir.property'].create({'name': 'property_account_receivable_id',
                                   'company_id': main_company.id,
                                   'fields_id': field.id,
                                   'value': 'account.account,' + str(account_receivable.id)})

        # set the company currency to USD, otherwise it will assume
        # euros. This will cause issues as the sale journal is in
        # USD; because of this all products would have a different
        # price
        main_company.currency_id = env.ref('base.USD')

        test_sale_journal = journal_obj.create({'name': 'Sale Journal - Test',
                                                'code': 'TSJ',
                                                'type': 'sale',
                                                'company_id': main_company.id})

        main_pos_config.journal_id = test_sale_journal
        main_pos_config.write({'journal_ids': [(0, 0, {'name': 'Cash Journal - Test',
                                                       'code': 'TSC',
                                                       'type': 'cash',
                                                       'company_id': main_company.id,
                                                       'journal_user': True})]})

        # open a session, the /pos/web controller will redirect to it
        main_pos_config.open_session_cb()

        # needed because tests are run before the module is marked as
        # installed. In JS, web will only load qweb coming from modules
        # that are returned by the backend in module_boot. Without
        # this you end up with js and css but no qweb.
        env['ir.module.module'].search([('name', '=', 'point_of_sale')], limit=1).state = 'installed'
        cr.release()

        self.phantom_js("/pos/web",
                        "odoo.__DEBUG__.services['web.Tour'].run('pos_basic_order', 'test')",
                        "odoo.__DEBUG__.services['web.Tour'].tours.pos_basic_order",
                        login="******")

        for order in env['pos.order'].search([]):
            self.assertEqual(order.state, 'paid', "Validated order has payment of " + str(order.amount_paid) + " and total of " + str(order.amount_total))
Example #3
    def process_search_log(self, cr, uid, context=None):
        env = Environment(cr, uid, context=context or {})
        logs = env['website.search.log'].search([])
        if not logs:
            return

        template = env.ref('website_sale_search_log.search_log_email')
        template = template.with_context(lang=env['res.users'].browse(uid).lang, logs=[l.log for l in logs])
        template.send_mail(logs[0].id, force_send=True, raise_exception=True)

        logs.unlink()
Example #4
def execute(conf_attrs, dbname, uid, obj, method, *args, **kwargs):
    import openerp
    from openerp.api import Environment
    from openerp.modules.registry import Registry
    for attr, value in conf_attrs.items():
        openerp.tools.config[attr] = value
    with Environment.manage():
        registry = Registry(dbname)
        cr = registry.cursor()
        context = kwargs.pop('context', None) or {}
        env = Environment(cr, uid, context)
        # openerp.api.Environment._local.environments = env
        try:
            print args
            getattr(env.registry[obj], method)(cr, uid, *args, **kwargs)
            # Commit only when function finish
            env.cr.commit()
        except Exception as exc:
            print exc
            env.cr.rollback()
            try:
                raise execute.retry(
                    queue=execute.request.delivery_info['routing_key'],
                    exc=exc, countdown=(execute.request.retries + 1) * 60,
                    max_retries=5)
            except Exception as retry_exc:
                raise retry_exc
        finally:
            env.cr.close()
    return True
Example #5
 def _procurement_run_thread(self, cr, uid, ids, context=None):
     with Environment.manage():
         proc_obj = self.pool.get('procurement.order')
         new_cr = self.pool.cursor()
         proc_obj.run_procurement(new_cr, uid, context.get('active_ids'), use_new_cursor=new_cr.dbname, context=context)
         new_cr.close()
         return {}
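The method above is written to run in a worker thread. A sketch of a typical caller follows (the method name procure_calculation and the returned action are illustrative, not taken from the original); it spawns the thread and returns immediately so the triggering request is not blocked:
import threading

def procure_calculation(self, cr, uid, ids, context=None):
    # launch the long-running work in its own thread; the target opens its
    # own cursor via self.pool.cursor(), so this cursor is not shared with it
    threaded_calc = threading.Thread(target=self._procurement_run_thread,
                                     args=(cr, uid, ids, context))
    threaded_calc.start()
    return {'type': 'ir.actions.act_window_close'}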
Example #6
    def _procure_calculation_all(self, cr, uid, ids, context=None):
        """
        @param self: The object pointer.
        @param cr: A database cursor
        @param uid: ID of the user currently logged in
        @param ids: List of IDs selected
        @param context: A standard dictionary
        """
        with Environment.manage():
            proc_obj = self.pool.get('procurement.order')
            # As this function is in a new thread, I need to open a new cursor, because the old one may be closed

            new_cr = self.pool.cursor()
            scheduler_cron_id = self.pool['ir.model.data'].get_object_reference(new_cr, SUPERUSER_ID, 'procurement', 'ir_cron_scheduler_action')[1]
            # Avoid running the scheduler multiple times at the same time
            try:
                with tools.mute_logger('openerp.sql_db'):
                    new_cr.execute("SELECT id FROM ir_cron WHERE id = %s FOR UPDATE NOWAIT", (scheduler_cron_id,))
            except Exception:
                _logger.info('Attempt to run procurement scheduler aborted, as already running')
                new_cr.rollback()
                new_cr.close()
                return {}
            user = self.pool.get('res.users').browse(new_cr, uid, uid, context=context)
            comps = [x.id for x in user.company_ids]
            for comp in comps:
                proc_obj.run_scheduler(new_cr, uid, use_new_cursor=new_cr.dbname, company_id = comp, context=context)
            #close the new cursor
            new_cr.close()
            return {}
Example #7
 def sub_thread_create_accounting_entries(self, move_id, cost_line):
     with Environment.manage():
         new_env = Environment(self.pool.cursor(),
                               self.env.uid,
                               self.env.context
                               )
         self.env.cr.commit()
         this = self.with_env(env=new_env).browse(self.ids)
         this._create_accounting_entries(move_id, cost_line)
         this.env.cr.commit()
         this.env.cr.close()
Example #8
 def _background_estimation(self, cr, uid, ids, context=None):
     """
     @param self: The object pointer.
     @param cr: A database cursor
     @param uid: ID of the user currently logged in
     @param ids: List of IDs selected
     @param context: A standard dictionary
     """
     with Environment.manage():
         new_cr = self.pool.cursor()
         self._calc_estimation(new_cr, uid, ids, context)
         new_cr.commit()
         new_cr.close()
         
     return {}        
Example #9
 def _register_login(self, db_name, user_id, user_agent_env):
     db = pooler.get_db(db_name)
     cr = db.cursor()
     if not user_id:
         return
     with Environment.manage():
         collaborator_obj = self.pool.get('kemas.collaborator')
         collaborator_ids = collaborator_obj.search(cr, user_id, [('user_id', '=', user_id)])
         if collaborator_ids:
             vals_login = {
                           'collaborator_id' : collaborator_ids[0],
                           'base_location' : user_agent_env['base_location'],
                           'remote_address' : user_agent_env['REMOTE_ADDR'],
                           }
             self.pool.get('kemas.collaborator.logbook.login').create(cr, 1, vals_login)
     cr.commit()
Example #10
 def shell(self, dbname):
     local_vars = {
         'openerp': openerp
     }
     with Environment.manage():
         if dbname:
             registry = openerp.modules.registry.RegistryManager.get(dbname)
             with registry.cursor() as cr:
                 uid = openerp.SUPERUSER_ID
                 ctx = Environment(cr, uid, {})['res.users'].context_get()
                 env = Environment(cr, uid, ctx)
                 local_vars['env'] = env
                 local_vars['self'] = env.user
                 self.console(local_vars)
         else:
             self.console(local_vars)
Example #11
 def _procure_calculation_orderpoint(self, cr, uid, ids, context=None):
     """
     @param self: The object pointer.
     @param cr: A database cursor
     @param uid: ID of the user currently logged in
     @param ids: List of IDs selected
     @param context: A standard dictionary
     """
     with Environment.manage():
         proc_obj = self.pool.get('procurement.order')
         #As this function is in a new thread, I need to open a new cursor, because the old one may be closed
         new_cr = self.pool.cursor()
         user_obj = self.pool.get('res.users')
         company_id = user_obj.browse(new_cr, uid, uid, context=context).company_id.id
         proc_obj._procure_orderpoint_confirm(new_cr, uid, use_new_cursor=new_cr.dbname, company_id = company_id, context=context)
         #close the new cursor
         new_cr.close()
         return {}
Example #12
    def run_create_accounting_entries(self):
        with Environment.manage():
            new_env = Environment(self.pool.cursor(),
                                  self.env.uid,
                                  self.env.context)
            self.env.cr.commit()
            this = self.with_env(env=new_env)
            move_id = this._create_account_move()
            _logger.info("Start create account entries for Purchase Cost Distribution"
                         " at %s" % (datetime.now().time().strftime("%H:%M:%S")))
            for cost_line in this.cost_lines:
                # Create Accounting Entries
                this._create_accounting_entries(move_id, cost_line)

            _logger.info("Finish create account entries for Purchase Cost Distribution"
                         " at %s" % (datetime.now().time().strftime("%H:%M:%S")))
            new_env.cr.commit()
            new_env.cr.close()
Example #13
    def read_group(
        self, cr, uid, domain, fields, groupby, offset=0, limit=None, context=None, orderby=False, lazy=True
    ):
        res = super(stock_history, self).read_group(
            cr, uid, domain, fields, groupby, offset=offset, limit=limit, context=context, orderby=orderby, lazy=lazy
        )

        if "parallel_inventory_value" in fields and "inventory_value" in fields:
            context["date"] = context.get("history_date")
            for line in res:

                with Environment.manage():  # class function
                    env = Environment(cr, uid, context)

                line["parallel_inventory_value"] = env.user.company_id.currency_id.compute(
                    line["inventory_value"], env.user.company_id.parallel_currency_id
                )

        return res
Example #14
    def auto_workflow_process(self, workflow_process_id=False, ids=None):
        with Environment.manage():
            env_thread1 = Environment(self._cr, self._uid, self._context)
            sale_order_obj = env_thread1['sale.order']
            workflow_process_obj = env_thread1['sale.workflow.process']
            if not workflow_process_id:
                work_flow_process_records=workflow_process_obj.search([])
            else:
                work_flow_process_records=workflow_process_obj.browse(workflow_process_id)

            if not work_flow_process_records:
                return True
            
            for work_flow_process_record in work_flow_process_records:
                if not ids:
                    orders=sale_order_obj.search([('workflow_process_id','=',work_flow_process_record.id),('state','not in',('done','cancel','shipping_except','invoice_except')),('invoiced','=',False)])
                else:
                    orders=sale_order_obj.search([('workflow_process_id','=',work_flow_process_record.id),('id','in',ids)]) 
                if not orders:
                    continue
                for order in orders:
                    if order.invoiced:
                        continue
                    if work_flow_process_record.validate_order:
                        order.signal_workflow('order_confirm')
                    if not order.invoice_ids:
                        if work_flow_process_record.create_invoice and order.order_policy=='manual' and work_flow_process_record.invoice_on=='manual':
                            order.manual_invoice()
                    if work_flow_process_record.validate_invoice:
                        for invoice in order.invoice_ids:
                            invoice.signal_workflow('invoice_open')
                            journal = work_flow_process_record.journal_id
                            if work_flow_process_record.invoice_date_is_order_date:
                                date = order.date_order
                            else:
                                date = time.strftime('%Y-%m-%d %H:%M:%S')    
                            amount = invoice.amount_total

                            if work_flow_process_record.register_payment:
                                sale_order_obj.pay_sale_order(order,invoice,journal,amount,date)                    
                                invoice.reconcile_invoice()
        return True
Example #15
def post_init_hook(cr, pool):
    env = Environment(cr, SUPERUSER_ID, {})
    store_field_maturity_residual_post_init(env)
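For a hook like this to run, Odoo expects the function to be importable from the module package and referenced by name in the manifest. A minimal sketch, with illustrative module and file names:
# __init__.py -- expose the hook function at package level
from .hooks import post_init_hook

# __openerp__.py (later versions use __manifest__.py) -- reference it by name
{
    'name': 'My Module',
    'version': '1.0',
    'post_init_hook': 'post_init_hook',
}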
Example #16
 def init(self, cr):
     env = Environment(cr, SUPERUSER_ID, {})
     Fiscal = env['account.fiscalyear']
     fiscals = Fiscal.search([])
     fiscals.generate_budget_allocations()
Example #17
def _post_init_hook(cr, pool):
    env = Environment(cr, SUPERUSER_ID, {})
    products = env['product.product'].search([('ean13', '=', False)])
    _log.info(
        'Generating barcode for %s products without EAN13' % len(products.ids))
    products.generate_ean13()
Example #18
 def create_xlsx_report(self, ids, data, report):
     self.context = data['context']
     self.env = Environment(self.env.cr, SUPERUSER_ID, self.env.context)
     return super(ReportAccountFinancial,
                  self).create_xlsx_report(ids, data, report)
Example #19
    def thread_general_import_common(self, cr, uid, ids, sheet, required_fields, o2m_required_fields, context=None):
        context = dict(context or {}, active_test=False)
        import_obj = self.pool.get('import.data')
        with Environment.manage():
            try:
                new_cr = self.pool.cursor()
                for record in import_obj.browse(new_cr, uid, ids, context=None):
                    record.status_ids.unlink()
                    create_pool = self.pool.get(record.model_id.model)
                    model_id = record.model_id.id
                    path = self.save_file(record.file_name, record.file_import)
                    try:   
                        book=xlrd.open_workbook(path) 
                    except:
                        self.create_detail_import(new_cr, uid, import_id=record.id, message='File not found! Please check the path.', status='fail')
                    finally:
                        pass
                
                    sheet=book.sheet_by_index(0)
                    from_row = 3                
                    total_row = 0
                    mess_temp = ""
                    for r in range(from_row, sheet.nrows):
                        if sheet.cell(r,0).value:
                            try:
                                int(sheet.cell(r,0).value)
                                total_row += 1
                            except Exception:
                                mess_line = "Row %s in column A must be an integer" % r
                                mess_temp += len(mess_temp) == 0 and mess_line or "\n" + mess_line
                    if len(mess_temp) or total_row == 0:
                        if len(mess_temp):
                            import_obj.create_detail_import(new_cr, uid, import_id=record.id, row=0, message=mess_temp, status='fail')
                        else:
                            import_obj.create_detail_import(new_cr, uid, import_id=record.id, row=0, message="No row has a value in column A", status='fail')
                        raise Exception(mess_temp)
                    
                    val = {'state': 'processing', 'current_row': 0, 'total_row': total_row}
                    if context.get('from_row', False):
                        val.update({'current_row': context.get('from_row',0)})
                        from_row += context.get('from_row',0)
                    import_obj.write(new_cr, uid, record.id, val, context=context)
                    new_cr.commit()
                    
                    row_counter = 2
                    success_row = 0
                    current_row = 0
                    fields_name = import_obj.get_fields_name(new_cr, uid, sheet._cell_values[0], 1, sheet.ncols, context=context)
                    fields_name_in_excel = import_obj.get_fields_name(new_cr, uid, sheet._cell_values[1], 1, sheet.ncols, context=context)
#                     list_missing = map(lambda x:x, [x for x in required_fields if x not in fields_name])
                    list_missing = []
                    if list_missing:
                        str_list_missing = ""
                        for missing in list_missing:
                            value = missing.encode('utf-8').decode('utf-8')
                            str_list_missing += len(str_list_missing) == 0 and value or ','+value
                        import_obj.create_detail_import(new_cr, uid, import_id=record.id, row=current_row, message='Missing columns required: [%s]. Please check again!' % str_list_missing, status='fail')
                    else:
                        is_child = False
                        data_temp = {}
                        message = ""
                        success = True
                        for row in sheet._cell_values[from_row:]:
                            row_counter += 1
                            next_row = row_counter + 1 < sheet.nrows and row_counter + 1 or row_counter
                            if current_row == 0:
                                current_row = row_counter + 1
                            required_mess = import_obj.check_required_field(new_cr, uid, fields_name_in_excel, fields_name, row[1:], required_fields, context=None)
                            if required_mess and not is_child and sheet.cell(row_counter, 0).value:
                                import_obj.create_detail_import(new_cr, uid, import_id=record.id, row=row[0], message=required_mess, status='fail')
                            else:
                                if not (is_child or sheet.cell(row_counter, 0).value):
                                    line_message = "Row %s in the file is child content of a parent row that has a value in column A above!" % (row_counter + 1)
                                    message += len(message) == 0 and line_message or "\n" + line_message
                                    if sheet.cell(next_row, 0).value:
                                        import_obj.create_detail_import(new_cr, uid, import_id=record.id, row=current_row, message=message, status='fail')
                                        message = ""
                                        current_row = 0
                                    continue                           
                                data = import_obj.get_values_one_row(new_cr, uid, row[1:], fields_name, model_id, sheet, row_counter, context=context)
                                vals_create = data.get('values', {})
                                for field_o2m in data.get('field_one2many', []):
                                    o2m_value = vals_create.get(field_o2m, [])
                                    if o2m_value:
                                        message_temp_child_o2m = import_obj.check_required_for_special_field(field_o2m, o2m_value[0][2], o2m_required_fields, context=None)
                                        if message_temp_child_o2m:
                                            message_tmp = data.get('message', "")
                                            message_tmp += len(message_tmp) == 0 and message_temp_child_o2m or "\n" + message_temp_child_o2m
                                            data.update({
                                                    'message': message_tmp
                                                         })
                                        if data_temp:
                                            if field_o2m in data_temp:
                                                data_temp[field_o2m].append(o2m_value[0])
                                            else:
                                                data_temp.update({field_o2m:o2m_value})
                                for m2m_key in data.get('m2m_keys', []):
                                    m2m_value = vals_create.get(m2m_key,False)
                                    if m2m_value:
                                        if data_temp:
                                            if m2m_key not in data_temp:
                                                data_temp.update({m2m_key : [(6,0,[])]})
                                            data_temp[m2m_key][0][2].append(m2m_value)
                                        else:
                                            vals_create[m2m_key] = [(6,0,[m2m_value])]
                                if not sheet.cell(next_row, 0).value:
                                    if not is_child:
                                        is_child = True
                                        data_temp = vals_create
                                        current_row = current_row
                                        success = data.get('success', False)
                                    if row_counter + 1 == sheet.nrows:
                                        is_child = False
                                else:
                                    is_child = False
                                    if not data_temp:
                                        data_temp = vals_create
                                        success = data.get('success', False)
                                if data.get('message', "") != "":
                                    message += len(message) == 0 and data.get('message', "") or "\n" + data.get('message', "")
                                    success = False
                                if not is_child:
                                    if success:
                                        try:
                                            if data.get('key_many2one',[]):
                                                message_temp = ""
                                                for key_m2o in data.get('key_many2one',[]):
                                                    message_temp_child = import_obj.check_required_for_special_field(key_m2o, data_temp[key_m2o]['values'], o2m_required_fields, context=None)
                                                    if len(message_temp_child):
                                                        message_temp += len(message_temp) == 0 and message_temp_child or "\n" + message_temp_child
                                                    if not len(message_temp):
                                                        m2o_id = self.pool.get(data_temp[key_m2o]['relation']).create(new_cr, uid, data_temp[key_m2o]['values'], context=context)
                                                        data_temp[key_m2o] = m2o_id
                                                if len(message_temp):
                                                    raise Exception(message_temp)
                                            create_pool.create(new_cr, uid, data_temp, context=context)
                                            import_obj.create_detail_import(new_cr, uid, import_id=record.id, row=current_row, message='Import line success.')
                                            new_cr.commit()
                                            success_row += 1
                                        except Exception as excep:
                                            new_cr.rollback()
                                            import_obj.create_detail_import(new_cr, uid, import_id=record.id, row=current_row, message=excep.message or excep.value, status='fail')
                                    else:
                                        import_obj.create_detail_import(new_cr, uid, import_id=record.id, row=current_row, message=message, status='fail')                                             
                            import_obj.write(new_cr, uid, record.id, {'current_row': current_row,'success_row': success_row}, context=context)
                            new_cr.commit()
                            if not is_child:
                                data_temp = {}
                                message = ""
                                current_row = 0
                    import_obj.write(new_cr, uid, record.id, {'state': 'done'}, context=context)
                    new_cr.commit()
            except Exception as excep:
                log.exception(excep)
                import_obj.create_detail_import(new_cr, uid, import_id=record.id, row=row[0], message=excep.message or excep.value, status='fail')
                import_obj.write(new_cr, uid, record.id, {'state': 'error'}, context=context)
                new_cr.commit()
                log.info(excep)
            finally:
                new_cr.close()
        return True
Example #20
def migrate(cr, version):
    if not version:
        return
    env = Environment(cr, SUPERUSER_ID, {})
    convert_action_mail_server_email(env)
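migrate() functions like this are conventionally placed in the module's migrations/<version>/ directory and executed when the module is upgraded to that version. A sketch of the layout, with an illustrative version number and file name:
# my_module/migrations/9.0.1.1/post-migration.py
from openerp import SUPERUSER_ID
from openerp.api import Environment

def migrate(cr, version):
    # 'version' is the previously installed module version; it is empty on a
    # fresh install, in which case there is nothing to migrate
    if not version:
        return
    env = Environment(cr, SUPERUSER_ID, {})
    # ... data fixes using env go here ...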
Example #21
def post_init_hook(cr, registry):
    env = Environment(cr, SUPERUSER_ID, {})

    warehouses = env['stock.warehouse'].search([])
    warehouses.create_locations_rma()
    warehouses.create_sequences_picking_types()
Example #22
    def executeOrm(self, api_name, task, dbname, uid, obj, method, *args,
                   **kwargs):
        result = False
        _logger.debug(
            "executeOrm self=%s,  task = %s ,dbname = %s ,uid= %s, obj=%s, method=%s, args=%s,"
            "kwargs=%s" % (self, task, dbname, uid, obj, method, args, kwargs))

        # openerp.multi_process = True
        # rdb.set_trace()
        uid = SUPERUSER_ID
        global init
        with Environment.manage():
            if not init:
                # database connections are not shared between processes.
                openerp.sql_db.close_all()
                init = True

            registry = RegistryManager.get(dbname)
            model_method = False
            model = False

            try:
                cr = registry.cursor()
                _logger.debug("Orm Cursor = %s  start " % cr)
                context = kwargs.pop('context') if 'context' in kwargs else {}
                #context['celeryTask'] = task
                # rdb.set_trace()
                env = Environment(cr, uid, context)
                obj_ids = []
                model_method = self.origin_func
                args = list(args)
                print 'api_name= %s ' % api_name
                # rdb.set_trace()
                if not api_name:
                    model = registry[obj]
                    args.insert(0, uid)
                    args.insert(0, cr)
                    args.insert(0, model)
                    print 'call %s ' % model_method
                    result = model_method(*args, **kwargs)

                else:
                    model = env[obj]
                    if api_name == 'model' or api_name == 'multi' or api_name == 'one':
                        if args and len(args) > 0:
                            obj_ids = args.pop(0)
                    if obj_ids:
                        recs = model.browse(obj_ids)
                    else:
                        recs = model

                    args.insert(0, recs)

                    result = model_method(*args, **kwargs)

                cr.commit()

            except Exception as exc:
                _logger.debug("Orm Cursor = %s  call %s.%s Exception " %
                              (cr, model, model_method))
                cr.rollback()
                _logger.exception(exc)
                # rdb.set_trace()

                raise exc
                # print 'SELF.REQUEST= %s ' % self.request
                # try:
                #     raise self.retry(
                #         queue=self.request.delivery_info['routing_key'],
                #         exc=exc, countdown=(self.request.retries + 1) * 60,
                #         max_retries=100)
                # except Exception as retry_exc:
                #     raise retry_exc
            finally:
                # _logger.debug("Orm Cursor = %s  end " % cr)
                cr.close()

        return result
Example #23
 def create_xlsx_report(self, ids, data, report):
     self.env = Environment(self.env.cr, SUPERUSER_ID, self.env.context)
     return super(
         ReportBankReconciliationSummary, self
     ).create_xlsx_report(ids, data, report)
Example #24
 def thread_send_notify(self, cr, uid, ids, context=None):
     detail_obj = self.pool.get('notify.to.customers.detail')
     partner_obj = self.pool.get('res.partner')
     number_limit = 50
     with Environment.manage():
         try:
             new_cr = self.pool.cursor()
             for data in self.browse(new_cr, uid, ids, context=context):
                 self.createRabbitMQforNotifyContent(
                     new_cr, uid, [self.browse(new_cr, uid, data.id)],
                     "createMQ")
                 new_cr.commit()
                 time.sleep(1)
                 self.write(new_cr,
                            uid, [data.id], {'state': 'pushing'},
                            context=context)
                 new_cr.commit()
                 customer_ids = []
                 if data.type == 'all':
                     customer_ids += partner_obj.search(
                         new_cr,
                         uid, [('customer', '=', True)],
                         context=context)
                 elif data.type == 'many':
                     customer_ids += data.partner_ids.ids
                 elif data.type == 'once':
                     customer_ids += [data.partner_id.id]
                 received_ids = map(
                     lambda x: x.partner_id.id
                     if x.partner_id else False, data.detail_ids)
                 customer_ids = [
                     x for x in customer_ids if x not in received_ids
                 ]
                 list_website_ids = [
                     x.website_id for x in partner_obj.browse(
                         new_cr, uid, customer_ids, context=context)
                 ]
                 ranger = len(customer_ids) % number_limit and (
                     len(customer_ids) /
                     number_limit) + 1 or len(customer_ids) / number_limit
                 for index in range(ranger):
                     max_item = (index + 1) * number_limit > len(
                         customer_ids) and len(customer_ids) or (
                             index + 1) * number_limit
                     partner_ids = customer_ids[index *
                                                number_limit:max_item]
                     website_ids = list_website_ids[index *
                                                    number_limit:max_item]
                     for partner_id in partner_ids:
                         detail_obj.create(new_cr,
                                           uid, {
                                               'partner_id': partner_id,
                                               'ref_id': data.id
                                           },
                                           context=context)
                         new_cr.commit()
                     message = self.makeDataQueueforSendNotify(
                         new_cr, uid, data, website_ids, "createMQ")
                     self.createRabbitMQforSendNotify(
                         new_cr, uid, [self.browse(new_cr, uid, data.id)],
                         message, "createMQ")
                     new_cr.commit()
                     time.sleep(1)
                 self.write(new_cr,
                            uid, [data.id], {'state': 'done'},
                            context=context)
                 new_cr.commit()
         except Exception as e:
             log.exception(e)
             new_cr.commit()
             log.info(e)
         finally:
             new_cr.close()
     return True
Example #25
def post_init_hook(cr, registry):
    with Environment.manage():
        env = Environment(cr, SUPERUSER_ID, {})

        warehouses = env['stock.warehouse'].search([])
        warehouses.create_locations_rma()
Example #26
 def create_xlsx_report(self, ids, data, report):
     self.env = Environment(self.env.cr, SUPERUSER_ID, self.env.context)
     return super(ReportStockInventoryXlsx,
                  self).create_xlsx_report(ids, data, report)
Example #27
 def init(self, cr):
     env = Environment(cr, SUPERUSER_ID, {})
     fiscalyears = env['account.fiscalyear'].search([])
     fiscalyears.create_budget_level_config()
Example #28
def pre_init_hook(cr):
    env = Environment(cr, SUPERUSER_ID, {})
    env.cr.execute(
        'UPDATE hr_applicant SET partner_id=1 WHERE partner_id IS NULL')
Example #29
def post_init_hook(cr, pool):
    env = Environment(cr, SUPERUSER_ID, {})
    env['res.partner'].search([('birthdate', "!=", False)
                               ])._birthdate_inverse()
Example #30
    def mfa_login_post(self, *args, **kwargs):
        """Process MFA login attempt

        Overview:
            * Try to find a user based on the MFA login token. If this doesn't
              work, redirect to the password login page with an error message
            * Validate the confirmation code provided by the user. If it's not
              valid, redirect to the previous login step with an error message
            * Generate a long-term MFA login token for the user and log the
              user in using the token
            * Build a trusted device cookie and add it to the response if the
              trusted device option was checked
            * Redirect to the provided URL or to '/web' if one was not given
        """

        # sudo() is required because there is no request.env.uid (likely since
        # there is no user logged in at the start of the request)
        user_model_sudo = request.env['res.users'].sudo()
        device_model_sudo = user_model_sudo.env['res.users.device']
        config_model_sudo = user_model_sudo.env['ir.config_parameter']

        token = request.params.get('mfa_login_token')
        try:
            user = user_model_sudo.user_from_mfa_login_token(token)
        except (MfaTokenInvalidError, MfaTokenExpiredError) as exception:
            return http.local_redirect(
                '/web/login',
                query={
                    'redirect': request.params.get('redirect'),
                    'error': exception.message,
                },
                keep_hash=True,
            )

        confirmation_code = request.params.get('confirmation_code')
        if not user.validate_mfa_confirmation_code(confirmation_code):
            return http.local_redirect(
                '/auth_totp/login',
                query={
                    'redirect':
                    request.params.get('redirect'),
                    'error':
                    _('Your confirmation code is not correct. Please try'
                      ' again.'),
                    'mfa_login_token':
                    token,
                },
                keep_hash=True,
            )

        # These context managers trigger a safe commit, which persists the
        # changes right away and is needed for the auth call
        with Environment.manage():
            with registry(request.db).cursor() as temp_cr:
                temp_env = Environment(temp_cr, SUPERUSER_ID, request.context)
                temp_user = temp_env['res.users'].browse(user.id)
                temp_user.generate_mfa_login_token(60 * 24 * 30)
                token = temp_user.mfa_login_token
        request.session.authenticate(request.db, user.login, token, user.id)
        request.params['login_success'] = True

        redirect = request.params.get('redirect')
        if not redirect:
            redirect = '/web'
        response = http.redirect_with_hash(redirect)
        if not isinstance(response, WerkzeugResponse):
            response = Response(response)

        if request.params.get('remember_device'):
            device = device_model_sudo.create({'user_id': user.id})
            secret = config_model_sudo.get_param('database.secret')
            device_cookie = JsonSecureCookie({'device_id': device.id}, secret)
            cookie_lifetime = timedelta(days=30)
            cookie_exp = datetime.utcnow() + cookie_lifetime
            device_cookie = device_cookie.serialize(cookie_exp)
            cookie_key = 'trusted_devices_%d' % user.id
            sec_config = config_model_sudo.get_param('auth_totp.secure_cookie')
            security_flag = sec_config != '0'
            response.set_cookie(
                cookie_key,
                device_cookie,
                max_age=cookie_lifetime.total_seconds(),
                expires=cookie_exp,
                httponly=True,
                secure=security_flag,
            )

        return response
Example #31
 def _send_email(self, cr, uid, ids, context={}):
     def send():
         try:
             res = config_obj.send_email_event(cr, uid, int(line['event_line_id']), context)
             if not res:
                 return 'Error'
             else:
                 return 'Successful'
         except:
             return 'Error'
     
     with Environment.manage():    
         config_obj = self.pool.get('kemas.config')
         wizard_line_obj = self.pool.get('kemas.send.notification.event.line.wizard')
         line_obj = self.pool.get('kemas.event.collaborator.line') 
         event_obj = self.pool.get('kemas.event')
         #-------------------------------------------------------------------------------------------------------------
          if isinstance(ids, list):
             wizard_id = ids[0]
         else:
             wizard_id = ids
         
         event_id = self.read(cr, uid, wizard_id, ['event_id'])['event_id']
         super(kemas_send_notification_event_wizard, self).write(cr, uid, wizard_id, {'sending_emails':True})
         cr.commit()
         super(addons.kemas.kemas.kemas_event, event_obj).write(cr, uid, event_id, {'sending_emails':True})
         cr.commit()
 
         line_ids = self.read(cr, uid, wizard_id, ['send_notification_event_line_wizard_ids'])['send_notification_event_line_wizard_ids']
         lines = wizard_line_obj.read(cr, uid, line_ids)
         _lines = []
         for line in lines:
             if line['send_email']:
                 _lines.append(line)
         
         if len(_lines) == 0:
              raise osv.except_osv(u'Invalid operation!', _('No staff to send notifications.'))
         
          if event_id not in self.collaborator_ids_send_email:
             self.collaborator_ids_send_email[event_id] = []
         for line in _lines:
             self.collaborator_ids_send_email[event_id].append(line['collaborator_id'][0])
             line_obj.write(cr, uid, [long(line['event_line_id'])], {
                                         'send_email_state':'Waiting',
                                         })
         cr.commit()
         for line in _lines:
             cr.commit()
             sending_emails = event_obj.read(cr, uid, event_id, ['sending_emails'])['sending_emails']
             if sending_emails == False: break
             res_email = kemas_extras.timeout(send, timeout_duration=self.timeout_send_email, default='Timeout')
             if res_email == 'Successful':
                 wizard_line_obj.write(cr, uid, [line['id']], {
                                     'state':'Successful',
                                     'send_email': False,
                                     'sent_date' : time.strftime("%Y-%m-%d %H:%M:%S")
                                     })
                 line_obj.write(cr, uid, [long(line['event_line_id'])], {
                                     'send_email_state':'Sent',
                                     'sent_date' : time.strftime("%Y-%m-%d %H:%M:%S")
                                     })
             elif res_email == 'Error':
                 wizard_line_obj.write(cr, uid, [line['id']], {
                                     'state':'Error',
                                     'send_email':True,
                                     'sent_date' : time.strftime("%Y-%m-%d %H:%M:%S")
                                     })
                 line_obj.write(cr, uid, [long(line['event_line_id'])], {
                                     'send_email_state':'Error',
                                     'sent_date' : time.strftime("%Y-%m-%d %H:%M:%S")
                                     })
             elif res_email == 'Timeout':
                 wizard_line_obj.write(cr, uid, [line['id']], {
                                     'state':'Timeout',
                                     'send_email':True,
                                     'sent_date' : time.strftime("%Y-%m-%d %H:%M:%S")
                                     })
                 line_obj.write(cr, uid, [long(line['event_line_id'])], {
                                     'send_email_state':'Timeout',
                                     'sent_date' : time.strftime("%Y-%m-%d %H:%M:%S")
                                     })
             cr.commit()
         cr.commit()
         super(kemas_send_notification_event_wizard, self).write(cr, uid, wizard_id, {'sending_emails': False})
         super(addons.kemas.kemas.kemas_event, event_obj).write(cr, uid, event_id, {'sending_emails': False})
         try:
             del self.collaborator_ids_send_email[event_id]
          except KeyError:
              pass
         cr.commit()
Example #32
    def _thread_create_docker(self, kwargs):
        with openerp.sql_db.db_connect(kwargs.get('db')).cursor() as new_cr:
            with Environment.manage():
                env = Environment(new_cr, kwargs.get('uid'), {})
                project = env['project.project'].browse(
                    [kwargs.get('project_id')])
                try:
                    if not project:
                        raise Exception(
                            _("The project appears doesn't exists!"))

                    # Get information about the project to deploy
                    eiqui_config = env['eiqui.config.settings'].search(
                        [], order="id DESC", limit=1)
                    git_username = None
                    git_password = None
                    if eiqui_config:
                        git_username = eiqui_config.git_username
                        git_password = eiqui_config.git_password
                    repos = []
                    modules = []
                    # Get the modules and repos to install for the base vertical
                    vertical_base_id = env['eiqui.vertical'].search(
                        [('name', '=', '__base__')], limit=1)
                    if vertical_base_id:
                        branch_modules = env['eiqui.modules'].search([
                            ('repo_id.branch', '=', project.odoo_version),
                            ('id', 'in', vertical_base_id.modules.mapped('id'))
                        ])
                        for module in branch_modules:
                            modules.append(module.folder)
                            repos.append(module.repo_id.url)
                    # Get the repos to install
                    branch_repos = env['eiqui.project.modules'].search([
                        ('repo_id.branch', '=', project.odoo_version),
                        ('id', 'in', project.repo_modules_ids.mapped('id'))
                    ])
                    for repo in branch_repos:
                        repos.append(repo.url)

                    # Create the droplet
                    eiqui_utils.create_droplet(project.name,
                                               branch=project.odoo_version)
                    # Write the buildout recipes
                    eiqui_utils.prepare_client_recipe(project.name,
                                                      repos,
                                                      project.odoo_version,
                                                      git_user=git_username,
                                                      git_pass=git_password)
                    # Create the dockers and the rest of the system configuration
                    eiqui_utils.create_client(project.name)
                    # Install the database, load modules, ...
                    (inst_info, adminpasswd,
                     odoo_url) = eiqui_utils.prepare_client_instance(
                         project.name, modules_installed=modules)
                    #eiqui_utils.monitor_client(project.name)
                    project.write({
                        'server_state': 'created',
                        'adminpass': adminpasswd
                    })
                    # Send Creation Mail
                    project.send_mail_plan_creation({
                        'inst_info': inst_info,
                        'adminpasswd': adminpasswd,
                        'url': odoo_url,
                    })
                except Exception:
                    env['project.issue'].create({
                        'name':
                        _('Error while creating a new plan'),
                        'description':
                        traceback.format_exc(),
                        'project_id':
                        project.id,
                        'priority':
                        '2',
                    })
                    project.write({'server_state': 'error'})
                    # Send Error Mail
                    try:
                        project.send_mail_plan_creation()
                    except:
                        pass
Example #33
    def create_refund(self):
        with Environment.manage():
            env_thread1 = Environment(self._cr, self._uid, self._context)
            #sale_order_obj=env_thread1['sale.order']
            for record in self:
                account_invoice_line_obj = env_thread1['account.invoice.line']
                journal_id = record.journal_id and record.journal_id.id
                inv_date = record.date_ept or fields.Date.context_today(self)
                payment_term = record.order_id.payment_term_id or False
                invoice_vals = {
                    'name':
                    record.order_id.name or '',
                    'origin':
                    account_invoice_line_obj.name,
                    'type':
                    'out_refund',
                    'reference':
                    record.order_id.client_order_ref or record.order_id.name,
                    'account_id':
                    record.order_id.partner_id.property_account_receivable_id.
                    id,
                    'partner_id':
                    record.order_id.partner_invoice_id.id,
                    'journal_id':
                    journal_id,
                    'currency_id':
                    record.order_id.pricelist_id.currency_id.id,
                    'comment':
                    record.order_id.note,
                    'payment_term_id':
                    payment_term.id,
                    'fiscal_position_id':
                    record.order_id.fiscal_position_id.id or
                    record.order_id.partner_id.property_account_position_id.id,
                    'company_id':
                    record.company_id.id,
                    'amazon_instance_id':
                    self.instance_id.id,
                    'user_id':
                    record._uid or False,
                    'date_invoice':
                    inv_date,
                    'team_id':
                    record.order_id.team_id and record.order_id.team_id.id,
                }
                invoice = env_thread1['account.invoice'].create(invoice_vals)
                record.write({'invoice_id': invoice.id})
                for line in record.amazon_refund_line_ids:
                    name = line.amazon_order_line_id.name
                    invoice_id = invoice.id
                    account = env_thread1[
                        'account.invoice.line'].get_invoice_line_account(
                            'out_refund', line.product_id,
                            record.order_id.fiscal_position_id,
                            record.company_id)
                    quantity = line.product_qty
                    price_unit = round(
                        line.total_refund / quantity,
                        self.env['decimal.precision'].precision_get(
                            'Product Price'))
                    uom_id = line.amazon_order_line_id.sale_order_line_id.product_uom.id

                    vals = {
                        'product_id': line.product_id.id,
                        'name': name,
                        'invoice_id': invoice_id,
                        'account_id': account.id,
                        'price_unit': price_unit,
                        'quantity': quantity,
                        'uom_id': uom_id,
                    }
                    new_record = account_invoice_line_obj.new(vals)
                    new_record._onchange_product_id()
                    retval = new_record._convert_to_write(
                        {name: new_record[name]
                         for name in new_record._cache})
                    retval.update({
                        'price_unit': price_unit,
                        'quantity': quantity,
                        'uom_id': uom_id,
                    })

                    account_invoice_line_obj.create(retval)
                return True
Example #34
def post_init_hook(cr, registry):
    """Loaded after installing the module.
    This module's DB modifications will be available.
    :param openerp.sql_db.Cursor cr:
        Database cursor.
    :param openerp.modules.registry.RegistryManager registry:
        Database registry, using v7 api.
    """
    _logger.info('Post init hook initialized')

    document_types_not_updatable(cr, registry)
    sync_padron_afip(cr, registry)

    # we do not force a dependency on openupgradelib; only if it is available
    # do we try to run the hook
    if not table_exists:
        return False

    # TODO choose:
    # odoo migration deletes vouchers that were moved to payments, so we make
    # a copy of the voucher table and get data from this one. Because
    # account_payment ids and account_voucher ids do not match, we search
    # by move_id
    advance_column = column_exists(cr, 'account_voucher_copy',
                                   'advance_amount')
    if advance_column:
        sql = """
                SELECT receiptbook_id, afip_document_number, advance_amount
                FROM account_voucher_copy
                WHERE move_id = %s
                """
    else:
        sql = """
                SELECT receiptbook_id, afip_document_number
                FROM account_voucher_copy
                WHERE move_id = %s
                """
    if table_exists(cr, 'account_voucher_copy'):
        _logger.info('Migrating vouchers data')
        for payment_id in registry['account.payment'].search(cr, 1, []):
            _logger.info('Migrating vouchers data for payment %s' % payment_id)
            move_ids = registry['account.move'].search(
                cr, 1, [('line_ids.payment_id', '=', payment_id)], limit=1)
            if not move_ids:
                continue
            cr.execute(sql, (move_ids[0], ))
            recs = cr.fetchall()
            if recs:
                # we set the advance_amount, at least for the payments that
                # were validated
                if advance_column:
                    receiptbook_id, document_number, advance_amount = recs[0]
                    registry['account.payment'].write(
                        cr,
                        1,
                        [payment_id],
                        {
                            'receiptbook_id': receiptbook_id,
                            'document_number': document_number,
                            # we don't do it here because this field belongs to
                            # payment.group and the payment group probably does not
                            # exist at this point; we do it in l10 withholding
                            # 'unreconciled_amount': advance_amount,
                        })
                else:
                    receiptbook_id, document_number = recs[0]
                    registry['account.payment'].write(
                        cr, 1, [payment_id], {
                            'receiptbook_id': receiptbook_id,
                            'document_number': document_number,
                        })

    # horrible way of knowing whether we are installing on a database that comes
    # from a migration. After doing this we learned to use no_version in migrates,
    # but in practice that didn't work for us either
    env = Environment(cr, 1, {})
    if openupgrade.column_exists(cr, 'account_journal', 'old_type'):
        set_company_loc_ar(cr)
        merge_padron_into_account(cr)
        migrate_responsability_type(env)
        fix_invoice_without_date(env)
        merge_refund_journals_to_normal(env)
        map_tax_groups_to_taxes(cr, registry)

        _logger.info('Getting currency rate for invoices')
        ar_invoice_ids = registry['account.invoice'].search(
            cr, 1, [('localization', '=', 'argentina')])
        for invoice_id in ar_invoice_ids:
            vals = registry['account.invoice'].get_localization_invoice_vals(
                cr, 1, invoice_id)
            registry['account.invoice'].write(
                cr, 1, invoice_id,
                {'currency_rate': vals.get('currency_rate')})
Example #35
    def auto_workflow_process(self, auto_workflow_process_id=False, ids=None):
        with Environment.manage():
            env_thread1 = Environment(self._cr, self._uid, self._context)
            sale_order_obj = env_thread1['sale.order']
            sale_order_line_obj = env_thread1['sale.order.line']
            account_payment_obj = env_thread1['account.payment']
            workflow_process_obj = env_thread1['sale.workflow.process.ept']
            if not auto_workflow_process_id:
                work_flow_process_records = workflow_process_obj.search([])
            else:
                work_flow_process_records = workflow_process_obj.browse(
                    auto_workflow_process_id)

            if not work_flow_process_records:
                return True

            for work_flow_process_record in work_flow_process_records:
                if not ids:
                    orders = sale_order_obj.search([
                        ('auto_workflow_process_id', '=',
                         work_flow_process_record.id),
                        ('state', 'not in', ('done', 'cancel', 'sale')),
                        ('invoice_status', '!=', 'invoiced')
                    ])  #('invoiced','=',False)
                else:
                    orders = sale_order_obj.search([
                        ('auto_workflow_process_id', '=',
                         work_flow_process_record.id), ('id', 'in', ids)
                    ])
                if not orders:
                    continue
                for order in orders:
                    if order.invoice_status and order.invoice_status == 'invoiced':
                        continue
                    if work_flow_process_record.validate_order:
                        order.action_confirm()
                    if work_flow_process_record.invoice_policy == 'delivery':
                        continue
                    if not work_flow_process_record.invoice_policy and not sale_order_line_obj.search(
                        [('product_id.invoice_policy', '!=', 'delivery'),
                         ('order_id', 'in', order.ids)]):
                        continue
                    if not order.invoice_ids:
                        if work_flow_process_record.create_invoice:
                            order.action_invoice_create()
                    if work_flow_process_record.validate_invoice:
                        for invoice in order.invoice_ids:
                            invoice.action_invoice_open()
                            if work_flow_process_record.register_payment:
                                if invoice.residual:
                                    # Create Invoice and Make Payment
                                    vals = {
                                        'journal_id':
                                        work_flow_process_record.journal_id.id,
                                        'invoice_ids': [(6, 0, [invoice.id])],
                                        'communication':
                                        invoice.reference,
                                        'currency_id':
                                        invoice.currency_id.id,
                                        'payment_type':
                                        'inbound',
                                        'partner_id':
                                        invoice.commercial_partner_id.id,
                                        'amount':
                                        invoice.residual,
                                        'payment_method_id':
                                        work_flow_process_record.journal_id.
                                        inbound_payment_method_ids.id,
                                        'partner_type':
                                        'customer'
                                    }
                                    new_rec = account_payment_obj.create(vals)
                                    new_rec.post()
        return True
Example #36
 def create_xlsx_report(self, ids, data, report):
     self.env = Environment(self.env.cr, SUPERUSER_ID, self.env.context)
     return super(ReportAccountAssetXlsx,
                  self).create_xlsx_report(ids, data, report)
Example #37
def post_init_hook(cr, pool):
    env = Environment(cr, SUPERUSER_ID, {})
    adjust_employee_partners_post(env)
Example #38
def uninstall_hook(cr, registry):
    env = Environment(cr, SUPERUSER_ID, {})
    recs = env['bi.sql.view'].search([])
    for rec in recs:
        rec.button_set_draft()
Example #39
def post_init_hook(cr, pool):
    env = Environment(cr, SUPERUSER_ID, {})
    adjust_menu(env)
Example #40
def migrate(cr, version):
    if not version:
        return

    env = Environment(cr, SUPERUSER_ID, {})
    merge_action_stages(env)
Example #41
 def init(self, cr):
     env = Environment(cr, SUPERUSER_ID, {})
     env['res.currency'].search([]).write({'type': 'bigger'})